From 8d131153bc2d8cb16eefa648f68119fb71e4acbd Mon Sep 17 00:00:00 2001
From: Antoine <36040805+AntSimi@users.noreply.github.com>
Date: Tue, 30 Mar 2021 08:43:51 +0200
Subject: [PATCH 001/115] Add test badge

---
 .github/workflows/python-app.yml | 2 +-
 CHANGELOG.rst                    | 8 ++++++++
 README.md                        | 1 +
 3 files changed, 10 insertions(+), 1 deletion(-)

diff --git a/.github/workflows/python-app.yml b/.github/workflows/python-app.yml
index 7c800b33..43fd6b2d 100644
--- a/.github/workflows/python-app.yml
+++ b/.github/workflows/python-app.yml
@@ -1,7 +1,7 @@
 # This workflow will install Python dependencies, run tests and lint with a single version of Python
 # For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions
 
-name: Python application
+name: Pytest & Flake8
 
 on:
   push:
diff --git a/CHANGELOG.rst b/CHANGELOG.rst
index b974e3bd..80def41c 100644
--- a/CHANGELOG.rst
+++ b/CHANGELOG.rst
@@ -8,7 +8,15 @@ and this project adheres to `Semantic Versioning
Date: Tue, 30 Mar 2021 15:02:26 +0200
Subject: [PATCH 002/115] Add example from Evan Mason about normalized lifetime

---
 .../pet_normalised_lifetime.py                |  78 ++++++++++++
 .../pet_normalised_lifetime.ipynb             | 119 ++++++++++++++++++
 ..._global_allsat_phy_l4_20190223_20190226.nc |   1 +
 src/py_eddy_tracker/observations/network.py   |   2 +-
 4 files changed, 199 insertions(+), 1 deletion(-)
 create mode 100644 examples/10_tracking_diagnostics/pet_normalised_lifetime.py
 create mode 100644 notebooks/python_module/10_tracking_diagnostics/pet_normalised_lifetime.ipynb
 create mode 120000 share/nrt_global_allsat_phy_l4_20190223_20190226.nc

diff --git a/examples/10_tracking_diagnostics/pet_normalised_lifetime.py b/examples/10_tracking_diagnostics/pet_normalised_lifetime.py
new file mode 100644
index 00000000..372b66c0
--- /dev/null
+++ b/examples/10_tracking_diagnostics/pet_normalised_lifetime.py
@@ -0,0 +1,78 @@
+"""
+Normalised Eddy Lifetimes
+=========================
+
+Example from Evan Mason
+"""
+from matplotlib import pyplot as plt
+from numba import njit
+from numpy import interp, linspace, zeros
+from py_eddy_tracker_sample import get_demo_path
+
+from py_eddy_tracker.observations.tracking import TrackEddiesObservations
+
+
+# %%
+@njit(cache=True)
+def sum_profile(x_new, y, out):
+    """Interpolate the given array on the x_new grid and accumulate it in out"""
+    out += interp(x_new, linspace(0, 1, y.size), y)
+
+
+class MyObs(TrackEddiesObservations):
+    def eddy_norm_lifetime(self, name, nb, factor=1):
+        """
+        :param str,array name: Array or field name
+        :param int nb: size of output array
+        """
+        y = self.parse_varname(name)
+        x = linspace(0, 1, nb)
+        out = zeros(nb, dtype=y.dtype)
+        nb_track = 0
+        for i, b0, b1 in self.iter_on("track"):
+            y_ = y[i]
+            size_ = y_.size
+            if size_ == 0:
+                continue
+            sum_profile(x, y_, out)
+            nb_track += 1
+        return x, out / nb_track * factor
+
+
+# %%
+# Load atlas
+# ----------
+kw = dict(include_vars=("speed_radius", "amplitude", "track"))
+a = MyObs.load_file(
+    get_demo_path("eddies_med_adt_allsat_dt2018/Anticyclonic.zarr"), **kw
+)
+c = MyObs.load_file(get_demo_path("eddies_med_adt_allsat_dt2018/Cyclonic.zarr"), **kw)
+
+nb_max_a = a.nb_obs_by_track.max()
+nb_max_c = c.nb_obs_by_track.max()
+
+# %%
+# Compute normalize lifetime
+# --------------------------
+
+# Radius
+AC_radius = a.eddy_norm_lifetime("speed_radius", nb=nb_max_a, factor=1e-3)
+CC_radius = c.eddy_norm_lifetime("speed_radius", nb=nb_max_c, factor=1e-3)
+# Amplitude
+AC_amplitude = a.eddy_norm_lifetime("amplitude", nb=nb_max_a, factor=1e2)
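+# factor=1e2 converts the amplitude from m to cm (1e-3 above: radius in km)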
+CC_amplitude = c.eddy_norm_lifetime("amplitude", nb=nb_max_c, factor=1e2)
+
+# %%
+# Figure
+# ------
+fig, axs = plt.subplots(nrows=2, figsize=(8, 6))
+
+axs[0].set_title("Normalised Mean Radius")
+axs[0].plot(*AC_radius), axs[0].plot(*CC_radius)
+axs[0].set_ylabel("Radius (km)"), axs[0].grid()
+axs[0].set_xlim(0, 1), axs[0].set_ylim(0, None)
+
+axs[1].set_title("Normalised Mean Amplitude")
+axs[1].plot(*AC_amplitude, label="AC"), axs[1].plot(*CC_amplitude, label="CC")
+axs[1].set_ylabel("Amplitude (cm)"), axs[1].grid(), axs[1].legend()
+_ = axs[1].set_xlim(0, 1), axs[1].set_ylim(0, None)
diff --git a/notebooks/python_module/10_tracking_diagnostics/pet_normalised_lifetime.ipynb b/notebooks/python_module/10_tracking_diagnostics/pet_normalised_lifetime.ipynb
new file mode 100644
index 00000000..6c98a1cf
--- /dev/null
+++ b/notebooks/python_module/10_tracking_diagnostics/pet_normalised_lifetime.ipynb
@@ -0,0 +1,119 @@
+{
+  "cells": [
+    {
+      "cell_type": "code",
+      "execution_count": null,
+      "metadata": {
+        "collapsed": false
+      },
+      "outputs": [],
+      "source": [
+        "%matplotlib inline"
+      ]
+    },
+    {
+      "cell_type": "markdown",
+      "metadata": {},
+      "source": [
+        "\n# Normalised Eddy Lifetimes\n\nExample from Evan Mason\n"
+      ]
+    },
+    {
+      "cell_type": "code",
+      "execution_count": null,
+      "metadata": {
+        "collapsed": false
+      },
+      "outputs": [],
+      "source": [
+        "from matplotlib import pyplot as plt\nfrom numba import njit\nfrom numpy import interp, linspace, zeros\nfrom py_eddy_tracker_sample import get_demo_path\n\nfrom py_eddy_tracker.observations.tracking import TrackEddiesObservations"
+      ]
+    },
+    {
+      "cell_type": "code",
+      "execution_count": null,
+      "metadata": {
+        "collapsed": false
+      },
+      "outputs": [],
+      "source": [
+        "@njit(cache=True)\ndef sum_profile(x_new, y, out):\n    \"\"\"Interpolate the given array on the x_new grid and accumulate it in out\"\"\"\n    out += interp(x_new, linspace(0, 1, y.size), y)\n\n\nclass MyObs(TrackEddiesObservations):\n    def eddy_norm_lifetime(self, name, nb, factor=1):\n        \"\"\"\n        :param str,array name: Array or field name\n        :param int nb: size of output array\n        \"\"\"\n        y = self.parse_varname(name)\n        x = linspace(0, 1, nb)\n        out = zeros(nb, dtype=y.dtype)\n        nb_track = 0\n        for i, b0, b1 in self.iter_on(\"track\"):\n            y_ = y[i]\n            size_ = y_.size\n            if size_ == 0:\n                continue\n            sum_profile(x, y_, out)\n            nb_track += 1\n        return x, out / nb_track * factor"
+      ]
+    },
+    {
+      "cell_type": "markdown",
+      "metadata": {},
+      "source": [
+        "## Load atlas\n\n"
+      ]
+    },
+    {
+      "cell_type": "code",
+      "execution_count": null,
+      "metadata": {
+        "collapsed": false
+      },
+      "outputs": [],
+      "source": [
+        "kw = dict(include_vars=(\"speed_radius\", \"amplitude\", \"track\"))\na = MyObs.load_file(\n    get_demo_path(\"eddies_med_adt_allsat_dt2018/Anticyclonic.zarr\"), **kw\n)\nc = MyObs.load_file(get_demo_path(\"eddies_med_adt_allsat_dt2018/Cyclonic.zarr\"), **kw)\n\nnb_max_a = a.nb_obs_by_track.max()\nnb_max_c = c.nb_obs_by_track.max()"
+      ]
+    },
+    {
+      "cell_type": "markdown",
+      "metadata": {},
+      "source": [
+        "## Compute normalize lifetime\n\n"
+      ]
+    },
+    {
+      "cell_type": "code",
+      "execution_count": null,
+      "metadata": {
+        "collapsed": false
+      },
+      "outputs": [],
+      "source": [
+        "# Radius\nAC_radius = a.eddy_norm_lifetime(\"speed_radius\", nb=nb_max_a, factor=1e-3)\nCC_radius = c.eddy_norm_lifetime(\"speed_radius\", nb=nb_max_c, factor=1e-3)\n# Amplitude\nAC_amplitude = a.eddy_norm_lifetime(\"amplitude\", nb=nb_max_a, factor=1e2)\n# factor=1e2 converts the amplitude from m to cm (1e-3 above: radius in km)\nCC_amplitude = c.eddy_norm_lifetime(\"amplitude\", nb=nb_max_c, factor=1e2)"
+      ]
+    },
+    {
+      "cell_type": 
"markdown", + "metadata": {}, + "source": [ + "## Figure\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "fig, axs = plt.subplots(nrows=2, figsize=(8, 6))\n\naxs[0].set_title(\"Normalised Mean Radius\")\naxs[0].plot(*AC_radius), axs[0].plot(*CC_radius)\naxs[0].set_ylabel(\"Radius (km)\"), axs[0].grid()\naxs[0].set_xlim(0, 1), axs[0].set_ylim(0, None)\n\naxs[1].set_title(\"Normalised Mean Amplitude\")\naxs[1].plot(*AC_amplitude, label=\"AC\"), axs[1].plot(*CC_amplitude, label=\"CC\")\naxs[1].set_ylabel(\"Amplitude (cm)\"), axs[1].grid(), axs[1].legend()\n_ = axs[1].set_xlim(0, 1), axs[1].set_ylim(0, None)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.7" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} \ No newline at end of file diff --git a/share/nrt_global_allsat_phy_l4_20190223_20190226.nc b/share/nrt_global_allsat_phy_l4_20190223_20190226.nc new file mode 120000 index 00000000..077ce7e6 --- /dev/null +++ b/share/nrt_global_allsat_phy_l4_20190223_20190226.nc @@ -0,0 +1 @@ +../src/py_eddy_tracker/data/nrt_global_allsat_phy_l4_20190223_20190226.nc \ No newline at end of file diff --git a/src/py_eddy_tracker/observations/network.py b/src/py_eddy_tracker/observations/network.py index 53af9f30..6cd18d16 100644 --- a/src/py_eddy_tracker/observations/network.py +++ b/src/py_eddy_tracker/observations/network.py @@ -575,7 +575,7 @@ def numbering_segment(self, start=0): New numbering of segment """ for i, _, _ in self.iter_on("track"): - new_numbering(self.segment[i]) + new_numbering(self.segment[i], start) def numbering_network(self, start=1): """ From 415931732bc9d1f72bef58ec6cda1c2457ece562 Mon Sep 17 00:00:00 2001 From: Antoine <36040805+AntSimi@users.noreply.github.com> Date: Wed, 31 Mar 2021 13:39:03 +0200 Subject: [PATCH 003/115] Add minigallery for interp_grid --- examples/10_tracking_diagnostics/pet_normalised_lifetime.py | 4 ++-- .../10_tracking_diagnostics/pet_normalised_lifetime.ipynb | 2 +- src/py_eddy_tracker/observations/observation.py | 2 ++ 3 files changed, 5 insertions(+), 3 deletions(-) diff --git a/examples/10_tracking_diagnostics/pet_normalised_lifetime.py b/examples/10_tracking_diagnostics/pet_normalised_lifetime.py index 372b66c0..73e5274e 100644 --- a/examples/10_tracking_diagnostics/pet_normalised_lifetime.py +++ b/examples/10_tracking_diagnostics/pet_normalised_lifetime.py @@ -52,8 +52,8 @@ def eddy_norm_lifetime(self, name, nb, factor=1): nb_max_c = c.nb_obs_by_track.max() # %% -# Compute normalize lifetime -# -------------------------- +# Compute normalised lifetime +# --------------------------- # Radius AC_radius = a.eddy_norm_lifetime("speed_radius", nb=nb_max_a, factor=1e-3) diff --git a/notebooks/python_module/10_tracking_diagnostics/pet_normalised_lifetime.ipynb b/notebooks/python_module/10_tracking_diagnostics/pet_normalised_lifetime.ipynb index 6c98a1cf..867e081f 100644 --- a/notebooks/python_module/10_tracking_diagnostics/pet_normalised_lifetime.ipynb +++ b/notebooks/python_module/10_tracking_diagnostics/pet_normalised_lifetime.ipynb @@ -62,7 +62,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## Compute normalize lifetime\n\n" + 
"## Compute normalised lifetime\n\n" ] }, { diff --git a/src/py_eddy_tracker/observations/observation.py b/src/py_eddy_tracker/observations/observation.py index 73df5734..839dbca1 100644 --- a/src/py_eddy_tracker/observations/observation.py +++ b/src/py_eddy_tracker/observations/observation.py @@ -2220,6 +2220,8 @@ def interp_grid( :param str method: 'center', 'mean', 'max', 'min', 'nearest' :param str dtype: if None we use var dtype :param bool intern: Use extern or intern contour + + .. minigallery:: py_eddy_tracker.EddiesObservations.interp_grid """ if method in ("center", "nearest"): return grid_object.interp(varname, self.longitude, self.latitude, method) From e5bbf954bc4b6adb3d56331dbbcbb89fa7f2653f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment?= <49512274+ludwigVonKoopa@users.noreply.github.com> Date: Wed, 31 Mar 2021 16:24:08 +0200 Subject: [PATCH 004/115] optimisation function relatives --- src/py_eddy_tracker/observations/network.py | 79 +++++++++++++++------ 1 file changed, 59 insertions(+), 20 deletions(-) diff --git a/src/py_eddy_tracker/observations/network.py b/src/py_eddy_tracker/observations/network.py index 6cd18d16..7a57bbca 100644 --- a/src/py_eddy_tracker/observations/network.py +++ b/src/py_eddy_tracker/observations/network.py @@ -311,7 +311,7 @@ def sort(self, order=("track", "segment", "time")): """ sort observations - :param tuple order: order or sorting. Passed to `np.argsort` + :param tuple order: order or sorting. Passed to :func:`numpy.argsort` """ index_order = self.obs.argsort(order=order) @@ -485,39 +485,78 @@ def segment_relative_order(self, seg_origine): d[i0:i1] = v return d - def relative(self, i_obs, order=2, direct=True, only_past=False, only_future=False): + def relatives(self, obs, order=2): """ - Extract the segments at a certain order from one observation. + Extract the segments at a certain order from multiple observations. - :param list obs: indice of observation for relative computation + :param iterable,int obs: indices of observation for relatives computation. Can be one observation (int) or collection of observations (iterable(int)) :param int order: order of relatives wanted. 0 means only observations in obs, 1 means direct relatives, ... :return: all segments relatives :rtype: EddiesObservations """ + segment = self.segment_track_array + previous_obs, next_obs = self.previous_obs, self.next_obs - d = self.segment_relative_order(self.segment[i_obs]) - m = (d <= order) * (d != -1) - return self.extract_with_mask(m) + segments_connexion = dict() - def relatives(self, obs, order=2, direct=True, only_past=False, only_future=False): - """ - Extract the segments at a certain order from multiple observations. + for i_slice, seg, _ in self.iter_on(segment): + if i_slice.start == i_slice.stop: + continue - :param list obs: indices of observation for relatives computation - :param int order: order of relatives wanted. 0 means only observations in obs, 1 means direct relatives, ... 
+            i_p, i_n = previous_obs[i_slice.start], next_obs[i_slice.stop - 1]
+            p_seg, n_seg = segment[i_p], segment[i_n]
 
-        :return: all segments relatives
-        :rtype: EddiesObservations
-        """
+            # store the slice of this segment in the dict
+            if seg not in segments_connexion:
+                segments_connexion[seg] = [i_slice, []]
+            else:
+                segments_connexion[seg][0] = i_slice
 
-        mask = zeros(self.segment.shape, dtype=bool)
-        for i_obs in obs:
-            d = self.segment_relative_order(self.segment[i_obs])
-            mask += (d <= order) * (d != -1)
+            if i_p != -1:
 
-        return self.extract_with_mask(mask)
+                if p_seg not in segments_connexion:
+                    segments_connexion[p_seg] = [None, []]
+
+                # backward
+                segments_connexion[seg][1].append(p_seg)
+                segments_connexion[p_seg][1].append(seg)
+
+            if i_n != -1:
+                if n_seg not in segments_connexion:
+                    segments_connexion[n_seg] = [None, []]
+
+                # forward
+                segments_connexion[seg][1].append(n_seg)
+                segments_connexion[n_seg][1].append(seg)
+
+
+        i_obs = (
+            [obs]
+            if not hasattr(obs, "__iter__")
+            else obs
+        )
+        import numpy as np
+
+        distance = zeros(segment.size, dtype=np.uint16) - 1
+
+        def loop(seg, dist=1):
+            i_slice, links = segments_connexion[seg]
+            d = distance[i_slice.start]
+
+            if dist < d and dist <= order:
+                distance[i_slice] = dist
+                for _seg in links:
+                    loop(_seg, dist + 1)
+
+        for indice in i_obs:
+            loop(segment[indice], 0)
+
+        return self.extract_with_mask(distance <= order)
+
+    # keep old name, for backward compatibility
+    relative = relatives
 
     def close_network(self, other, nb_obs_min=10, **kwargs):
         """

From 74e9536003c7760732760f170b869cfd1268e717 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Cl=C3=A9ment?= <49512274+ludwigVonKoopa@users.noreply.github.com>
Date: Wed, 31 Mar 2021 16:25:26 +0200
Subject: [PATCH 005/115] Optimise poly_indexs and fix bug with np.nan casting

---
 src/py_eddy_tracker/observations/observation.py | 5 ++++-
 src/py_eddy_tracker/poly.py                     | 4 ++--
 2 files changed, 6 insertions(+), 3 deletions(-)

diff --git a/src/py_eddy_tracker/observations/observation.py b/src/py_eddy_tracker/observations/observation.py
index 839dbca1..f3e0ee75 100644
--- a/src/py_eddy_tracker/observations/observation.py
+++ b/src/py_eddy_tracker/observations/observation.py
@@ -2050,7 +2050,10 @@ def contains(self, x, y, intern=False):
         :rtype: array[int32]
         """
         xname, yname = self.intern(intern)
-        return poly_indexs(x, y, self[xname], self[yname])
+        m = ~ (isnan(x) + isnan(y))
+        i = -ones(x.shape, dtype='i4')
+        i[m] = poly_indexs(x[m], y[m], self[xname], self[yname])
+        return i
 
     def inside(self, x, y, intern=False):
         """
diff --git a/src/py_eddy_tracker/poly.py b/src/py_eddy_tracker/poly.py
index 2bf34509..864607ff 100644
--- a/src/py_eddy_tracker/poly.py
+++ b/src/py_eddy_tracker/poly.py
@@ -815,7 +815,7 @@ def box_indexes(x, y, step):
 
 
 @njit(cache=True)
-def poly_indexs_(x_p, y_p, x_c, y_c):
+def poly_indexs(x_p, y_p, x_c, y_c):
     """
     Index of contour for each postion inside a contour, -1 in case of no contour
@@ -874,7 +874,7 @@
 
 
 @njit(cache=True)
-def poly_indexs(x_p, y_p, x_c, y_c):
+def poly_indexs_old(x_p, y_p, x_c, y_c):
     """
     index of contour for each postion inside a contour, -1 in case of no contour

From 64efe1b2fd802981be7806791806240d4b8424e0 Mon Sep 17 00:00:00 2001
From: Antoine <36040805+AntSimi@users.noreply.github.com>
Date: Wed, 31 Mar 2021 22:38:22 +0200
Subject: [PATCH 006/115] Add some mini gallery

---
 src/py_eddy_tracker/observations/observation.py | 4 ++++
 src/py_eddy_tracker/poly.py                     | 1 -
 2 files changed, 4 insertions(+), 1 deletion(-)
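The minigallery directives added below attach example galleries to ``iter_on`` and ``align_on``. For reference, a minimal sketch of the ``iter_on`` contract those galleries illustrate (the atlas filename is a placeholder, not taken from these patches):

    from py_eddy_tracker.observations.tracking import TrackEddiesObservations

    eddies = TrackEddiesObservations.load_file("Anticyclonic.nc")  # placeholder path
    # iter_on groups observations by the value of the chosen field and yields
    # an index (slice or mask) plus the low/up bounds of each group
    for i, b0, b1 in eddies.iter_on("track"):
        if eddies.longitude[i].size:
            print(int(b0), eddies.longitude[i].mean())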
diff --git a/src/py_eddy_tracker/observations/observation.py b/src/py_eddy_tracker/observations/observation.py
index f3e0ee75..0d0f45b7 100644
--- a/src/py_eddy_tracker/observations/observation.py
+++ b/src/py_eddy_tracker/observations/observation.py
@@ -553,6 +553,8 @@ def iter_on(self, xname, bins=None):
         :param str,array xname:
         :param array bins: bounds of each bin ,
         :return: index or mask, bound low, bound up
+
+        .. minigallery:: py_eddy_tracker.EddiesObservations.iter_on
         """
         x = self[xname] if isinstance(xname, str) else xname
         d = x[1:] - x[:-1]
@@ -593,6 +595,8 @@ def align_on(self, other, var_name="time", **kwargs):
         """
         Align the time indexes of two datasets.
+
+        .. minigallery:: py_eddy_tracker.EddiesObservations.align_on
         """
         iter_self = self.iter_on(var_name, **kwargs)
         iter_other = other.iter_on(var_name, **kwargs)
diff --git a/src/py_eddy_tracker/poly.py b/src/py_eddy_tracker/poly.py
index 864607ff..ad8fc148 100644
--- a/src/py_eddy_tracker/poly.py
+++ b/src/py_eddy_tracker/poly.py
@@ -536,7 +536,6 @@ def fit_ellips(x, y):
     https://en.wikipedia.org/wiki/Ellipse
 
     """
-    # x,y = x[1:],y[1:]
     nb = x.shape[0]
     datas = ones((nb, 5), dtype=x.dtype)
     datas[:, 0] = x ** 2

From d81604d8bbedd833cf4a108028ea02033c6c9779 Mon Sep 17 00:00:00 2001
From: Cori Pegliasco
Date: Thu, 1 Apr 2021 17:02:01 +0200
Subject: [PATCH 007/115] Add ellipse fit and minor English fixes

---
 examples/16_network/pet_ioannou_2017_case.py | 44 ++++++++++++++++++++
 src/py_eddy_tracker/dataset/grid.py          | 13 ++++--
 src/py_eddy_tracker/eddy_feature.py          | 26 ++++++------
 src/py_eddy_tracker/poly.py                  | 10 ++---
 4 files changed, 72 insertions(+), 21 deletions(-)

diff --git a/examples/16_network/pet_ioannou_2017_case.py b/examples/16_network/pet_ioannou_2017_case.py
index 7669b010..54f124f7 100644
--- a/examples/16_network/pet_ioannou_2017_case.py
+++ b/examples/16_network/pet_ioannou_2017_case.py
@@ -188,3 +188,47 @@ def update_axes(ax, mappable=None):
 m = close_to_i3.scatter_timeline(ax, "shape_error_e", vmin=14, vmax=70, **kw)
 cb = update_axes(ax, m["scatter"])
 cb.set_label("Effective shape error")
+
+# %%
+# Rotation angle
+# --------------
+from numpy import array, pi
+from py_eddy_tracker.generic import coordinates_to_local, local_to_coordinates
+from py_eddy_tracker.poly import fit_ellips
+theta_ = list()
+a_ = list()
+b_ = list()
+for obs in close_to_i3:
+    x, y = obs['contour_lon_s'], obs['contour_lat_s']
+    x0_, y0_ = x.mean(), y.mean()
+    x_, y_ = coordinates_to_local(x, y, x0_, y0_)
+    x0, y0, a, b, theta = fit_ellips(x_, y_)
+    theta_.append(theta)
+    a_.append(a)
+    b_.append(b)
+a_ = array(a_)
+b_ = array(b_)
+
+# %%
+# Theta
+ax = timeline_axes()
+m = close_to_i3.scatter_timeline(ax, theta_, vmin=-pi/2, vmax=pi/2, cmap='hsv')
+cb = update_axes(ax, m["scatter"])
+
+# %%
+# A
+ax = timeline_axes()
+m = close_to_i3.scatter_timeline(ax, a_ * 1e-3, vmin=0, vmax=80, cmap='Spectral_r')
+cb = update_axes(ax, m["scatter"])
+
+# %%
+# B
+ax = timeline_axes()
+m = close_to_i3.scatter_timeline(ax, b_ * 1e-3, vmin=0, vmax=80, cmap='Spectral_r')
+cb = update_axes(ax, m["scatter"])
+
+# %%
+# A/B
+ax = timeline_axes()
+m = close_to_i3.scatter_timeline(ax, a_/b_, vmin=1, vmax=2, cmap='Spectral_r')
+cb = update_axes(ax, m["scatter"])
+
diff --git a/src/py_eddy_tracker/dataset/grid.py b/src/py_eddy_tracker/dataset/grid.py
index 6a4624dd..28dc8330 100644
--- a/src/py_eddy_tracker/dataset/grid.py
+++ b/src/py_eddy_tracker/dataset/grid.py
@@ -609,17 +609,17 @@ def eddy_identification(
     :param str vname: Grid name of v speed component
     :param 
datetime.datetime date: Date which will be stored in object to date data
     :param float,int step: Height between two layers in m
-    :param float,int shape_error: Maximal error allowed for outter contour in %
+    :param float,int shape_error: Maximal error allowed for outermost contour in %
     :param int sampling: Number of points to store contours and speed profile
     :param str sampling_method: Method to resample 'uniform' or 'visvalingam'
     :param (int,int),None pixel_limit:
-        Min and max number of pixels inside the inner and the outer contour to be considered as an eddy
+        Min and max number of pixels inside the inner and the outermost contour to be considered as an eddy
     :param float,None precision: Truncate values at the defined precision in m
     :param str force_height_unit: Unit used for height unit
     :param str force_speed_unit: Unit used for speed unit
     :param dict kwargs: Argument given to amplitude
 
-    :return: Return a list of 2 elements: Anticyclone and Cyclone
+    :return: Return a list of 2 elements: Anticyclones and Cyclones
     :rtype: py_eddy_tracker.observations.observation.EddiesObservations
 
     .. minigallery:: py_eddy_tracker.GridDataset.eddy_identification
@@ -729,7 +729,8 @@ def eddy_identification(
         for contour in contour_paths:
             if contour.used:
                 continue
-            # FIXME : center could be not in contour and fit on raw sampling
+            # FIXME : center could be outside the contour due to the fit
+            # FIXME : warning : the fit is made on raw sampling
            _, _, _, aerr = contour.fit_circle()
 
             # Filter for shape
@@ -752,6 +753,7 @@
             ):
                 continue
 
+            # Test the number of pixels within the outermost contour
             # FIXME : Maybe limit max must be replace with a maximum of surface
             if (
                 contour.nb_pixel < pixel_limit[0]
@@ -760,6 +762,9 @@
                 contour.reject = 3
                 continue
 
+            # Here the considered contour has passed all tests: shape error, masked
+            # pixels, values strictly above (AEs) or below (CEs) the contour, pixel count
+
             # Compute amplitude
             reset_centroid, amp = self.get_amplitude(
                 contour,
diff --git a/src/py_eddy_tracker/eddy_feature.py b/src/py_eddy_tracker/eddy_feature.py
index 3e6c86ea..037beb35 100644
--- a/src/py_eddy_tracker/eddy_feature.py
+++ b/src/py_eddy_tracker/eddy_feature.py
@@ -31,7 +31,7 @@ class Amplitude(object):
     """
     Class to calculate *amplitude* and counts of *local maxima/minima*
-    within a closed region of a sea level anomaly field.
+    within a closed region of a sea surface height field.
     """
 
     EPSILON = 1e-8
@@ -66,8 +66,8 @@ def __init__(
         :param array data:
         :param float interval:
         :param int mle: maximum number of local maxima in contour
-        :param int nb_step_min: number of interval to consider like an eddy
-        :param int nb_step_to_be_mle: number of interval to be consider like another maxima
+        :param int nb_step_min: number of intervals to be considered an eddy
+        :param int nb_step_to_be_mle: number of intervals to be considered as another maximum
         """
 
         # Height of the contour
@@ -102,8 +102,9 @@
         self.nb_pixel = i_x.shape[0]
 
         # Only pixel in contour
+        # FIXME : rename sla to ssh, as the grid can be adt?
         self.sla = data[contour.pixels_index]
-        # Amplitude which will be provide
+        # Amplitude which will be provided
         self.amplitude = 0
         # Maximum local extrema accepted
         self.mle = mle
 
     def within_amplitude_limits(self):
 
     def all_pixels_below_h0(self, level):
         """
         Check CSS11 criterion 1: The SSH values of all of the pixels
-        are below a given SSH threshold for cyclonic eddies.
+ are below (above) a given SSH threshold for cyclonic (anticyclonic) + eddies. """ - # In some case pixel value must be very near of contour bounds + # In some cases pixel value may be very close to the contour bounds if self.sla.mask.any() or ((self.sla.data - self.h_0) > self.EPSILON).any(): return False else: @@ -293,10 +295,10 @@ class Contours(object): Attributes: contour: - A matplotlib contour object of high-pass filtered SLA + A matplotlib contour object of high-pass filtered SSH eddy: - A tracklist object holding the SLA data + A tracklist object holding the SSH data grd: A grid object @@ -406,7 +408,7 @@ def __init__(self, x, y, z, levels, wrap_x=False, keep_unclose=False): fig = Figure() ax = fig.add_subplot(111) if wrap_x: - logger.debug("wrapping activate to compute contour") + logger.debug("wrapping activated to compute contour") x = concatenate((x, x[:1] + 360)) z = ma.concatenate((z, z[:1])) logger.debug("X shape : %s", x.shape) @@ -602,8 +604,8 @@ def display( Must be 'shape_error', 'x', 'y' or 'radius'. If define display_criterion is not use. bins argument must be define - :param array bins: bins use to colorize contour - :param str cmap: Name of cmap to use for field display + :param array bins: bins used to colorize contour + :param str cmap: Name of cmap for field display :param dict kwargs: look at :py:meth:`matplotlib.collections.LineCollection` .. minigallery:: py_eddy_tracker.Contours.display @@ -688,7 +690,7 @@ def display( ax.autoscale_view() def label_contour_unused_which_contain_eddies(self, eddies): - """Select contour which contain several eddies""" + """Select contour containing several eddies""" if eddies.sign_type == 1: # anticyclonic sl = slice(None, -1) diff --git a/src/py_eddy_tracker/poly.py b/src/py_eddy_tracker/poly.py index ad8fc148..af621423 100644 --- a/src/py_eddy_tracker/poly.py +++ b/src/py_eddy_tracker/poly.py @@ -474,13 +474,13 @@ def polygon_overlap(p0, p1, minimal_area=False): return cost -# FIXME: only one function are needed +# FIXME: only one function is needed @njit(cache=True) def fit_circle(x, y): """ From a polygon, function will fit a circle. - Must be call with local coordinates (in m, to get a radius in m). + Must be called with local coordinates (in m, to get a radius in m). 
:param array x: x of polygon :param array y: y of polygon @@ -510,11 +510,11 @@ def fit_circle(x, y): radius **= 0.5 x0 *= scale y0 *= scale - # radius of fitted circle + # radius of fit circle radius *= scale - # center X-position of fitted circle + # center X-position of fit circle x0 += x_mean - # center Y-position of fitted circle + # center Y-position of fit circle y0 += y_mean err = shape_error(x, y, x0, y0, radius) From 7c8fc736ed1ee37b7f627b14b0ff80565a245293 Mon Sep 17 00:00:00 2001 From: AntSimi <36040805+AntSimi@users.noreply.github.com> Date: Sun, 4 Apr 2021 22:43:35 +0200 Subject: [PATCH 008/115] Add visvalingam test --- src/py_eddy_tracker/observations/network.py | 12 +++--------- src/py_eddy_tracker/observations/observation.py | 4 ++-- tests/test_poly.py | 16 +++++++++++++++- 3 files changed, 20 insertions(+), 12 deletions(-) diff --git a/src/py_eddy_tracker/observations/network.py b/src/py_eddy_tracker/observations/network.py index 7a57bbca..115197de 100644 --- a/src/py_eddy_tracker/observations/network.py +++ b/src/py_eddy_tracker/observations/network.py @@ -16,6 +16,7 @@ in1d, ones, uint32, + uint16, unique, where, zeros, @@ -513,7 +514,6 @@ def relatives(self, obs, order=2): else: segments_connexion[seg][0] = i_slice - if i_p != -1: if p_seg not in segments_connexion: @@ -531,15 +531,9 @@ def relatives(self, obs, order=2): segments_connexion[seg][1].append(n_seg) segments_connexion[n_seg][1].append(seg) + i_obs = [obs] if not hasattr(obs, "__iter__") else obs - i_obs = ( - [obs] - if not hasattr(obs, "__iter__") - else obs - ) - import numpy as np - - distance = zeros(segment.size, dtype=np.uint16) - 1 + distance = zeros(segment.size, dtype=uint16) - 1 def loop(seg, dist=1): i_slice, links = segments_connexion[seg] diff --git a/src/py_eddy_tracker/observations/observation.py b/src/py_eddy_tracker/observations/observation.py index 0d0f45b7..8decca58 100644 --- a/src/py_eddy_tracker/observations/observation.py +++ b/src/py_eddy_tracker/observations/observation.py @@ -2054,8 +2054,8 @@ def contains(self, x, y, intern=False): :rtype: array[int32] """ xname, yname = self.intern(intern) - m = ~ (isnan(x) + isnan(y)) - i = -ones(x.shape, dtype='i4') + m = ~(isnan(x) + isnan(y)) + i = -ones(x.shape, dtype="i4") i[m] = poly_indexs(x[m], y[m], self[xname], self[yname]) return i diff --git a/tests/test_poly.py b/tests/test_poly.py index b2aacb73..1a0edb6f 100644 --- a/tests/test_poly.py +++ b/tests/test_poly.py @@ -1,7 +1,13 @@ from numpy import array, pi from pytest import approx -from py_eddy_tracker.poly import convex, fit_circle, get_convex_hull, poly_area_vertice +from py_eddy_tracker.poly import ( + convex, + fit_circle, + get_convex_hull, + poly_area_vertice, + visvalingam, +) # Vertices for next test V = array(((2, 2, 3, 3, 2), (-10, -9, -9, -10, -10))) @@ -29,3 +35,11 @@ def test_convex(): def test_convex_hull(): assert convex(*get_convex_hull(*V_concave)) is True + + +def test_visvalingam(): + x = array([1, 2, 3, 4, 5, 6.75, 6, 1]) + y = array([-0.5, -1.5, -1, -1.75, -1, -1, -0.5, -0.5]) + x_, y_ = visvalingam(x, y, 6) + assert ([1, 2, 3, 4, 6, 1] == x_).all() + assert ([-0.5, -1.5, -1, -1.75, -0.5, -0.5] == y_).all() From caff46f62e80fd19bc911cae4eb4f0e51682e2cc Mon Sep 17 00:00:00 2001 From: AntSimi <36040805+AntSimi@users.noreply.github.com> Date: Sun, 4 Apr 2021 22:47:36 +0200 Subject: [PATCH 009/115] Activate test for all branches --- .github/workflows/python-app.yml | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git 
a/.github/workflows/python-app.yml b/.github/workflows/python-app.yml index 43fd6b2d..7fc9f385 100644 --- a/.github/workflows/python-app.yml +++ b/.github/workflows/python-app.yml @@ -3,11 +3,7 @@ name: Pytest & Flake8 -on: - push: - branches: [ master ] - pull_request: - branches: [ master ] +on: [push, pull_request] jobs: build: From 1d81bc0732b81e945111d7e5cbba5933ef684ada Mon Sep 17 00:00:00 2001 From: AntSimi <36040805+AntSimi@users.noreply.github.com> Date: Mon, 5 Apr 2021 16:46:56 +0200 Subject: [PATCH 010/115] Modify gui import --- examples/06_grid_manipulation/pet_advect.py | 6 +++--- examples/06_grid_manipulation/pet_lavd.py | 4 ++-- examples/07_cube_manipulation/pet_cube.py | 4 ++-- .../07_cube_manipulation/pet_lavd_detection.py | 4 ++-- examples/16_network/pet_atlas.py | 4 ++-- examples/16_network/pet_ioannou_2017_case.py | 4 ++-- examples/16_network/pet_relative.py | 12 ++++++------ examples/16_network/pet_replay_segmentation.py | 4 ++-- examples/16_network/pet_segmentation_anim.py | 6 +++--- .../06_grid_manipulation/pet_advect.ipynb | 8 ++++---- .../06_grid_manipulation/pet_lavd.ipynb | 6 +++--- .../07_cube_manipulation/pet_cube.ipynb | 6 +++--- .../07_cube_manipulation/pet_lavd_detection.ipynb | 6 +++--- notebooks/python_module/16_network/pet_atlas.ipynb | 6 +++--- .../16_network/pet_ioannou_2017_case.ipynb | 6 +++--- .../python_module/16_network/pet_relative.ipynb | 14 +++++++------- .../16_network/pet_replay_segmentation.ipynb | 4 ++-- .../16_network/pet_segmentation_anim.ipynb | 8 ++++---- src/py_eddy_tracker/gui.py | 7 +++++-- 19 files changed, 61 insertions(+), 58 deletions(-) diff --git a/examples/06_grid_manipulation/pet_advect.py b/examples/06_grid_manipulation/pet_advect.py index 13052af5..0e00697f 100644 --- a/examples/06_grid_manipulation/pet_advect.py +++ b/examples/06_grid_manipulation/pet_advect.py @@ -10,9 +10,9 @@ from matplotlib.animation import FuncAnimation from numpy import arange, isnan, meshgrid, ones -import py_eddy_tracker.gui from py_eddy_tracker.data import get_demo_path from py_eddy_tracker.dataset.grid import RegularGridDataset +from py_eddy_tracker.gui import GUI_AXES from py_eddy_tracker.observations.observation import EddiesObservations # %% @@ -32,7 +32,7 @@ # %% # Quiver from u/v with eddies fig = plt.figure(figsize=(10, 5)) -ax = fig.add_axes([0, 0, 1, 1], projection="full_axes") +ax = fig.add_axes([0, 0, 1, 1], projection=GUI_AXES) ax.set_xlim(19, 30), ax.set_ylim(31, 36.5), ax.grid() x, y = meshgrid(g.x_c, g.y_c) a.filled(ax, facecolors="r", alpha=0.1), c.filled(ax, facecolors="b", alpha=0.1) @@ -82,7 +82,7 @@ def save(self, *args, **kwargs): def anim_ax(**kw): t = 0 fig = plt.figure(figsize=(10, 5), dpi=55) - axes = fig.add_axes([0, 0, 1, 1], projection="full_axes") + axes = fig.add_axes([0, 0, 1, 1], projection=GUI_AXES) axes.set_xlim(19, 30), axes.set_ylim(31, 36.5), axes.grid() a.filled(axes, facecolors="r", alpha=0.1), c.filled(axes, facecolors="b", alpha=0.1) line = axes.plot([], [], "k", **kw)[0] diff --git a/examples/06_grid_manipulation/pet_lavd.py b/examples/06_grid_manipulation/pet_lavd.py index e0dbbb54..ed21738f 100644 --- a/examples/06_grid_manipulation/pet_lavd.py +++ b/examples/06_grid_manipulation/pet_lavd.py @@ -24,16 +24,16 @@ from matplotlib.animation import FuncAnimation from numpy import arange, meshgrid, zeros -import py_eddy_tracker.gui from py_eddy_tracker.data import get_demo_path from py_eddy_tracker.dataset.grid import RegularGridDataset +from py_eddy_tracker.gui import GUI_AXES from 
py_eddy_tracker.observations.observation import EddiesObservations # %% def start_ax(title="", dpi=90): fig = plt.figure(figsize=(16, 9), dpi=dpi) - ax = fig.add_axes([0, 0, 1, 1], projection="full_axes") + ax = fig.add_axes([0, 0, 1, 1], projection=GUI_AXES) ax.set_xlim(0, 32), ax.set_ylim(28, 46) ax.set_title(title) return fig, ax, ax.text(3, 32, "", fontsize=20) diff --git a/examples/07_cube_manipulation/pet_cube.py b/examples/07_cube_manipulation/pet_cube.py index 6c0db253..a674359d 100644 --- a/examples/07_cube_manipulation/pet_cube.py +++ b/examples/07_cube_manipulation/pet_cube.py @@ -12,10 +12,10 @@ from matplotlib.animation import FuncAnimation from numpy import arange, isnan, meshgrid, ones -import py_eddy_tracker.gui from py_eddy_tracker import start_logger from py_eddy_tracker.data import get_demo_path from py_eddy_tracker.dataset.grid import GridCollection +from py_eddy_tracker.gui import GUI_AXES start_logger().setLevel("ERROR") @@ -70,7 +70,7 @@ def save(self, *args, **kwargs): # Function def anim_ax(**kw): fig = plt.figure(figsize=(10, 5), dpi=55) - axes = fig.add_axes([0, 0, 1, 1], projection="full_axes") + axes = fig.add_axes([0, 0, 1, 1], projection=GUI_AXES) axes.set_xlim(19, 30), axes.set_ylim(31, 36.5), axes.grid() line = axes.plot([], [], "k", **kw)[0] return fig, axes.text(21, 32.1, ""), line diff --git a/examples/07_cube_manipulation/pet_lavd_detection.py b/examples/07_cube_manipulation/pet_lavd_detection.py index dc3a83ff..1fa4d60b 100644 --- a/examples/07_cube_manipulation/pet_lavd_detection.py +++ b/examples/07_cube_manipulation/pet_lavd_detection.py @@ -23,10 +23,10 @@ from matplotlib import pyplot as plt from numpy import arange, isnan, ma, meshgrid, zeros -import py_eddy_tracker.gui from py_eddy_tracker import start_logger from py_eddy_tracker.data import get_demo_path from py_eddy_tracker.dataset.grid import GridCollection, RegularGridDataset +from py_eddy_tracker.gui import GUI_AXES start_logger().setLevel("ERROR") @@ -47,7 +47,7 @@ def from_(cls, x, y, z): # %% def start_ax(title="", dpi=90): fig = plt.figure(figsize=(12, 5), dpi=dpi) - ax = fig.add_axes([0.05, 0.08, 0.9, 0.9], projection="full_axes") + ax = fig.add_axes([0.05, 0.08, 0.9, 0.9], projection=GUI_AXES) ax.set_xlim(-6, 36), ax.set_ylim(31, 45) ax.set_title(title) return fig, ax, ax.text(3, 32, "", fontsize=20) diff --git a/examples/16_network/pet_atlas.py b/examples/16_network/pet_atlas.py index 540d312d..7f86790a 100644 --- a/examples/16_network/pet_atlas.py +++ b/examples/16_network/pet_atlas.py @@ -5,8 +5,8 @@ from matplotlib import pyplot as plt from numpy import ma -import py_eddy_tracker.gui from py_eddy_tracker.data import get_remote_demo_sample +from py_eddy_tracker.gui import GUI_AXES from py_eddy_tracker.observations.network import NetworkObservations n = NetworkObservations.load_file( @@ -26,7 +26,7 @@ # Functions def start_axes(title): fig = plt.figure(figsize=(13, 5)) - ax = fig.add_axes([0.03, 0.03, 0.90, 0.94], projection="full_axes") + ax = fig.add_axes([0.03, 0.03, 0.90, 0.94], projection=GUI_AXES) ax.set_xlim(-6, 36.5), ax.set_ylim(30, 46) ax.set_aspect("equal") ax.set_title(title, weight="bold") diff --git a/examples/16_network/pet_ioannou_2017_case.py b/examples/16_network/pet_ioannou_2017_case.py index 7669b010..781514fe 100644 --- a/examples/16_network/pet_ioannou_2017_case.py +++ b/examples/16_network/pet_ioannou_2017_case.py @@ -16,9 +16,9 @@ from matplotlib.ticker import FuncFormatter from numpy import arange, where -import py_eddy_tracker.gui from 
py_eddy_tracker.appli.gui import Anim from py_eddy_tracker.data import get_demo_path +from py_eddy_tracker.gui import GUI_AXES from py_eddy_tracker.observations.network import NetworkObservations @@ -48,7 +48,7 @@ def formatter(x, pos): def start_axes(title=""): fig = plt.figure(figsize=(13, 6)) - ax = fig.add_axes([0.03, 0.03, 0.90, 0.94], projection="full_axes") + ax = fig.add_axes([0.03, 0.03, 0.90, 0.94], projection=GUI_AXES) ax.set_xlim(19, 29), ax.set_ylim(31, 35.5) ax.set_aspect("equal") ax.set_title(title, weight="bold") diff --git a/examples/16_network/pet_relative.py b/examples/16_network/pet_relative.py index 2759edb4..c4989edb 100644 --- a/examples/16_network/pet_relative.py +++ b/examples/16_network/pet_relative.py @@ -5,8 +5,8 @@ from matplotlib import pyplot as plt from numpy import where -import py_eddy_tracker.gui from py_eddy_tracker import data +from py_eddy_tracker.gui import GUI_AXES from py_eddy_tracker.observations.network import NetworkObservations # %% @@ -266,7 +266,7 @@ # %% # Only a map can be tricky to understand, with a timeline it's easier! fig = plt.figure(figsize=(15, 8)) -ax = fig.add_axes([0.04, 0.06, 0.94, 0.88], projection="full_axes") +ax = fig.add_axes([0.04, 0.06, 0.94, 0.88], projection=GUI_AXES) n.plot(ax, color_cycle=n.COLORS) ax.set_xlim(17.5, 27.5), ax.set_ylim(31, 36), ax.grid() ax = fig.add_axes([0.08, 0.7, 0.7, 0.3]) @@ -278,7 +278,7 @@ # ----------------- # Display the position of the eddies after a merging fig = plt.figure(figsize=(15, 8)) -ax = fig.add_axes([0.04, 0.06, 0.90, 0.88], projection="full_axes") +ax = fig.add_axes([0.04, 0.06, 0.90, 0.88], projection=GUI_AXES) n.plot(ax, color_cycle=n.COLORS) m1, m0, m0_stop = n.merging_event(triplet=True) m1.display(ax, color="violet", lw=2, label="Eddies after merging") @@ -296,7 +296,7 @@ # ------------------ # Display the position of the eddies before a splitting fig = plt.figure(figsize=(15, 8)) -ax = fig.add_axes([0.04, 0.06, 0.90, 0.88], projection="full_axes") +ax = fig.add_axes([0.04, 0.06, 0.90, 0.88], projection=GUI_AXES) n.plot(ax, color_cycle=n.COLORS) s0, s1, s1_start = n.spliting_event(triplet=True) s0.display(ax, color="violet", lw=2, label="Eddies before splitting") @@ -314,7 +314,7 @@ # --------------- # Display the starting position of non-splitted eddies fig = plt.figure(figsize=(15, 8)) -ax = fig.add_axes([0.04, 0.06, 0.90, 0.88], projection="full_axes") +ax = fig.add_axes([0.04, 0.06, 0.90, 0.88], projection=GUI_AXES) birth = n.birth_event() birth.display(ax) ax.set_xlim(17.5, 27.5), ax.set_ylim(31, 36), ax.grid() @@ -325,7 +325,7 @@ # --------------- # Display the last position of non-merged eddies fig = plt.figure(figsize=(15, 8)) -ax = fig.add_axes([0.04, 0.06, 0.90, 0.88], projection="full_axes") +ax = fig.add_axes([0.04, 0.06, 0.90, 0.88], projection=GUI_AXES) death = n.death_event() death.display(ax) ax.set_xlim(17.5, 27.5), ax.set_ylim(31, 36), ax.grid() diff --git a/examples/16_network/pet_replay_segmentation.py b/examples/16_network/pet_replay_segmentation.py index c33028fc..757854d5 100644 --- a/examples/16_network/pet_replay_segmentation.py +++ b/examples/16_network/pet_replay_segmentation.py @@ -11,8 +11,8 @@ from matplotlib.ticker import FuncFormatter from numpy import where -import py_eddy_tracker.gui from py_eddy_tracker.data import get_demo_path +from py_eddy_tracker.gui import GUI_AXES from py_eddy_tracker.observations.network import NetworkObservations from py_eddy_tracker.observations.tracking import TrackEddiesObservations @@ -24,7 +24,7 @@ def 
formatter(x, pos): def start_axes(title=""): fig = plt.figure(figsize=(13, 6)) - ax = fig.add_axes([0.03, 0.03, 0.90, 0.94], projection="full_axes") + ax = fig.add_axes([0.03, 0.03, 0.90, 0.94], projection=GUI_AXES) ax.set_xlim(19, 29), ax.set_ylim(31, 35.5) ax.set_aspect("equal") ax.set_title(title, weight="bold") diff --git a/examples/16_network/pet_segmentation_anim.py b/examples/16_network/pet_segmentation_anim.py index b2757809..340163a1 100644 --- a/examples/16_network/pet_segmentation_anim.py +++ b/examples/16_network/pet_segmentation_anim.py @@ -10,8 +10,8 @@ from matplotlib.colors import ListedColormap from numpy import ones, where -import py_eddy_tracker.gui from py_eddy_tracker.data import get_demo_path +from py_eddy_tracker.gui import GUI_AXES from py_eddy_tracker.observations.network import NetworkObservations from py_eddy_tracker.observations.tracking import TrackEddiesObservations @@ -104,7 +104,7 @@ def update(i_frame): fig = plt.figure(figsize=(16, 9), dpi=60) -ax = fig.add_axes([0.04, 0.06, 0.94, 0.88], projection="full_axes") +ax = fig.add_axes([0.04, 0.06, 0.94, 0.88], projection=GUI_AXES) ax.set_title(f"{len(e)} observations to segment") ax.set_xlim(19, 29), ax.set_ylim(31, 35.5), ax.grid() vmax = TRACKS[-1].max() @@ -121,6 +121,6 @@ def update(i_frame): # Final Result # ------------ fig = plt.figure(figsize=(16, 9)) -ax = fig.add_axes([0.04, 0.06, 0.94, 0.88], projection="full_axes") +ax = fig.add_axes([0.04, 0.06, 0.94, 0.88], projection=GUI_AXES) ax.set_xlim(19, 29), ax.set_ylim(31, 35.5), ax.grid() _ = ax.scatter(e.lon, e.lat, c=TRACKS[-1], cmap=cmap, vmin=0, vmax=vmax, s=20) diff --git a/notebooks/python_module/06_grid_manipulation/pet_advect.ipynb b/notebooks/python_module/06_grid_manipulation/pet_advect.ipynb index 53725a05..b660df52 100644 --- a/notebooks/python_module/06_grid_manipulation/pet_advect.ipynb +++ b/notebooks/python_module/06_grid_manipulation/pet_advect.ipynb @@ -26,7 +26,7 @@ }, "outputs": [], "source": [ - "import re\n\nfrom matplotlib import pyplot as plt\nfrom matplotlib.animation import FuncAnimation\nfrom numpy import arange, isnan, meshgrid, ones\n\nimport py_eddy_tracker.gui\nfrom py_eddy_tracker.data import get_demo_path\nfrom py_eddy_tracker.dataset.grid import RegularGridDataset\nfrom py_eddy_tracker.observations.observation import EddiesObservations" + "import re\n\nfrom matplotlib import pyplot as plt\nfrom matplotlib.animation import FuncAnimation\nfrom numpy import arange, isnan, meshgrid, ones\n\nfrom py_eddy_tracker.data import get_demo_path\nfrom py_eddy_tracker.dataset.grid import RegularGridDataset\nfrom py_eddy_tracker.gui import GUI_AXES\nfrom py_eddy_tracker.observations.observation import EddiesObservations" ] }, { @@ -80,7 +80,7 @@ }, "outputs": [], "source": [ - "fig = plt.figure(figsize=(10, 5))\nax = fig.add_axes([0, 0, 1, 1], projection=\"full_axes\")\nax.set_xlim(19, 30), ax.set_ylim(31, 36.5), ax.grid()\nx, y = meshgrid(g.x_c, g.y_c)\na.filled(ax, facecolors=\"r\", alpha=0.1), c.filled(ax, facecolors=\"b\", alpha=0.1)\n_ = ax.quiver(x.T, y.T, g.grid(\"u\"), g.grid(\"v\"), scale=20)" + "fig = plt.figure(figsize=(10, 5))\nax = fig.add_axes([0, 0, 1, 1], projection=GUI_AXES)\nax.set_xlim(19, 30), ax.set_ylim(31, 36.5), ax.grid()\nx, y = meshgrid(g.x_c, g.y_c)\na.filled(ax, facecolors=\"r\", alpha=0.1), c.filled(ax, facecolors=\"b\", alpha=0.1)\n_ = ax.quiver(x.T, y.T, g.grid(\"u\"), g.grid(\"v\"), scale=20)" ] }, { @@ -145,7 +145,7 @@ }, "outputs": [], "source": [ - "def anim_ax(**kw):\n t = 0\n fig = 
plt.figure(figsize=(10, 5), dpi=55)\n axes = fig.add_axes([0, 0, 1, 1], projection=\"full_axes\")\n axes.set_xlim(19, 30), axes.set_ylim(31, 36.5), axes.grid()\n a.filled(axes, facecolors=\"r\", alpha=0.1), c.filled(axes, facecolors=\"b\", alpha=0.1)\n line = axes.plot([], [], \"k\", **kw)[0]\n return fig, axes.text(21, 32.1, \"\"), line, t\n\n\ndef update(i_frame, t_step):\n global t\n x, y = p.__next__()\n t += t_step\n l.set_data(x, y)\n txt.set_text(f\"T0 + {t:.1f} days\")" + "def anim_ax(**kw):\n t = 0\n fig = plt.figure(figsize=(10, 5), dpi=55)\n axes = fig.add_axes([0, 0, 1, 1], projection=GUI_AXES)\n axes.set_xlim(19, 30), axes.set_ylim(31, 36.5), axes.grid()\n a.filled(axes, facecolors=\"r\", alpha=0.1), c.filled(axes, facecolors=\"b\", alpha=0.1)\n line = axes.plot([], [], \"k\", **kw)[0]\n return fig, axes.text(21, 32.1, \"\"), line, t\n\n\ndef update(i_frame, t_step):\n global t\n x, y = p.__next__()\n t += t_step\n l.set_data(x, y)\n txt.set_text(f\"T0 + {t:.1f} days\")" ] }, { @@ -262,7 +262,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.9.2" } }, "nbformat": 4, diff --git a/notebooks/python_module/06_grid_manipulation/pet_lavd.ipynb b/notebooks/python_module/06_grid_manipulation/pet_lavd.ipynb index 6cef91bc..67983cec 100644 --- a/notebooks/python_module/06_grid_manipulation/pet_lavd.ipynb +++ b/notebooks/python_module/06_grid_manipulation/pet_lavd.ipynb @@ -26,7 +26,7 @@ }, "outputs": [], "source": [ - "import re\n\nfrom matplotlib import pyplot as plt\nfrom matplotlib.animation import FuncAnimation\nfrom numpy import arange, meshgrid, zeros\n\nimport py_eddy_tracker.gui\nfrom py_eddy_tracker.data import get_demo_path\nfrom py_eddy_tracker.dataset.grid import RegularGridDataset\nfrom py_eddy_tracker.observations.observation import EddiesObservations" + "import re\n\nfrom matplotlib import pyplot as plt\nfrom matplotlib.animation import FuncAnimation\nfrom numpy import arange, meshgrid, zeros\n\nfrom py_eddy_tracker.data import get_demo_path\nfrom py_eddy_tracker.dataset.grid import RegularGridDataset\nfrom py_eddy_tracker.gui import GUI_AXES\nfrom py_eddy_tracker.observations.observation import EddiesObservations" ] }, { @@ -37,7 +37,7 @@ }, "outputs": [], "source": [ - "def start_ax(title=\"\", dpi=90):\n fig = plt.figure(figsize=(16, 9), dpi=dpi)\n ax = fig.add_axes([0, 0, 1, 1], projection=\"full_axes\")\n ax.set_xlim(0, 32), ax.set_ylim(28, 46)\n ax.set_title(title)\n return fig, ax, ax.text(3, 32, \"\", fontsize=20)\n\n\ndef update_axes(ax, mappable=None):\n ax.grid()\n if mappable:\n cb = plt.colorbar(\n mappable,\n cax=ax.figure.add_axes([0.05, 0.1, 0.9, 0.01]),\n orientation=\"horizontal\",\n )\n cb.set_label(\"Vorticity integration along trajectory at initial position\")\n return cb\n\n\nkw_vorticity = dict(vmin=0, vmax=2e-5, cmap=\"viridis\")" + "def start_ax(title=\"\", dpi=90):\n fig = plt.figure(figsize=(16, 9), dpi=dpi)\n ax = fig.add_axes([0, 0, 1, 1], projection=GUI_AXES)\n ax.set_xlim(0, 32), ax.set_ylim(28, 46)\n ax.set_title(title)\n return fig, ax, ax.text(3, 32, \"\", fontsize=20)\n\n\ndef update_axes(ax, mappable=None):\n ax.grid()\n if mappable:\n cb = plt.colorbar(\n mappable,\n cax=ax.figure.add_axes([0.05, 0.1, 0.9, 0.01]),\n orientation=\"horizontal\",\n )\n cb.set_label(\"Vorticity integration along trajectory at initial position\")\n return cb\n\n\nkw_vorticity = dict(vmin=0, vmax=2e-5, cmap=\"viridis\")" ] }, { @@ -201,7 +201,7 @@ "name": "python", "nbconvert_exporter": 
"python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.9.2" } }, "nbformat": 4, diff --git a/notebooks/python_module/07_cube_manipulation/pet_cube.ipynb b/notebooks/python_module/07_cube_manipulation/pet_cube.ipynb index 63cd36dc..a8ed7f1b 100644 --- a/notebooks/python_module/07_cube_manipulation/pet_cube.ipynb +++ b/notebooks/python_module/07_cube_manipulation/pet_cube.ipynb @@ -26,7 +26,7 @@ }, "outputs": [], "source": [ - "# sphinx_gallery_thumbnail_number = 2\nimport re\nfrom datetime import datetime, timedelta\n\nfrom matplotlib import pyplot as plt\nfrom matplotlib.animation import FuncAnimation\nfrom numpy import arange, isnan, meshgrid, ones\n\nimport py_eddy_tracker.gui\nfrom py_eddy_tracker import start_logger\nfrom py_eddy_tracker.data import get_demo_path\nfrom py_eddy_tracker.dataset.grid import GridCollection\n\nstart_logger().setLevel(\"ERROR\")" + "# sphinx_gallery_thumbnail_number = 2\nimport re\nfrom datetime import datetime, timedelta\n\nfrom matplotlib import pyplot as plt\nfrom matplotlib.animation import FuncAnimation\nfrom numpy import arange, isnan, meshgrid, ones\n\nfrom py_eddy_tracker import start_logger\nfrom py_eddy_tracker.data import get_demo_path\nfrom py_eddy_tracker.dataset.grid import GridCollection\nfrom py_eddy_tracker.gui import GUI_AXES\n\nstart_logger().setLevel(\"ERROR\")" ] }, { @@ -91,7 +91,7 @@ }, "outputs": [], "source": [ - "def anim_ax(**kw):\n fig = plt.figure(figsize=(10, 5), dpi=55)\n axes = fig.add_axes([0, 0, 1, 1], projection=\"full_axes\")\n axes.set_xlim(19, 30), axes.set_ylim(31, 36.5), axes.grid()\n line = axes.plot([], [], \"k\", **kw)[0]\n return fig, axes.text(21, 32.1, \"\"), line\n\n\ndef update(_):\n tt, xt, yt = f.__next__()\n mappable.set_data(xt, yt)\n d = timedelta(tt / 86400.0) + datetime(1950, 1, 1)\n txt.set_text(f\"{d:%Y/%m/%d-%H}\")" + "def anim_ax(**kw):\n fig = plt.figure(figsize=(10, 5), dpi=55)\n axes = fig.add_axes([0, 0, 1, 1], projection=GUI_AXES)\n axes.set_xlim(19, 30), axes.set_ylim(31, 36.5), axes.grid()\n line = axes.plot([], [], \"k\", **kw)[0]\n return fig, axes.text(21, 32.1, \"\"), line\n\n\ndef update(_):\n tt, xt, yt = f.__next__()\n mappable.set_data(xt, yt)\n d = timedelta(tt / 86400.0) + datetime(1950, 1, 1)\n txt.set_text(f\"{d:%Y/%m/%d-%H}\")" ] }, { @@ -158,7 +158,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.9.2" } }, "nbformat": 4, diff --git a/notebooks/python_module/07_cube_manipulation/pet_lavd_detection.ipynb b/notebooks/python_module/07_cube_manipulation/pet_lavd_detection.ipynb index e123abe4..f4e5f77e 100644 --- a/notebooks/python_module/07_cube_manipulation/pet_lavd_detection.ipynb +++ b/notebooks/python_module/07_cube_manipulation/pet_lavd_detection.ipynb @@ -26,7 +26,7 @@ }, "outputs": [], "source": [ - "from datetime import datetime\n\nfrom matplotlib import pyplot as plt\nfrom numpy import arange, isnan, ma, meshgrid, zeros\n\nimport py_eddy_tracker.gui\nfrom py_eddy_tracker import start_logger\nfrom py_eddy_tracker.data import get_demo_path\nfrom py_eddy_tracker.dataset.grid import GridCollection, RegularGridDataset\n\nstart_logger().setLevel(\"ERROR\")" + "from datetime import datetime\n\nfrom matplotlib import pyplot as plt\nfrom numpy import arange, isnan, ma, meshgrid, zeros\n\nfrom py_eddy_tracker import start_logger\nfrom py_eddy_tracker.data import get_demo_path\nfrom py_eddy_tracker.dataset.grid import GridCollection, RegularGridDataset\nfrom py_eddy_tracker.gui import 
GUI_AXES\n\nstart_logger().setLevel(\"ERROR\")" ] }, { @@ -48,7 +48,7 @@ }, "outputs": [], "source": [ - "def start_ax(title=\"\", dpi=90):\n fig = plt.figure(figsize=(12, 5), dpi=dpi)\n ax = fig.add_axes([0.05, 0.08, 0.9, 0.9], projection=\"full_axes\")\n ax.set_xlim(-6, 36), ax.set_ylim(31, 45)\n ax.set_title(title)\n return fig, ax, ax.text(3, 32, \"\", fontsize=20)\n\n\ndef update_axes(ax, mappable=None):\n ax.grid()\n if mappable:\n cb = plt.colorbar(\n mappable,\n cax=ax.figure.add_axes([0.05, 0.1, 0.9, 0.01]),\n orientation=\"horizontal\",\n )\n cb.set_label(\"LAVD at initial position\")\n return cb\n\n\nkw_lavd = dict(vmin=0, vmax=2e-5, cmap=\"viridis\")" + "def start_ax(title=\"\", dpi=90):\n fig = plt.figure(figsize=(12, 5), dpi=dpi)\n ax = fig.add_axes([0.05, 0.08, 0.9, 0.9], projection=GUI_AXES)\n ax.set_xlim(-6, 36), ax.set_ylim(31, 45)\n ax.set_title(title)\n return fig, ax, ax.text(3, 32, \"\", fontsize=20)\n\n\ndef update_axes(ax, mappable=None):\n ax.grid()\n if mappable:\n cb = plt.colorbar(\n mappable,\n cax=ax.figure.add_axes([0.05, 0.1, 0.9, 0.01]),\n orientation=\"horizontal\",\n )\n cb.set_label(\"LAVD at initial position\")\n return cb\n\n\nkw_lavd = dict(vmin=0, vmax=2e-5, cmap=\"viridis\")" ] }, { @@ -194,7 +194,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.9.2" } }, "nbformat": 4, diff --git a/notebooks/python_module/16_network/pet_atlas.ipynb b/notebooks/python_module/16_network/pet_atlas.ipynb index 514317a6..ee8f1934 100644 --- a/notebooks/python_module/16_network/pet_atlas.ipynb +++ b/notebooks/python_module/16_network/pet_atlas.ipynb @@ -26,7 +26,7 @@ }, "outputs": [], "source": [ - "from matplotlib import pyplot as plt\nfrom numpy import ma\n\nimport py_eddy_tracker.gui\nfrom py_eddy_tracker.data import get_remote_demo_sample\nfrom py_eddy_tracker.observations.network import NetworkObservations\n\nn = NetworkObservations.load_file(\n get_remote_demo_sample(\n \"eddies_med_adt_allsat_dt2018_err70_filt500_order1/Anticyclonic_network.nc\"\n )\n)" + "from matplotlib import pyplot as plt\nfrom numpy import ma\n\nfrom py_eddy_tracker.data import get_remote_demo_sample\nfrom py_eddy_tracker.gui import GUI_AXES\nfrom py_eddy_tracker.observations.network import NetworkObservations\n\nn = NetworkObservations.load_file(\n get_remote_demo_sample(\n \"eddies_med_adt_allsat_dt2018_err70_filt500_order1/Anticyclonic_network.nc\"\n )\n)" ] }, { @@ -62,7 +62,7 @@ }, "outputs": [], "source": [ - "def start_axes(title):\n fig = plt.figure(figsize=(13, 5))\n ax = fig.add_axes([0.03, 0.03, 0.90, 0.94], projection=\"full_axes\")\n ax.set_xlim(-6, 36.5), ax.set_ylim(30, 46)\n ax.set_aspect(\"equal\")\n ax.set_title(title, weight=\"bold\")\n return ax\n\n\ndef update_axes(ax, mappable=None):\n ax.grid()\n if mappable:\n return plt.colorbar(mappable, cax=ax.figure.add_axes([0.94, 0.05, 0.01, 0.9]))" + "def start_axes(title):\n fig = plt.figure(figsize=(13, 5))\n ax = fig.add_axes([0.03, 0.03, 0.90, 0.94], projection=GUI_AXES)\n ax.set_xlim(-6, 36.5), ax.set_ylim(30, 46)\n ax.set_aspect(\"equal\")\n ax.set_title(title, weight=\"bold\")\n return ax\n\n\ndef update_axes(ax, mappable=None):\n ax.grid()\n if mappable:\n return plt.colorbar(mappable, cax=ax.figure.add_axes([0.94, 0.05, 0.01, 0.9]))" ] }, { @@ -363,7 +363,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.9.2" } }, "nbformat": 4, diff --git 
a/notebooks/python_module/16_network/pet_ioannou_2017_case.ipynb b/notebooks/python_module/16_network/pet_ioannou_2017_case.ipynb index e803df5f..743b753f 100644 --- a/notebooks/python_module/16_network/pet_ioannou_2017_case.ipynb +++ b/notebooks/python_module/16_network/pet_ioannou_2017_case.ipynb @@ -26,7 +26,7 @@ }, "outputs": [], "source": [ - "import re\nfrom datetime import datetime, timedelta\n\nfrom matplotlib import colors\nfrom matplotlib import pyplot as plt\nfrom matplotlib.animation import FuncAnimation\nfrom matplotlib.ticker import FuncFormatter\nfrom numpy import arange, where\n\nimport py_eddy_tracker.gui\nfrom py_eddy_tracker.appli.gui import Anim\nfrom py_eddy_tracker.data import get_demo_path\nfrom py_eddy_tracker.observations.network import NetworkObservations" + "import re\nfrom datetime import datetime, timedelta\n\nfrom matplotlib import colors\nfrom matplotlib import pyplot as plt\nfrom matplotlib.animation import FuncAnimation\nfrom matplotlib.ticker import FuncFormatter\nfrom numpy import arange, where\n\nfrom py_eddy_tracker.appli.gui import Anim\nfrom py_eddy_tracker.data import get_demo_path\nfrom py_eddy_tracker.gui import GUI_AXES\nfrom py_eddy_tracker.observations.network import NetworkObservations" ] }, { @@ -37,7 +37,7 @@ }, "outputs": [], "source": [ - "class VideoAnimation(FuncAnimation):\n def _repr_html_(self, *args, **kwargs):\n \"\"\"To get video in html and have a player\"\"\"\n content = self.to_html5_video()\n return re.sub(\n r'width=\"[0-9]*\"\\sheight=\"[0-9]*\"', 'width=\"100%\" height=\"100%\"', content\n )\n\n def save(self, *args, **kwargs):\n if args[0].endswith(\"gif\"):\n # In this case gif is use to create thumbnail which are not use but consume same time than video\n # So we create an empty file, to save time\n with open(args[0], \"w\") as _:\n pass\n return\n return super().save(*args, **kwargs)\n\n\n@FuncFormatter\ndef formatter(x, pos):\n return (timedelta(x) + datetime(1950, 1, 1)).strftime(\"%d/%m/%Y\")\n\n\ndef start_axes(title=\"\"):\n fig = plt.figure(figsize=(13, 6))\n ax = fig.add_axes([0.03, 0.03, 0.90, 0.94], projection=\"full_axes\")\n ax.set_xlim(19, 29), ax.set_ylim(31, 35.5)\n ax.set_aspect(\"equal\")\n ax.set_title(title, weight=\"bold\")\n return ax\n\n\ndef timeline_axes(title=\"\"):\n fig = plt.figure(figsize=(15, 5))\n ax = fig.add_axes([0.03, 0.06, 0.90, 0.88])\n ax.set_title(title, weight=\"bold\")\n ax.xaxis.set_major_formatter(formatter), ax.grid()\n return ax\n\n\ndef update_axes(ax, mappable=None):\n ax.grid(True)\n if mappable:\n return plt.colorbar(mappable, cax=ax.figure.add_axes([0.94, 0.05, 0.01, 0.9]))" + "class VideoAnimation(FuncAnimation):\n def _repr_html_(self, *args, **kwargs):\n \"\"\"To get video in html and have a player\"\"\"\n content = self.to_html5_video()\n return re.sub(\n r'width=\"[0-9]*\"\\sheight=\"[0-9]*\"', 'width=\"100%\" height=\"100%\"', content\n )\n\n def save(self, *args, **kwargs):\n if args[0].endswith(\"gif\"):\n # In this case gif is use to create thumbnail which are not use but consume same time than video\n # So we create an empty file, to save time\n with open(args[0], \"w\") as _:\n pass\n return\n return super().save(*args, **kwargs)\n\n\n@FuncFormatter\ndef formatter(x, pos):\n return (timedelta(x) + datetime(1950, 1, 1)).strftime(\"%d/%m/%Y\")\n\n\ndef start_axes(title=\"\"):\n fig = plt.figure(figsize=(13, 6))\n ax = fig.add_axes([0.03, 0.03, 0.90, 0.94], projection=GUI_AXES)\n ax.set_xlim(19, 29), ax.set_ylim(31, 35.5)\n ax.set_aspect(\"equal\")\n 
ax.set_title(title, weight=\"bold\")\n return ax\n\n\ndef timeline_axes(title=\"\"):\n fig = plt.figure(figsize=(15, 5))\n ax = fig.add_axes([0.03, 0.06, 0.90, 0.88])\n ax.set_title(title, weight=\"bold\")\n ax.xaxis.set_major_formatter(formatter), ax.grid()\n return ax\n\n\ndef update_axes(ax, mappable=None):\n ax.grid(True)\n if mappable:\n return plt.colorbar(mappable, cax=ax.figure.add_axes([0.94, 0.05, 0.01, 0.9]))" ] }, { @@ -248,7 +248,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.9.2" } }, "nbformat": 4, diff --git a/notebooks/python_module/16_network/pet_relative.ipynb b/notebooks/python_module/16_network/pet_relative.ipynb index 23537375..cee4010a 100644 --- a/notebooks/python_module/16_network/pet_relative.ipynb +++ b/notebooks/python_module/16_network/pet_relative.ipynb @@ -26,7 +26,7 @@ }, "outputs": [], "source": [ - "from matplotlib import pyplot as plt\nfrom numpy import where\n\nimport py_eddy_tracker.gui\nfrom py_eddy_tracker import data\nfrom py_eddy_tracker.observations.network import NetworkObservations" + "from matplotlib import pyplot as plt\nfrom numpy import where\n\nfrom py_eddy_tracker import data\nfrom py_eddy_tracker.gui import GUI_AXES\nfrom py_eddy_tracker.observations.network import NetworkObservations" ] }, { @@ -447,7 +447,7 @@ }, "outputs": [], "source": [ - "fig = plt.figure(figsize=(15, 8))\nax = fig.add_axes([0.04, 0.06, 0.94, 0.88], projection=\"full_axes\")\nn.plot(ax, color_cycle=n.COLORS)\nax.set_xlim(17.5, 27.5), ax.set_ylim(31, 36), ax.grid()\nax = fig.add_axes([0.08, 0.7, 0.7, 0.3])\n_ = n.display_timeline(ax)" + "fig = plt.figure(figsize=(15, 8))\nax = fig.add_axes([0.04, 0.06, 0.94, 0.88], projection=GUI_AXES)\nn.plot(ax, color_cycle=n.COLORS)\nax.set_xlim(17.5, 27.5), ax.set_ylim(31, 36), ax.grid()\nax = fig.add_axes([0.08, 0.7, 0.7, 0.3])\n_ = n.display_timeline(ax)" ] }, { @@ -465,7 +465,7 @@ }, "outputs": [], "source": [ - "fig = plt.figure(figsize=(15, 8))\nax = fig.add_axes([0.04, 0.06, 0.90, 0.88], projection=\"full_axes\")\nn.plot(ax, color_cycle=n.COLORS)\nm1, m0, m0_stop = n.merging_event(triplet=True)\nm1.display(ax, color=\"violet\", lw=2, label=\"Eddies after merging\")\nm0.display(ax, color=\"blueviolet\", lw=2, label=\"Eddies before merging\")\nm0_stop.display(ax, color=\"black\", lw=2, label=\"Eddies stopped by merging\")\nax.plot(m1.lon, m1.lat, marker=\".\", color=\"purple\", ls=\"\")\nax.plot(m0.lon, m0.lat, marker=\".\", color=\"blueviolet\", ls=\"\")\nax.plot(m0_stop.lon, m0_stop.lat, marker=\".\", color=\"black\", ls=\"\")\nax.legend()\nax.set_xlim(17.5, 27.5), ax.set_ylim(31, 36), ax.grid()\nm1" + "fig = plt.figure(figsize=(15, 8))\nax = fig.add_axes([0.04, 0.06, 0.90, 0.88], projection=GUI_AXES)\nn.plot(ax, color_cycle=n.COLORS)\nm1, m0, m0_stop = n.merging_event(triplet=True)\nm1.display(ax, color=\"violet\", lw=2, label=\"Eddies after merging\")\nm0.display(ax, color=\"blueviolet\", lw=2, label=\"Eddies before merging\")\nm0_stop.display(ax, color=\"black\", lw=2, label=\"Eddies stopped by merging\")\nax.plot(m1.lon, m1.lat, marker=\".\", color=\"purple\", ls=\"\")\nax.plot(m0.lon, m0.lat, marker=\".\", color=\"blueviolet\", ls=\"\")\nax.plot(m0_stop.lon, m0_stop.lat, marker=\".\", color=\"black\", ls=\"\")\nax.legend()\nax.set_xlim(17.5, 27.5), ax.set_ylim(31, 36), ax.grid()\nm1" ] }, { @@ -483,7 +483,7 @@ }, "outputs": [], "source": [ - "fig = plt.figure(figsize=(15, 8))\nax = fig.add_axes([0.04, 0.06, 0.90, 0.88], 
projection=\"full_axes\")\nn.plot(ax, color_cycle=n.COLORS)\ns0, s1, s1_start = n.spliting_event(triplet=True)\ns0.display(ax, color=\"violet\", lw=2, label=\"Eddies before splitting\")\ns1.display(ax, color=\"blueviolet\", lw=2, label=\"Eddies after splitting\")\ns1_start.display(ax, color=\"black\", lw=2, label=\"Eddies starting by splitting\")\nax.plot(s0.lon, s0.lat, marker=\".\", color=\"purple\", ls=\"\")\nax.plot(s1.lon, s1.lat, marker=\".\", color=\"blueviolet\", ls=\"\")\nax.plot(s1_start.lon, s1_start.lat, marker=\".\", color=\"black\", ls=\"\")\nax.legend()\nax.set_xlim(17.5, 27.5), ax.set_ylim(31, 36), ax.grid()\ns1" + "fig = plt.figure(figsize=(15, 8))\nax = fig.add_axes([0.04, 0.06, 0.90, 0.88], projection=GUI_AXES)\nn.plot(ax, color_cycle=n.COLORS)\ns0, s1, s1_start = n.spliting_event(triplet=True)\ns0.display(ax, color=\"violet\", lw=2, label=\"Eddies before splitting\")\ns1.display(ax, color=\"blueviolet\", lw=2, label=\"Eddies after splitting\")\ns1_start.display(ax, color=\"black\", lw=2, label=\"Eddies starting by splitting\")\nax.plot(s0.lon, s0.lat, marker=\".\", color=\"purple\", ls=\"\")\nax.plot(s1.lon, s1.lat, marker=\".\", color=\"blueviolet\", ls=\"\")\nax.plot(s1_start.lon, s1_start.lat, marker=\".\", color=\"black\", ls=\"\")\nax.legend()\nax.set_xlim(17.5, 27.5), ax.set_ylim(31, 36), ax.grid()\ns1" ] }, { @@ -501,7 +501,7 @@ }, "outputs": [], "source": [ - "fig = plt.figure(figsize=(15, 8))\nax = fig.add_axes([0.04, 0.06, 0.90, 0.88], projection=\"full_axes\")\nbirth = n.birth_event()\nbirth.display(ax)\nax.set_xlim(17.5, 27.5), ax.set_ylim(31, 36), ax.grid()\nbirth" + "fig = plt.figure(figsize=(15, 8))\nax = fig.add_axes([0.04, 0.06, 0.90, 0.88], projection=GUI_AXES)\nbirth = n.birth_event()\nbirth.display(ax)\nax.set_xlim(17.5, 27.5), ax.set_ylim(31, 36), ax.grid()\nbirth" ] }, { @@ -519,7 +519,7 @@ }, "outputs": [], "source": [ - "fig = plt.figure(figsize=(15, 8))\nax = fig.add_axes([0.04, 0.06, 0.90, 0.88], projection=\"full_axes\")\ndeath = n.death_event()\ndeath.display(ax)\nax.set_xlim(17.5, 27.5), ax.set_ylim(31, 36), ax.grid()\ndeath" + "fig = plt.figure(figsize=(15, 8))\nax = fig.add_axes([0.04, 0.06, 0.90, 0.88], projection=GUI_AXES)\ndeath = n.death_event()\ndeath.display(ax)\nax.set_xlim(17.5, 27.5), ax.set_ylim(31, 36), ax.grid()\ndeath" ] } ], @@ -539,7 +539,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.9.2" } }, "nbformat": 4, diff --git a/notebooks/python_module/16_network/pet_replay_segmentation.ipynb b/notebooks/python_module/16_network/pet_replay_segmentation.ipynb index 7acb3f51..48f4955b 100644 --- a/notebooks/python_module/16_network/pet_replay_segmentation.ipynb +++ b/notebooks/python_module/16_network/pet_replay_segmentation.ipynb @@ -26,7 +26,7 @@ }, "outputs": [], "source": [ - "from datetime import datetime, timedelta\n\nfrom matplotlib import pyplot as plt\nfrom matplotlib.ticker import FuncFormatter\nfrom numpy import where\n\nimport py_eddy_tracker.gui\nfrom py_eddy_tracker.data import get_demo_path\nfrom py_eddy_tracker.observations.network import NetworkObservations\nfrom py_eddy_tracker.observations.tracking import TrackEddiesObservations\n\n\n@FuncFormatter\ndef formatter(x, pos):\n return (timedelta(x) + datetime(1950, 1, 1)).strftime(\"%d/%m/%Y\")\n\n\ndef start_axes(title=\"\"):\n fig = plt.figure(figsize=(13, 6))\n ax = fig.add_axes([0.03, 0.03, 0.90, 0.94], projection=\"full_axes\")\n ax.set_xlim(19, 29), ax.set_ylim(31, 35.5)\n 
ax.set_aspect(\"equal\")\n ax.set_title(title, weight=\"bold\")\n return ax\n\n\ndef timeline_axes(title=\"\"):\n fig = plt.figure(figsize=(15, 5))\n ax = fig.add_axes([0.04, 0.06, 0.89, 0.88])\n ax.set_title(title, weight=\"bold\")\n ax.xaxis.set_major_formatter(formatter), ax.grid()\n return ax\n\n\ndef update_axes(ax, mappable=None):\n ax.grid(True)\n if mappable:\n return plt.colorbar(mappable, cax=ax.figure.add_axes([0.94, 0.05, 0.01, 0.9]))" + "from datetime import datetime, timedelta\n\nfrom matplotlib import pyplot as plt\nfrom matplotlib.ticker import FuncFormatter\nfrom numpy import where\n\nfrom py_eddy_tracker.data import get_demo_path\nfrom py_eddy_tracker.gui import GUI_AXES\nfrom py_eddy_tracker.observations.network import NetworkObservations\nfrom py_eddy_tracker.observations.tracking import TrackEddiesObservations\n\n\n@FuncFormatter\ndef formatter(x, pos):\n return (timedelta(x) + datetime(1950, 1, 1)).strftime(\"%d/%m/%Y\")\n\n\ndef start_axes(title=\"\"):\n fig = plt.figure(figsize=(13, 6))\n ax = fig.add_axes([0.03, 0.03, 0.90, 0.94], projection=GUI_AXES)\n ax.set_xlim(19, 29), ax.set_ylim(31, 35.5)\n ax.set_aspect(\"equal\")\n ax.set_title(title, weight=\"bold\")\n return ax\n\n\ndef timeline_axes(title=\"\"):\n fig = plt.figure(figsize=(15, 5))\n ax = fig.add_axes([0.04, 0.06, 0.89, 0.88])\n ax.set_title(title, weight=\"bold\")\n ax.xaxis.set_major_formatter(formatter), ax.grid()\n return ax\n\n\ndef update_axes(ax, mappable=None):\n ax.grid(True)\n if mappable:\n return plt.colorbar(mappable, cax=ax.figure.add_axes([0.94, 0.05, 0.01, 0.9]))" ] }, { @@ -172,7 +172,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.9.2" } }, "nbformat": 4, diff --git a/notebooks/python_module/16_network/pet_segmentation_anim.ipynb b/notebooks/python_module/16_network/pet_segmentation_anim.ipynb index 18da8478..ae36381c 100644 --- a/notebooks/python_module/16_network/pet_segmentation_anim.ipynb +++ b/notebooks/python_module/16_network/pet_segmentation_anim.ipynb @@ -26,7 +26,7 @@ }, "outputs": [], "source": [ - "# sphinx_gallery_thumbnail_number = 2\nimport re\n\nfrom matplotlib import pyplot as plt\nfrom matplotlib.animation import FuncAnimation\nfrom matplotlib.colors import ListedColormap\nfrom numpy import ones, where\n\nimport py_eddy_tracker.gui\nfrom py_eddy_tracker.data import get_demo_path\nfrom py_eddy_tracker.observations.network import NetworkObservations\nfrom py_eddy_tracker.observations.tracking import TrackEddiesObservations" + "# sphinx_gallery_thumbnail_number = 2\nimport re\n\nfrom matplotlib import pyplot as plt\nfrom matplotlib.animation import FuncAnimation\nfrom matplotlib.colors import ListedColormap\nfrom numpy import ones, where\n\nfrom py_eddy_tracker.data import get_demo_path\nfrom py_eddy_tracker.gui import GUI_AXES\nfrom py_eddy_tracker.observations.network import NetworkObservations\nfrom py_eddy_tracker.observations.tracking import TrackEddiesObservations" ] }, { @@ -109,7 +109,7 @@ }, "outputs": [], "source": [ - "def update(i_frame):\n tr = TRACKS[i_frame]\n mappable_tracks.set_array(tr)\n s = 40 * ones(tr.shape)\n s[tr == 0] = 4\n mappable_tracks.set_sizes(s)\n\n indices_frames = INDICES[i_frame]\n mappable_CONTOUR.set_data(\n e.contour_lon_e[indices_frames],\n e.contour_lat_e[indices_frames],\n )\n mappable_CONTOUR.set_color(cmap.colors[tr[indices_frames] % len(cmap.colors)])\n return (mappable_tracks,)\n\n\nfig = plt.figure(figsize=(16, 9), dpi=60)\nax = fig.add_axes([0.04, 0.06, 
0.94, 0.88], projection=\"full_axes\")\nax.set_title(f\"{len(e)} observations to segment\")\nax.set_xlim(19, 29), ax.set_ylim(31, 35.5), ax.grid()\nvmax = TRACKS[-1].max()\ncmap = ListedColormap([\"gray\", *e.COLORS[:-1]], name=\"from_list\", N=vmax)\nmappable_tracks = ax.scatter(\n e.lon, e.lat, c=TRACKS[0], cmap=cmap, vmin=0, vmax=vmax, s=20\n)\nmappable_CONTOUR = ax.plot(\n e.contour_lon_e[INDICES[0]], e.contour_lat_e[INDICES[0]], color=cmap.colors[0]\n)[0]\nani = VideoAnimation(fig, update, frames=range(1, len(TRACKS), 4), interval=125)" + "def update(i_frame):\n tr = TRACKS[i_frame]\n mappable_tracks.set_array(tr)\n s = 40 * ones(tr.shape)\n s[tr == 0] = 4\n mappable_tracks.set_sizes(s)\n\n indices_frames = INDICES[i_frame]\n mappable_CONTOUR.set_data(\n e.contour_lon_e[indices_frames],\n e.contour_lat_e[indices_frames],\n )\n mappable_CONTOUR.set_color(cmap.colors[tr[indices_frames] % len(cmap.colors)])\n return (mappable_tracks,)\n\n\nfig = plt.figure(figsize=(16, 9), dpi=60)\nax = fig.add_axes([0.04, 0.06, 0.94, 0.88], projection=GUI_AXES)\nax.set_title(f\"{len(e)} observations to segment\")\nax.set_xlim(19, 29), ax.set_ylim(31, 35.5), ax.grid()\nvmax = TRACKS[-1].max()\ncmap = ListedColormap([\"gray\", *e.COLORS[:-1]], name=\"from_list\", N=vmax)\nmappable_tracks = ax.scatter(\n e.lon, e.lat, c=TRACKS[0], cmap=cmap, vmin=0, vmax=vmax, s=20\n)\nmappable_CONTOUR = ax.plot(\n e.contour_lon_e[INDICES[0]], e.contour_lat_e[INDICES[0]], color=cmap.colors[0]\n)[0]\nani = VideoAnimation(fig, update, frames=range(1, len(TRACKS), 4), interval=125)" ] }, { @@ -127,7 +127,7 @@ }, "outputs": [], "source": [ - "fig = plt.figure(figsize=(16, 9))\nax = fig.add_axes([0.04, 0.06, 0.94, 0.88], projection=\"full_axes\")\nax.set_xlim(19, 29), ax.set_ylim(31, 35.5), ax.grid()\n_ = ax.scatter(e.lon, e.lat, c=TRACKS[-1], cmap=cmap, vmin=0, vmax=vmax, s=20)" + "fig = plt.figure(figsize=(16, 9))\nax = fig.add_axes([0.04, 0.06, 0.94, 0.88], projection=GUI_AXES)\nax.set_xlim(19, 29), ax.set_ylim(31, 35.5), ax.grid()\n_ = ax.scatter(e.lon, e.lat, c=TRACKS[-1], cmap=cmap, vmin=0, vmax=vmax, s=20)" ] } ], @@ -147,7 +147,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.9.2" } }, "nbformat": 4, diff --git a/src/py_eddy_tracker/gui.py b/src/py_eddy_tracker/gui.py index 423ff306..1fe236dd 100644 --- a/src/py_eddy_tracker/gui.py +++ b/src/py_eddy_tracker/gui.py @@ -26,12 +26,15 @@ def __init__(self, *args, **kwargs): self.set_aspect("equal") +GUI_AXES = "full_axes" + + class GUIAxes(PlatCarreAxes): """ Axes which will use full space available """ - name = "full_axes" + name = GUI_AXES def end_pan(self, *args, **kwargs): (x0, x1), (y0, y1) = self.get_xlim(), self.get_ylim() @@ -125,7 +128,7 @@ def med(self): def setup(self): self.figure = plt.figure() # map - self.map = self.figure.add_axes((0, 0.25, 1, 0.75), projection="full_axes") + self.map = self.figure.add_axes((0, 0.25, 1, 0.75), projection=GUI_AXES) self.map.grid() self.map.tick_params("both", pad=-22) # self.map.tick_params("y", pad=-22) From 8de7284c08dbb6c4c425b40cb9b2e2d2f4dcdf0c Mon Sep 17 00:00:00 2001 From: AntSimi <36040805+AntSimi@users.noreply.github.com> Date: Mon, 5 Apr 2021 16:47:34 +0200 Subject: [PATCH 011/115] Clean requirements --- requirements.txt | 2 -- requirements_dev.txt | 7 +++++++ requirements_doc.txt | 16 ---------------- 3 files changed, 7 insertions(+), 18 deletions(-) create mode 100644 requirements_dev.txt delete mode 100644 requirements_doc.txt diff --git 
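Side note on the gui.py hunk above: matplotlib resolves the projection= keyword through its projection registry, so once GUIAxes is registered under the name stored in GUI_AXES, importing the module and passing the constant is all a script needs. A minimal sketch of the pattern used by the converted notebooks (the title string is an arbitrary illustration value):

    from matplotlib import pyplot as plt
    # importing the module registers the "full_axes" projection as a side effect
    from py_eddy_tracker.gui import GUI_AXES

    fig = plt.figure(figsize=(13, 5))
    ax = fig.add_axes([0.03, 0.03, 0.90, 0.94], projection=GUI_AXES)
    ax.set_title("any py-eddy-tracker map", weight="bold")
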
a/requirements.txt b/requirements.txt index eae54426..9539c555 100644 --- a/requirements.txt +++ b/requirements.txt @@ -9,5 +9,3 @@ pyyaml requests scipy zarr -# for binder -pyeddytrackersample \ No newline at end of file diff --git a/requirements_dev.txt b/requirements_dev.txt new file mode 100644 index 00000000..a005c37d --- /dev/null +++ b/requirements_dev.txt @@ -0,0 +1,7 @@ +-r requirements.txt +isort +black +blackdoc +flake8 +pytest +pytest-cov \ No newline at end of file diff --git a/requirements_doc.txt b/requirements_doc.txt deleted file mode 100644 index 0d926b32..00000000 --- a/requirements_doc.txt +++ /dev/null @@ -1,16 +0,0 @@ -matplotlib -netCDF4 -numba -numpy -opencv-python -pint -polygon3 -pyyaml -requests -scipy -zarr -# doc -sphinx-gallery -pyeddytrackersample -sphinx_rtd_theme -sphinx>=3.1 From 872f65ad7a0bec68848d3d7dbff4567c9141e7f8 Mon Sep 17 00:00:00 2001 From: AntSimi <36040805+AntSimi@users.noreply.github.com> Date: Mon, 5 Apr 2021 16:47:59 +0200 Subject: [PATCH 012/115] Add test for visvalingam --- tests/test_poly.py | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/tests/test_poly.py b/tests/test_poly.py index 1a0edb6f..cca53635 100644 --- a/tests/test_poly.py +++ b/tests/test_poly.py @@ -1,4 +1,4 @@ -from numpy import array, pi +from numpy import array, pi, roll from pytest import approx from py_eddy_tracker.poly import ( @@ -40,6 +40,14 @@ def test_convex_hull(): def test_visvalingam(): x = array([1, 2, 3, 4, 5, 6.75, 6, 1]) y = array([-0.5, -1.5, -1, -1.75, -1, -1, -0.5, -0.5]) + x_target = [1, 2, 3, 4, 6, 1] + y_target = [-0.5, -1.5, -1, -1.75, -0.5, -0.5] x_, y_ = visvalingam(x, y, 6) - assert ([1, 2, 3, 4, 6, 1] == x_).all() - assert ([-0.5, -1.5, -1, -1.75, -0.5, -0.5] == y_).all() + assert (x_target == x_).all() + assert (y_target == y_).all() + x_, y_ = visvalingam(x[:-1], y[:-1], 6) + assert (x_target == x_).all() + assert (y_target == y_).all() + x_, y_ = visvalingam(roll(x, 2), roll(y, 2), 6) + assert (x_target[:-1] == x_[1:]).all() + assert (y_target[:-1] == y_[1:]).all() From 5ffc05adb3a160b939af4b24fda9436b19f69b99 Mon Sep 17 00:00:00 2001 From: AntSimi <36040805+AntSimi@users.noreply.github.com> Date: Mon, 5 Apr 2021 23:44:44 +0200 Subject: [PATCH 013/115] Add documentation about visvalingam and a correction about point removed --- src/py_eddy_tracker/observations/network.py | 7 +- src/py_eddy_tracker/poly.py | 105 +++++++++++--------- 2 files changed, 60 insertions(+), 52 deletions(-) diff --git a/src/py_eddy_tracker/observations/network.py b/src/py_eddy_tracker/observations/network.py index 115197de..0758d940 100644 --- a/src/py_eddy_tracker/observations/network.py +++ b/src/py_eddy_tracker/observations/network.py @@ -15,8 +15,8 @@ empty, in1d, ones, - uint32, uint16, + uint32, unique, where, zeros, @@ -490,7 +490,9 @@ def relatives(self, obs, order=2): """ Extract the segments at a certain order from multiple observations. - :param iterable,int obs: indices of observation for relatives computation. Can be one observation (int) or collection of observations (iterable(int)) + :param iterable,int obs: + indices of observation for relatives computation. Can be one observation (int) + or collection of observations (iterable(int)) :param int order: order of relatives wanted. 0 means only observations in obs, 1 means direct relatives, ...
:return: all segments relatives @@ -532,7 +534,6 @@ def relatives(self, obs, order=2): segments_connexion[n_seg][1].append(seg) i_obs = [obs] if not hasattr(obs, "__iter__") else obs - distance = zeros(segment.size, dtype=uint16) - 1 def loop(seg, dist=1): diff --git a/src/py_eddy_tracker/poly.py b/src/py_eddy_tracker/poly.py index ad8fc148..1d40cc84 100644 --- a/src/py_eddy_tracker/poly.py +++ b/src/py_eddy_tracker/poly.py @@ -716,50 +716,85 @@ def tri_area2(x, y, i0, i1, i2): def visvalingam(x, y, fixed_size=18): """Polygon simplification with visvalingam algorithm + X, Y are considered as a polygon, the next point after the last one is the first one :param array x: :param array y: :param int fixed_size: array size of out - :return: New (x, y) array + :return: + New (x, y) array, the last position will be equal to the first one; if array size is 6, + there are only 5 distinct points. :rtype: array,array + + .. plot:: + + import matplotlib.pyplot as plt + import numpy as np + from py_eddy_tracker.poly import visvalingam + + x = np.array([1, 2, 3, 4, 5, 6.75, 6, 1]) + y = np.array([-0.5, -1.5, -1, -1.75, -1, -1, -0.5, -0.5]) + ax = plt.subplot(111) + ax.set_aspect("equal") + ax.grid(True), ax.set_ylim(-2, -.2) + ax.plot(x, y, "r", lw=5) + ax.plot(*visvalingam(x, y, 6), "b", lw=2) + plt.show() """ + # TODO : in case of original size smaller than fixed size, jump at the end nb = x.shape[0] - i0, i1 = nb - 3, nb - 2 + nb_ori = nb + # Get indexes of first triangle + i0, i1 = nb - 2, nb - 1 + # Init heap with first area and triangle h = [(tri_area2(x, y, i0, i1, 0), (i0, i1, 0))] + # Roll index for next one i0 = i1 i1 = 0 - i_previous = empty(nb - 1, dtype=numba_types.int32) - i_next = empty(nb - 1, dtype=numba_types.int32) + # Index of previous valid point + i_previous = empty(nb, dtype=numba_types.int64) + # Index of next valid point + i_next = empty(nb, dtype=numba_types.int64) + # Mask of removed points + removed = zeros(nb, dtype=numba_types.bool_) i_previous[0] = -1 i_next[0] = -1 - for i in range(1, nb - 1): + for i in range(1, nb): i_previous[i] = -1 i_next[i] = -1 + # We add triangle area for all triangles heapq.heappush(h, (tri_area2(x, y, i0, i1, i), (i0, i1, i))) i0 = i1 i1 = i # we continue until we are equal to nb_pt - while len(h) >= fixed_size: + while nb >= fixed_size: # We pop lower area _, (i0, i1, i2) = heapq.heappop(h) # We check if triangle is valid(i0 or i2 not removed) - i_p, i_n = i_previous[i0], i_next[i2] - if i_p == -1 and i_n == -1: - # We store reference of delete point - i_previous[i1] = i0 - i_next[i1] = i2 + if removed[i0] or removed[i2]: + # In this case, nothing to do continue - elif i_p == -1: - i2 = i_n - elif i_n == -1: - i0 = i_p - else: - # in this case we replace two point - i0, i2 = i_p, i_n - heapq.heappush(h, (tri_area2(x, y, i0, i1, i2), (i0, i1, i2))) + # Flag obs as removed + removed[i1] = True + # We count points still valid + nb -= 1 + # Modify indexes for the next and previous, we jump over i1 + i_previous[i2] = i0 + i_next[i0] = i2 + # We insert 2 triangles which are modified by the deleted point + # Previous triangle + i_1 = i_previous[i0] + if i_1 == -1: + i_1 = (i0 - 1) % nb_ori + heapq.heappush(h, (tri_area2(x, y, i_1, i0, i2), (i_1, i0, i2))) + # Next triangle + i3 = i_next[i2] + if i3 == -1: + i3 = (i2 + 1) % nb_ori + heapq.heappush(h, (tri_area2(x, y, i0, i2, i3), (i0, i2, i3))) x_new, y_new = empty(fixed_size, dtype=x.dtype), empty(fixed_size, dtype=y.dtype) j = 0 - for i, i_n in enumerate(i_next): - if i_n == -1: + for i, flag in enumerate(removed): + if not flag:
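+                # copy only the surviving points, keeping their original order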
x_new[j] = x[i] y_new[j] = y[i] j += 1 @@ -872,34 +907,6 @@ def poly_indexs(x_p, y_p, x_c, y_c): return indexs -@njit(cache=True) -def poly_indexs_old(x_p, y_p, x_c, y_c): - """ - index of contour for each postion inside a contour, -1 in case of no contour - - :param array x_p: longitude to test - :param array y_p: latitude to test - :param array x_c: longitude of contours - :param array y_c: latitude of contours - """ - nb_p = x_p.shape[0] - nb_c = x_c.shape[0] - indexs = -ones(nb_p, dtype=numba_types.int32) - for i in range(nb_c): - x_, y_ = reduce_size(x_c[i], y_c[i]) - x_c_min, y_c_min = x_.min(), y_.min() - x_c_max, y_c_max = x_.max(), y_.max() - v = create_vertice(x_, y_) - for j in range(nb_p): - if indexs[j] != -1: - continue - x, y = x_p[j], y_p[j] - if x > x_c_min and x < x_c_max and y > y_c_min and y < y_c_max: - if winding_number_poly(x, y, v) != 0: - indexs[j] = i - return indexs - - @njit(cache=True) def insidepoly(x_p, y_p, x_c, y_c): """ From 8e6b5d47c006bc3c79a8836beb885c7db05056fd Mon Sep 17 00:00:00 2001 From: Cori Pegliasco Date: Tue, 6 Apr 2021 14:43:29 +0200 Subject: [PATCH 014/115] - minor english --- examples/01_general_things/pet_storage.py | 2 +- examples/14_generic_tools/pet_fit_contour.py | 12 +++---- examples/16_network/pet_follow_particle.py | 12 +++---- examples/16_network/pet_ioannou_2017_case.py | 34 +++++++++++--------- 4 files changed, 32 insertions(+), 28 deletions(-) diff --git a/examples/01_general_things/pet_storage.py b/examples/01_general_things/pet_storage.py index 28f0f76e..9f0ec61e 100644 --- a/examples/01_general_things/pet_storage.py +++ b/examples/01_general_things/pet_storage.py @@ -32,7 +32,7 @@ # array field like contour/profile are 2D column. # %% -# Eddies files (zarr or netcdf) could be loaded with ```load_file``` method: +# Eddies files (zarr or netcdf) can be loaded with ```load_file``` method: eddies_collections = EddiesObservations.load_file(get_demo_path("Cyclonic_20160515.nc")) eddies_collections.field_table() # offset and scale_factor are used only when data is stored in zarr or netCDF4 diff --git a/examples/14_generic_tools/pet_fit_contour.py b/examples/14_generic_tools/pet_fit_contour.py index 9c3f9183..2d3b6dc9 100644 --- a/examples/14_generic_tools/pet_fit_contour.py +++ b/examples/14_generic_tools/pet_fit_contour.py @@ -15,7 +15,7 @@ from py_eddy_tracker import data from py_eddy_tracker.generic import coordinates_to_local, local_to_coordinates from py_eddy_tracker.observations.observation import EddiesObservations -from py_eddy_tracker.poly import fit_circle_, fit_ellips +from py_eddy_tracker.poly import fit_circle_, fit_ellipse # %% # Load example identification file @@ -23,14 +23,14 @@ # %% -# Function to draw circle or ellips from parameter +# Function to draw circle or ellipse from parameter def build_circle(x0, y0, r): angle = radians(linspace(0, 360, 50)) x_norm, y_norm = cos(angle), sin(angle) return local_to_coordinates(x_norm * r, y_norm * r, x0, y0) -def build_ellips(x0, y0, a, b, theta): +def build_ellipse(x0, y0, a, b, theta): angle = radians(linspace(0, 360, 50)) x = a * cos(theta) * cos(angle) - b * sin(theta) * sin(angle) y = a * sin(theta) * cos(angle) + b * cos(theta) * sin(angle) @@ -38,7 +38,7 @@ def build_ellips(x0, y0, a, b, theta): # %% -# Plot fitted circle or ellips on stored contour +# Plot fitted circle or ellipse on stored contour xs, ys = a.contour_lon_s, a.contour_lat_s fig = plt.figure(figsize=(15, 15)) @@ -51,9 +51,9 @@ def build_ellips(x0, y0, a, b, theta): ax = fig.add_subplot(4, 4, j) 
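    # each panel: stored contour in black, fitted ellipse (green) and circle (red) overlaid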
ax.grid(), ax.set_aspect("equal") ax.plot(x, y, label="store", color="black") - x0, y0, a, b, theta = fit_ellips(x_, y_) + x0, y0, a, b, theta = fit_ellipse(x_, y_) x0, y0 = local_to_coordinates(x0, y0, x0_, y0_) - ax.plot(*build_ellips(x0, y0, a, b, theta), label="ellips", color="green") + ax.plot(*build_ellipse(x0, y0, a, b, theta), label="ellipse", color="green") x0, y0, radius, shape_error = fit_circle_(x_, y_) x0, y0 = local_to_coordinates(x0, y0, x0_, y0_) ax.plot(*build_circle(x0, y0, radius), label="circle", color="red", lw=0.5) diff --git a/examples/16_network/pet_follow_particle.py b/examples/16_network/pet_follow_particle.py index 65746015..0c4be55d 100644 --- a/examples/16_network/pet_follow_particle.py +++ b/examples/16_network/pet_follow_particle.py @@ -33,7 +33,7 @@ def _repr_html_(self, *args, **kwargs): def save(self, *args, **kwargs): if args[0].endswith("gif"): - # In this case gif is use to create thumbnail which are not use but consume same time than video + # In this case gif is used to create a thumbnail which is not used but consumes the same time as the video # So we create an empty file, to save time with open(args[0], "w") as _: pass @@ -147,7 +147,7 @@ def particle_candidate(x, y, c, eddies, t_start, i_target, pct, **kwargs): e = eddies.extract_with_mask(m_start) # to be able to get global index translate_start = where(m_start)[0] - # Identify particle in eddies(only in core) + # Identify particle in eddies (only in core) i_start = e.contains(x, y, intern=True) m = i_start != -1 x, y, i_start = x[m], y[m], i_start[m] @@ -158,9 +158,9 @@ def particle_candidate(x, y, c, eddies, t_start, i_target, pct, **kwargs): e_end = eddies.extract_with_mask(m_end) # to be able to get global index translate_end = where(m_end)[0] - # Id eddies for each alive particle(in core and extern) + # Id eddies for each alive particle (in core and extern) i_end = e_end.contains(x, y) - # compute matrix and filled target array + # compute matrix and fill target array get_matrix(i_start, i_end, translate_start, translate_end, i_target, pct) @njit(cache=True) def get_matrix(i_start, i_end, translate_start, translate_end, i_target, pct): nb_start, nb_end = translate_start.size, translate_end.size # Matrix which will store count for every couple count = zeros((nb_start, nb_end), dtype=nb_types.int32) - # Number of particle in each origin observation + # Number of particles in each origin observation ref = zeros(nb_start, dtype=nb_types.int32) # For each particle for i in range(i_start.size): @@ -181,7 +181,7 @@ def get_matrix(i_start, i_end, translate_start, translate_end, i_target, pct): for i in range(nb_start): for j in range(nb_end): pct_ = count[i, j] - # If there are particle from i to j + # If there are particles from i to j if pct_ != 0: # Get percent pct_ = pct_ / ref[i] * 100.0 diff --git a/examples/16_network/pet_ioannou_2017_case.py b/examples/16_network/pet_ioannou_2017_case.py index 54f124f7..237cfc57 100644 --- a/examples/16_network/pet_ioannou_2017_case.py +++ b/examples/16_network/pet_ioannou_2017_case.py @@ -14,15 +14,19 @@ from matplotlib import pyplot as plt from matplotlib.animation import FuncAnimation from matplotlib.ticker import FuncFormatter -from numpy import arange, where +from numpy import arange, where, array, pi import py_eddy_tracker.gui from py_eddy_tracker.appli.gui import Anim from py_eddy_tracker.data import get_demo_path from py_eddy_tracker.observations.network import NetworkObservations +from py_eddy_tracker.generic import coordinates_to_local, 
local_to_coordinates +from py_eddy_tracker.poly import fit_ellipse # %% + + class VideoAnimation(FuncAnimation): def _repr_html_(self, *args, **kwargs): """To get video in html and have a player""" content = self.to_html5_video() return re.sub( r'width="[0-9]*"\sheight="[0-9]*"', 'width="100%" height="100%"', content ) @@ -192,43 +196,43 @@ def update_axes(ax, mappable=None): # %% # Rotation angle # -------------- -from py_eddy_tracker.generic import coordinates_to_local, local_to_coordinates -from py_eddy_tracker.poly import fit_ellips +# For each obs, fit an ellipse to the contour, with theta the angle from the x-axis, +# a the semi-axis in the x direction and b the semi-axis in the y direction + theta_ = list() a_ = list() b_ = list() for obs in close_to_i3: - x, y = obs['contour_lon_s'], obs['contour_lat_s'] + x, y = obs["contour_lon_s"], obs["contour_lat_s"] x0_, y0_ = x.mean(), y.mean() x_, y_ = coordinates_to_local(x, y, x0_, y0_) - x0, y0, a, b, theta = fit_ellips(x_, y_) + x0, y0, a, b, theta = fit_ellipse(x_, y_) theta_.append(theta) a_.append(a) b_.append(b) -a_=array(a_) -b_=array(b_) +a_ = array(a_) +b_ = array(b_) # %% # Theta ax = timeline_axes() -m = close_to_i3.scatter_timeline(ax, theta_, vmin=-pi/2, vmax=pi/2, cmap='hsv') +m = close_to_i3.scatter_timeline(ax, theta_, vmin=-pi / 2, vmax=pi / 2, cmap="hsv") cb = update_axes(ax, m["scatter"]) # %% -# A +# a ax = timeline_axes() -m = close_to_i3.scatter_timeline(ax, a_ * 1e-3, vmin=0, vmax=80, cmap='Spectral_r') +m = close_to_i3.scatter_timeline(ax, a_ * 1e-3, vmin=0, vmax=80, cmap="Spectral_r") cb = update_axes(ax, m["scatter"]) # %% -# B +# b ax = timeline_axes() -m = close_to_i3.scatter_timeline(ax, b_ * 1e-3, vmin=0, vmax=80, cmap='Spectral_r') +m = close_to_i3.scatter_timeline(ax, b_ * 1e-3, vmin=0, vmax=80, cmap="Spectral_r") cb = update_axes(ax, m["scatter"]) # %% -# A/B +# a/b ax = timeline_axes() -m = close_to_i3.scatter_timeline(ax, a_/b_, vmin=1, vmax=2, cmap='Spectral_r') +m = close_to_i3.scatter_timeline(ax, a_ / b_, vmin=1, vmax=2, cmap="Spectral_r") cb = update_axes(ax, m["scatter"]) - From d883b484acf7677adc04c820078f6ede07f57a71 Mon Sep 17 00:00:00 2001 From: Cori Pegliasco Date: Tue, 6 Apr 2021 14:44:20 +0200 Subject: [PATCH 015/115] -minor english --- .../old_tracker_reference.py | 8 ++++---- src/py_eddy_tracker/observations/network.py | 8 +------- .../observations/observation.py | 20 +++++++++---------- src/py_eddy_tracker/poly.py | 4 ++-- 4 files changed, 17 insertions(+), 23 deletions(-) diff --git a/src/py_eddy_tracker/featured_tracking/old_tracker_reference.py b/src/py_eddy_tracker/featured_tracking/old_tracker_reference.py index 7baaffd3..41e02db9 100644 --- a/src/py_eddy_tracker/featured_tracking/old_tracker_reference.py +++ b/src/py_eddy_tracker/featured_tracking/old_tracker_reference.py @@ -21,13 +21,13 @@ def cost_function(records_in, records_out, distance): return distance def mask_function(self, other, distance): - """We mask link with ellips and ratio""" + """We mask links with an ellipse and a ratio""" - # Compute Parameter of ellips + # Compute parameters of the ellipse minor, major = 1.05, 1.5 - y = self.basic_formula_ellips_major_axis( + y = self.basic_formula_ellipse_major_axis( self.lat, degrees=True, c0=minor, cmin=minor, cmax=major, lat1=23, lat2=5 ) - # mask from ellips + # mask from ellipse mask = self.shifted_ellipsoid_degrees_mask( other, minor=minor, major=y # Minor can be bigger than major?? 
) diff --git a/src/py_eddy_tracker/observations/network.py b/src/py_eddy_tracker/observations/network.py index 7a57bbca..91849955 100644 --- a/src/py_eddy_tracker/observations/network.py +++ b/src/py_eddy_tracker/observations/network.py @@ -513,7 +513,6 @@ def relatives(self, obs, order=2): else: segments_connexion[seg][0] = i_slice - if i_p != -1: if p_seg not in segments_connexion: @@ -531,12 +530,7 @@ def relatives(self, obs, order=2): segments_connexion[seg][1].append(n_seg) segments_connexion[n_seg][1].append(seg) - - i_obs = ( - [obs] - if not hasattr(obs, "__iter__") - else obs - ) + i_obs = [obs] if not hasattr(obs, "__iter__") else obs import numpy as np distance = zeros(segment.size, dtype=np.uint16) - 1 diff --git a/src/py_eddy_tracker/observations/observation.py b/src/py_eddy_tracker/observations/observation.py index 0d0f45b7..5ee45a28 100644 --- a/src/py_eddy_tracker/observations/observation.py +++ b/src/py_eddy_tracker/observations/observation.py @@ -87,7 +87,7 @@ def shifted_ellipsoid_degrees_mask2(lon0, lat0, lon1, lat1, minor=1.5, major=1.5 # Focal f_right = lon0 f_left = f_right - (c - minor) - # Ellips center + # Ellipse center x_c = (f_left + f_right) * 0.5 nb_0, nb_1 = lat0.shape[0], lat1.shape[0] @@ -1248,7 +1248,7 @@ def shifted_ellipsoid_degrees_mask(self, other, minor=1.5, major=1.5): ) def fixed_ellipsoid_mask( - self, other, minor=50, major=100, only_east=False, shifted_ellips=False + self, other, minor=50, major=100, only_east=False, shifted_ellipse=False ): dist = self.distance(other).T accepted = dist < minor @@ -1272,13 +1272,13 @@ def fixed_ellipsoid_mask( ) lon_self = self.lon[index_self] - if shifted_ellips: - x_center_ellips = lon_self - (major - minor) / 2 + if shifted_ellipse: + x_center_ellipse = lon_self - (major - minor) / 2 else: - x_center_ellips = lon_self + x_center_ellipse = lon_self - lon_left_f = x_center_ellips - f_degree - lon_right_f = x_center_ellips + f_degree + lon_left_f = x_center_ellipse - f_degree + lon_right_f = x_center_ellipse + f_degree dist_left_f = distance( lon_left_f, @@ -1302,7 +1302,7 @@ def fixed_ellipsoid_mask( return accepted.T @staticmethod - def basic_formula_ellips_major_axis( + def basic_formula_ellipse_major_axis( lats, cmin=1.5, cmax=10.0, c0=1.5, lat1=13.5, lat2=5.0, degrees=False ): """Give major axis in km with a given latitude""" @@ -2054,8 +2054,8 @@ def contains(self, x, y, intern=False): :rtype: array[int32] """ xname, yname = self.intern(intern) - m = ~ (isnan(x) + isnan(y)) - i = -ones(x.shape, dtype='i4') + m = ~(isnan(x) + isnan(y)) + i = -ones(x.shape, dtype="i4") i[m] = poly_indexs(x[m], y[m], self[xname], self[yname]) return i diff --git a/src/py_eddy_tracker/poly.py b/src/py_eddy_tracker/poly.py index af621423..5e22e797 100644 --- a/src/py_eddy_tracker/poly.py +++ b/src/py_eddy_tracker/poly.py @@ -522,9 +522,9 @@ def fit_circle(x, y): @njit(cache=True) -def fit_ellips(x, y): +def fit_ellipse(x, y): r""" - From a polygon, function will fit an ellips. + From a polygon, function will fit an ellipse. Must be call with local coordinates (in m, to get a radius in m). 
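Two properties of visvalingam pinned down by the tests and docstring above are easy to forget when calling it: the input is treated as a closed polygon (so dropping the duplicated closing point must not change the result), and a fixed_size of 6 stores only 5 distinct points plus the repeated first one. A small standalone check, reusing the toy polygon from test_visvalingam (illustration only, not part of the patch):

    from numpy import array, roll
    from py_eddy_tracker.poly import visvalingam

    x = array([1, 2, 3, 4, 5, 6.75, 6, 1])
    y = array([-0.5, -1.5, -1, -1.75, -1, -1, -0.5, -0.5])

    # closed polygon reduced to 6 stored positions (5 distinct points)
    x_, y_ = visvalingam(x, y, fixed_size=6)

    # same polygon without the duplicated closing point: identical result
    x_o, y_o = visvalingam(x[:-1], y[:-1], 6)
    assert (x_ == x_o).all() and (y_ == y_o).all()

    # rolling the starting point keeps the same selected vertices
    x_r, y_r = visvalingam(roll(x, 2), roll(y, 2), 6)
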
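Similarly, fit_ellipse (the function renamed from fit_ellips in the hunks above) must be called with local coordinates in metres, exactly as the Ioannou example does. A minimal round-trip sketch on a synthetic contour; the centre position and axis lengths are arbitrary illustration values:

    from numpy import cos, linspace, radians, sin
    from py_eddy_tracker.generic import coordinates_to_local, local_to_coordinates
    from py_eddy_tracker.poly import fit_ellipse

    # synthetic eddy contour (degrees) around an assumed centre at 25E, 34N
    angle = radians(linspace(0, 360, 50))
    lon = 25 + 0.4 * cos(angle)
    lat = 34 + 0.2 * sin(angle)

    # project to a local plane (metres), fit, then send the centre back to lon/lat
    x, y = coordinates_to_local(lon, lat, 25, 34)
    x0, y0, a, b, theta = fit_ellipse(x, y)
    lon0, lat0 = local_to_coordinates(x0, y0, 25, 34)
    # a and b are the semi-axes in metres, theta the rotation from the x-axis
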
From 93709e4bd26908aef17898754246b55d50a00924 Mon Sep 17 00:00:00 2001 From: CoriPegliasco <66008544+CoriPegliasco@users.noreply.github.com> Date: Wed, 7 Apr 2021 13:27:19 +0200 Subject: [PATCH 016/115] documentation english corrections (#79) documentation : english corrections --- src/py_eddy_tracker/appli/network.py | 8 +- src/py_eddy_tracker/dataset/grid.py | 128 ++++++++-------- src/py_eddy_tracker/generic.py | 48 +++--- src/py_eddy_tracker/gui.py | 2 +- src/py_eddy_tracker/observations/groups.py | 28 ++-- src/py_eddy_tracker/observations/network.py | 144 ++++++++++-------- .../observations/observation.py | 58 +++---- src/py_eddy_tracker/observations/tracking.py | 36 ++--- 8 files changed, 232 insertions(+), 220 deletions(-) diff --git a/src/py_eddy_tracker/appli/network.py b/src/py_eddy_tracker/appli/network.py index 90450078..5c4cdcaf 100644 --- a/src/py_eddy_tracker/appli/network.py +++ b/src/py_eddy_tracker/appli/network.py @@ -37,7 +37,7 @@ def build_network(): def divide_network(): - parser = EddyParser("Separate path for a same group(network)") + parser = EddyParser("Separate path for a same group (network)") parser.add_argument("input", help="input network file") parser.add_argument("out", help="output file") parser.contour_intern_arg() @@ -66,7 +66,7 @@ def subset_network(): "--length", nargs=2, type=int, - help="Nb of day which must be cover by network, first minimum number of day and last maximum number of day," + help="Nb of days that must be covered by the network, first minimum number of day and last maximum number of day," "if value is negative, this bound won't be used", ) parser.add_argument( @@ -85,8 +85,8 @@ def subset_network(): "--period", nargs=2, type=int, - help="Start day and end day, if it's negative value we will add to day min and add to day max," - "if 0 it s not use", + help="Start day and end day, if it's a negative value we will add to day min and add to day max," + "if 0 it is not used", ) args = parser.parse_args() n = NetworkObservations.load_file(args.input, raw_data=True) diff --git a/src/py_eddy_tracker/dataset/grid.py b/src/py_eddy_tracker/dataset/grid.py index 28dc8330..14fe9ae3 100644 --- a/src/py_eddy_tracker/dataset/grid.py +++ b/src/py_eddy_tracker/dataset/grid.py @@ -153,7 +153,7 @@ def _circle_from_equal_area(vertice): # last coordinates == first lon0, lat0 = lons[1:].mean(), lats[1:].mean() c_x, c_y = coordinates_to_local(lons, lats, lon0, lat0) - # Some time, edge is only a dot of few coordinates + # Sometimes, edge is only a dot of few coordinates d_lon = lons.max() - lons.min() d_lat = lats.max() - lats.min() if d_lon < 1e-7 and d_lat < 1e-7: @@ -239,7 +239,7 @@ def nb_pixel(self): class GridDataset(object): """ - Class to have basic tool on NetCDF Grid + Class for basic tools on NetCDF Grid """ __slots__ = ( @@ -274,7 +274,7 @@ def __init__( :param str x_name: Name of longitude coordinates :param str y_name: Name of latitude coordinates :param bool,None centered: Allow to know how coordinates could be used with pixel - :param dict indexs: A dictionary which set indexs to use for non-coordinate dimensions + :param dict indexs: A dictionary that sets indexes to use for non-coordinate dimensions :param bool unset: Set to True to create an empty grid object without file """ self.dimensions = None @@ -294,7 +294,7 @@ def __init__( self.indexs = dict() if indexs is None else indexs if centered is None: logger.warning( - "We assume pixel position of grid is center for %s", filename + "We assume pixel position of grid is centered for %s", 
filename ) if not unset: self.load_general_features() @@ -314,7 +314,7 @@ def is_centered(self): return self.centered def load_general_features(self): - """Load attrs to be stored in object""" + """Load attrs to be stored in object""" logger.debug( "Load general feature from %(filename)s", dict(filename=self.filename) ) @@ -395,9 +395,9 @@ def load(self): @staticmethod def c_to_bounds(c): """ - Centred coordinates to bounds coordinates + Centered coordinates to bounds coordinates - :param array c: centred coordinates to translate + :param array c: centered coordinates to translate :return: bounds coordinates """ bounds = concatenate((c, (2 * c[-1] - c[-2],))) @@ -558,7 +558,7 @@ def grid_tiles(self, varname, slice_x, slice_y): return data def high_filter(self, grid_name, w_cut, **kwargs): - """Return the grid high-pass filtered, by substracting to the grid the low-pass filter (default: order=1) + """Return the high-pass filtered grid, by substracting to the initial grid the low-pass filtered grid (default: order=1) :param grid_name: the name of the grid :param int, w_cut: the half-power wavelength cutoff (km) @@ -567,7 +567,7 @@ def high_filter(self, grid_name, w_cut, **kwargs): self.vars[grid_name] -= result def low_filter(self, grid_name, w_cut, **kwargs): - """Return the grid low-pass filtered (default: order=1) + """Return the low-pass filtered grid (default: order=1) :param grid_name: the name of the grid :param int, w_cut: the half-power wavelength cutoff (km) @@ -607,11 +607,11 @@ def eddy_identification( :param str grid_height: Grid name of Sea Surface Height :param str uname: Grid name of u speed component :param str vname: Grid name of v speed component - :param datetime.datetime date: Date which will be stored in object to date data + :param datetime.datetime date: Date to be stored in object to date data :param float,int step: Height between two layers in m :param float,int shape_error: Maximal error allowed for outermost contour in % :param int sampling: Number of points to store contours and speed profile - :param str sampling_method: Method to resample 'uniform' or 'visvalingam' + :param str sampling_method: Method to resample, 'uniform' or 'visvalingam' :param (int,int),None pixel_limit: Min and max number of pixels inside the inner and the outermost contour to be considered as an eddy :param float,None precision: Truncate values at the defined precision in m @@ -625,8 +625,8 @@ def eddy_identification( .. 
minigallery:: py_eddy_tracker.GridDataset.eddy_identification """ if not isinstance(date, datetime): - raise Exception("Date argument be a datetime object") - # The inf limit must be in pixel and sup limit in surface + raise Exception("Date argument must be a datetime object") + # The inf limit must be in pixel and sup limit in surface if pixel_limit is None: pixel_limit = (4, 1000) @@ -651,10 +651,10 @@ def eddy_identification( # Get ssh grid data = self.grid(grid_height).astype("f8") - # In case of a reduce mask + # In case of a reduced mask if len(data.mask.shape) == 0 and not data.mask: data.mask = zeros(data.shape, dtype="bool") - # we remove noisy information + # we remove noisy data if precision is not None: data = (data / precision).round() * precision # Compute levels for ssh @@ -754,7 +754,7 @@ def eddy_identification( continue # Test the number of pixels within the outermost contour - # FIXME : Maybe limit max must be replace with a maximum of surface + # FIXME : Maybe limit max must be replaced with a maximum of surface if ( contour.nb_pixel < pixel_limit[0] or contour.nb_pixel > pixel_limit[1] @@ -794,7 +794,7 @@ def eddy_identification( centlon_e = x[centi, centj] centlat_e = y[centi, centj] - # centlat_e and centlon_e must be index of maximum, we will loose some inner contour if it's not + # centlat_e and centlon_e must be indexes of maximum, we will loose some inner contour if it's not ( max_average_speed, speed_contour, @@ -812,7 +812,7 @@ def eddy_identification( pixel_min=pixel_limit[0], ) - # FIXME : Instantiate new EddyObservation object (high cost need to be reviewed) + # FIXME : Instantiate new EddyObservation object (high cost, need to be reviewed) obs = EddiesObservations( size=1, track_extra_variables=track_extra_variables, @@ -928,7 +928,7 @@ def get_uavg( pixel_min=3, ): """ - Calculate geostrophic speed around successive contours + Compute geostrophic speed around successive contours Returns the average """ # Init max speed to search maximum @@ -1040,7 +1040,7 @@ def load(self): @property def bounds(self): - """Give bound""" + """Give bounds""" return self.x_c.min(), self.x_c.max(), self.y_c.min(), self.y_c.max() def bbox_indice(self, vertices): @@ -1072,7 +1072,7 @@ def compute_pixel_path(self, x0, y0, x1, y1): pass def init_pos_interpolator(self): - logger.debug("Create a KdTree could be long ...") + logger.debug("Create a KdTree, could be long ...") self.index_interp = cKDTree( create_vertice(self.x_c.reshape(-1), self.y_c.reshape(-1)) ) @@ -1202,10 +1202,10 @@ def bbox_indice(self, vertices): def get_pixels_in(self, contour): """ - Get indices of pixels in contour. + Get indexes of pixels in contour. 
- :param vertice,Path contour: Contour which enclosed some pixels - :return: Indices of grid in contour + :param vertice,Path contour: Contour that encloses some pixels + :return: Indexes of grid in contour :rtype: array[int],array[int] """ if isinstance(contour, BasePath): @@ -1238,7 +1238,7 @@ def ystep(self): return self._y_step def compute_pixel_path(self, x0, y0, x1, y1): - """Give a series of indexes which describe the path between to position""" + """Give a series of indexes describing the path between two positions""" return compute_pixel_path( x0, y0, @@ -1481,13 +1481,13 @@ def bessel_high_filter(self, grid_name, wave_length, order=1, lat_max=85, **kwar :param str grid_name: grid to filter, data will replace original one :param float wave_length: in km :param int order: order to use, if > 1 negative values of the cardinal sinus are present in kernel - :param float lat_max: absolute latitude above no filtering apply + :param float lat_max: absolute latitude, no filtering above :param dict kwargs: look at :py:meth:`RegularGridDataset.convolve_filter_with_dynamic_kernel` .. minigallery:: py_eddy_tracker.RegularGridDataset.bessel_high_filter """ logger.debug( - "Run filtering with wave of %(wave_length)s km and order of %(order)s ...", + "Run filtering with wavelength of %(wave_length)s km and order of %(order)s ...", dict(wave_length=wave_length, order=order), ) data_out = self.convolve_filter_with_dynamic_kernel( @@ -1813,7 +1813,7 @@ def add_uv(self, grid_height, uname="u", vname="v", stencil_halfwidth=4): ) def speed_coef_mean(self, contour): - """Some nan can be computed over contour if we are near border, + """Some nan can be computed over contour if we are near borders, something to explore """ return mean_on_regular_contour( @@ -1831,10 +1831,10 @@ def init_speed_coef(self, uname="u", vname="v"): def display(self, ax, name, factor=1, ref=None, **kwargs): """ - :param matplotlib.axes.Axes ax: matplotlib axes use to draw + :param matplotlib.axes.Axes ax: matplotlib axes used to draw :param str,array name: variable to display, could be an array :param float factor: multiply grid by - :param float,None ref: if define use like west bound + :param float,None ref: if defined, all coordinates are wrapped with ref as western boundary :param dict kwargs: look at :py:meth:`matplotlib.axes.Axes.pcolormesh` .. minigallery:: py_eddy_tracker.RegularGridDataset.display @@ -1853,10 +1853,10 @@ def display(self, ax, name, factor=1, ref=None, **kwargs): def contour(self, ax, name, factor=1, ref=None, **kwargs): """ - :param matplotlib.axes.Axes ax: matplotlib axes use to draw + :param matplotlib.axes.Axes ax: matplotlib axes used to draw :param str,array name: variable to display, could be an array :param float factor: multiply grid by - :param float,None ref: if define use like west bound + :param float,None ref: if defined, all coordinates are wrapped with ref as western boundary :param dict kwargs: look at :py:meth:`matplotlib.axes.Axes.contour` .. 
minigallery:: py_eddy_tracker.RegularGridDataset.contour @@ -1944,13 +1944,13 @@ def advect(self, x, y, u_name, v_name, nb_step=10, rk4=True, **kw): """ At each call it will update position in place with u & v field - It's a dummy advection which use only one layer of current + It's a dummy advection using only one layer of current :param array x: Longitude of obs to move :param array y: Latitude of obs to move :param str,array u_name: U field to advect obs :param str,array v_name: V field to advect obs - :param int nb_step: Number of iteration before to release data + :param int nb_step: Number of iterations before releasing data .. minigallery:: py_eddy_tracker.GridDataset.advect """ @@ -1967,13 +1967,13 @@ def filament( """ Produce filament with concatenation of advection - It's a dummy advection which use only one layer of current + It's a dummy advection using only one layer of current :param array x: Longitude of obs to move :param array y: Latitude of obs to move :param str,array u_name: U field to advect obs :param str,array v_name: V field to advect obs - :param int nb_step: Number of iteration before to release data + :param int nb_step: Number of iteration before releasing data :param int filament_size: Number of point by filament :return: x,y for a line @@ -2019,7 +2019,7 @@ def advect_rk4(x_g, y_g, u_g, v_g, m_g, x, y, m, nb_step): v00, v01, v10, v11 = 0.0, 0.0, 0.0, 0.0 # On each particle for i in prange(x.size): - # If particle are not valid => continue + # If particle is not valid => continue if m[i]: continue x_, y_ = x[i], y[i] @@ -2037,7 +2037,7 @@ def advect_rk4(x_g, y_g, u_g, v_g, m_g, x, y, m, nb_step): masked, u00, u01, u10, u11, v00, v01, v10, v11 = get_uv_quad( ii_, jj_, u_g, v_g, m_g, nb_x ) - # The 3 following could be in cache operation but this one must be test in any case + # The 3 following could be in cache operation but this one must be tested in any case if masked: x_, y_ = nan, nan m[i] = True @@ -2061,7 +2061,7 @@ def advect_rk4(x_g, y_g, u_g, v_g, m_g, x, y, m, nb_step): m[i] = True break u2, v2 = interp_uv(xd, yd, u00, u01, u10, u11, v00, v01, v10, v11) - # k3, slope at middle with update guess position + # k3, slope at middle with updated guess position x2, y2 = x_ + u2 * 0.5, y_ + v2 * 0.5 ii_, jj_, xd, yd = get_grid_indices( x_ref, y_ref, x_step, y_step, x2, y2, nb_x @@ -2079,7 +2079,7 @@ def advect_rk4(x_g, y_g, u_g, v_g, m_g, x, y, m, nb_step): m[i] = True break u3, v3 = interp_uv(xd, yd, u00, u01, u10, u11, v00, v01, v10, v11) - # k4, slope at end with update guess position + # k4, slope at end with updated guess position x3, y3 = x_ + u3, y_ + v3 ii_, jj_, xd, yd = get_grid_indices( x_ref, y_ref, x_step, y_step, x3, y3, nb_x @@ -2115,14 +2115,14 @@ def advect(x_g, y_g, u_g, v_g, m_g, x, y, m, nb_step): is_circular = abs(x_g[-1] % 360 - (x_g[0] - x_step) % 360) < 1e-5 nb_x_ = x_g.size nb_x = nb_x_ if is_circular else 0 - # Indices which should be never exist + # Indexes which should be never exist i0_old, j0_old = -100000, -100000 masked = False u00, u01, u10, u11 = 0.0, 0.0, 0.0, 0.0 v00, v01, v10, v11 = 0.0, 0.0, 0.0, 0.0 # On each particule for i in prange(x.size): - # If particule are not valid => continue + # If particule is not valid => continue if m[i]: continue # Iterate on whole steps @@ -2130,9 +2130,9 @@ def advect(x_g, y_g, u_g, v_g, m_g, x, y, m, nb_step): i0, j0, xd, yd = get_grid_indices( x_ref, y_ref, x_step, y_step, x[i], y[i], nb_x ) - # corner are the same need only a new xd and yd + # corners are the same, need only a new xd 
and yd if i0 != i0_old or j0 != j0_old: - # Need to be store only on change + # Need to be stored only on change i0_old, j0_old = i0, j0 if not is_circular and (i0 < 0 or i0 > nb_x_): masked = True @@ -2152,7 +2152,7 @@ def advect(x_g, y_g, u_g, v_g, m_g, x, y, m, nb_step): @njit(cache=True, fastmath=True) def compute_pixel_path(x0, y0, x1, y1, x_ori, y_ori, x_step, y_step, nb_x): - """Give a serie of indexes describing the path between two position""" + """Give a serie of indexes describing the path between two positions""" # index nx = x0.shape[0] i_x0 = empty(nx, dtype=numba_types.int_) @@ -2171,23 +2171,23 @@ def compute_pixel_path(x0, y0, x1, y1, x_ori, y_ori, x_step, y_step, nb_x): i_x1 = i_x0 + d_x # Delta index of y d_y = i_y1 - i_y0 - # max and abs sum doesn't work on array? + # max and abs sum do not work on array? d_max = empty(nx, dtype=numba_types.int32) nb_value = 0 for i in range(nx): d_max[i] = max(abs(d_x[i]), abs(d_y[i])) - # Compute number of pixel which we go trought + # Compute number of pixel we go trought nb_value += d_max[i] + 1 - # Create an empty array to store value of pixel across the travel + # Create an empty array to store value of pixel across the path i_g = empty(nb_value, dtype=numba_types.int32) j_g = empty(nb_value, dtype=numba_types.int32) # Index to determine the position in the global array ii = 0 - # Iteration on each travel + # Iteration on each path for i, delta in enumerate(d_max): - # If the travel don't cross multiple pixel + # If the path doesn't cross multiple pixels if delta == 0: i_g[ii : ii + delta + 1] = i_x0[i] j_g[ii : ii + delta + 1] = i_y0[i] @@ -2201,7 +2201,7 @@ def compute_pixel_path(x0, y0, x1, y1, x_ori, y_ori, x_step, y_step, nb_x): sup = -1 if d_x[i] < 0 else 1 i_g[ii : ii + delta + 1] = arange(i_x0[i], i_x1[i] + sup, sup) j_g[ii : ii + delta + 1] = i_y0[i] - # In case of multiple direction + # In case of multiple directions else: a = (i_x1[i] - i_x0[i]) / float(i_y1[i] - i_y0[i]) if abs(d_x[i]) >= abs(d_y[i]): @@ -2479,7 +2479,7 @@ def advect_t(x_g, y_g, u_g0, v_g0, m_g0, u_g1, v_g1, m_g1, x, y, m, weigths, hal is_circular = abs(x_g[-1] % 360 - (x_g[0] - x_step) % 360) < 1e-5 nb_x_ = x_g.size nb_x = nb_x_ if is_circular else 0 - # Indices which should be never exist + # Indexes that should never exist i0_old, j0_old = -100000, -100000 m0, m1 = False, False u000, u001, u010, u011 = 0.0, 0.0, 0.0, 0.0 @@ -2488,7 +2488,7 @@ def advect_t(x_g, y_g, u_g0, v_g0, m_g0, u_g1, v_g1, m_g1, x, y, m, weigths, hal v100, v101, v110, v111 = 0.0, 0.0, 0.0, 0.0 # On each particle for i in prange(x.size): - # If particle are not valid => continue + # If particle is not valid => continue if m[i]: continue # Iterate on whole steps @@ -2497,7 +2497,7 @@ def advect_t(x_g, y_g, u_g0, v_g0, m_g0, u_g1, v_g1, m_g1, x, y, m, weigths, hal x_ref, y_ref, x_step, y_step, x[i], y[i], nb_x ) if i0 != i0_old or j0 != j0_old: - # Need to be store only on change + # Need to be stored only on change i0_old, j0_old = i0, j0 if not is_circular and (i0 < 0 or i0 > nb_x_): m0, m1 = True, True @@ -2528,8 +2528,8 @@ def get_uv_quad(i0, j0, u, v, m, nb_x=0): """ Return u/v for (i0, j0), (i1, j0), (i0, j1), (i1, j1) - :param int i0: indices of longitude - :param int j0: indices of latitude + :param int i0: indexes of longitude + :param int j0: indexes of latitude :param array[float] u: current along x axis :param array[float] v: current along y axis :param array[bool] m: flag to know if position is valid @@ -2552,7 +2552,7 @@ def get_uv_quad(i0, j0, u, v, m, nb_x=0): 
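The advect_rk4 / advect_t_rk4 kernels around this hunk follow the classic fixed-step RK4 pattern: sample the velocity at the start (k1), at two midpoint guesses (k2, k3) and at the full-step guess (k4), then combine them as (k1 + 2 k2 + 2 k3 + k4) / 6. A stripped-down, pure-Python sketch of the same stages on a steady analytic field, without the grid interpolation and masking handled above (illustration only, not the library's kernel):

    from math import pi

    def velocity(x, y):
        # steady solid-body rotation, a divergence-free toy field
        return -y, x

    def rk4_step(x, y, dt):
        u1, v1 = velocity(x, y)                                   # k1: slope at start
        u2, v2 = velocity(x + 0.5 * dt * u1, y + 0.5 * dt * v1)   # k2: slope at midpoint guess
        u3, v3 = velocity(x + 0.5 * dt * u2, y + 0.5 * dt * v2)   # k3: slope at updated midpoint guess
        u4, v4 = velocity(x + dt * u3, y + dt * v3)               # k4: slope at end-of-step guess
        x += dt * (u1 + 2 * u2 + 2 * u3 + u4) / 6
        y += dt * (v1 + 2 * v2 + 2 * v3 + v4) / 6
        return x, y

    # one revolution in 100 steps: the radius is conserved to O(dt^4)
    x, y = 1.0, 0.0
    for _ in range(100):
        x, y = rk4_step(x, y, 2 * pi / 100)
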
@njit(cache=True, fastmath=True) def get_grid_indices(x0, y0, x_step, y_step, x, y, nb_x=0): """ - Return grid indices and weight + Return grid indexes and weight :param float x0: first longitude :param float y0: first latitude @@ -2562,7 +2562,7 @@ def get_grid_indices(x0, y0, x_step, y_step, x, y, nb_x=0): :param float y: latitude to interpolate :param int nb_x: If different of 0 we check if wrapping is needed - :return: indices and weight + :return: indexes and weight :rtype: int,int,float,float """ i, j = (x - x0) / x_step, (y - y0) / y_step @@ -2614,7 +2614,7 @@ def advect_t_rk4( v100, v101, v110, v111 = 0.0, 0.0, 0.0, 0.0 # On each particle for i in prange(x.size): - # If particle are not valid => continue + # If particle is not valid => continue if m[i]: continue x_, y_ = x[i], y[i] @@ -2635,7 +2635,7 @@ def advect_t_rk4( (m1, u100, u101, u110, u111, v100, v101, v110, v111) = get_uv_quad( ii_, jj_, u_g1, v_g1, m_g1, nb_x ) - # The 3 following could be in cache operation but this one must be test in any case + # The 3 following could be in cache operation but this one must be tested in any case if m0 or m1: x_, y_ = nan, nan m[i] = True @@ -2667,7 +2667,7 @@ def advect_t_rk4( u1_, v1_ = interp_uv(xd, yd, u100, u101, u110, u111, v100, v101, v110, v111) w_ = w - half_w u2, v2 = u0_ * w_ + u1_ * (1 - w_), v0_ * w_ + v1_ * (1 - w_) - # k3, slope at middle with update guess position + # k3, slope at middle with updated guess position x2, y2 = x_ + u2 * 0.5, y_ + v2 * 0.5 ii_, jj_, xd, yd = get_grid_indices( x_ref, y_ref, x_step, y_step, x2, y2, nb_x @@ -2690,7 +2690,7 @@ def advect_t_rk4( u0_, v0_ = interp_uv(xd, yd, u000, u001, u010, u011, v000, v001, v010, v011) u1_, v1_ = interp_uv(xd, yd, u100, u101, u110, u111, v100, v101, v110, v111) u3, v3 = u0_ * w_ + u1_ * (1 - w_), v0_ * w_ + v1_ * (1 - w_) - # k4, slope at end with update guess position + # k4, slope at end with updated guess position x3, y3 = x_ + u3, y_ + v3 ii_, jj_, xd, yd = get_grid_indices( x_ref, y_ref, x_step, y_step, x3, y3, nb_x @@ -2737,7 +2737,7 @@ def compute_stencil(x, y, h, m, earth_radius, vertical=False, stencil_halfwidth= :param array x: longitude coordinates :param array y: latitude coordinates :param array h: 2D array to derivate - :param array m: mask associate to h to know where are invalid data + :param array m: mask associated to h to know where are invalid data :param float earth_radius: Earth radius in m :param bool vertical: if True stencil will be vertical (along y) :param int stencil_halfwidth: from 1 to 4 to specify maximal kernel usable @@ -2831,7 +2831,7 @@ def compute_stencil(x, y, h, m, earth_radius, vertical=False, stencil_halfwidth= grad[i, j] = (h3 - h_3 + 9 * (h_2 - h2) + 45 * (h1 - h_1)) / 60 * d_ m_out[i, j] = False continue - # If all value of buffer are available + # If all values of buffer are available grad[i, j] = ( (3 * (h_4 - h4) + 32 * (h3 - h_3) + 168 * (h_2 - h2) + 672 * (h1 - h_1)) / 840 diff --git a/src/py_eddy_tracker/generic.py b/src/py_eddy_tracker/generic.py index 35c23817..6689c8e5 100644 --- a/src/py_eddy_tracker/generic.py +++ b/src/py_eddy_tracker/generic.py @@ -30,7 +30,7 @@ @njit(cache=True) def count_consecutive(mask): """ - Count consecutive event every False flag count restart + Count consecutive events every False flag count restart :param array[bool] mask: event to count :return: count when consecutive event @@ -50,7 +50,7 @@ def count_consecutive(mask): @njit(cache=True) def reverse_index(index, nb): """ - Compute a list of index, which are not in index. 
+ Compute a list of indexes, which are not in index. :param array index: index of group which will be set to False :param array nb: Count for each group @@ -65,17 +65,17 @@ @njit(cache=True) def build_index(groups): - """We expected that variable is monotonous, and return index for each step change. + """We expect that variable is monotonic, and return index for each step change. - :param array groups: array which contain group to be separated - :return: (first_index of each group, last_index of each group, value to shift group) + :param array groups: array that contains groups to be separated + :return: (first_index of each group, last_index of each group, value to shift groups) :rtype: (array, array, int) - Examples -------- >>> build_index(array((1, 1, 3, 4, 4))) (array([0, 2, 2, 3]), array([2, 2, 3, 5]), 1) """ + i0, i1 = groups.min(), groups.max() amplitude = i1 - i0 + 1 # Index of first observation for each group @@ -83,7 +83,7 @@ def build_index(groups): for i, group in enumerate(groups[:-1]): # Get next value to compare next_group = groups[i + 1] - # if different we need to set index for all group between the 2 values + # if different we need to set index for all groups between the 2 values if group != next_group: first_index[group - i0 + 1 : next_group - i0 + 1] = i + 1 last_index = zeros(amplitude, dtype=numba_types.int_) @@ -95,21 +95,21 @@ @njit(cache=True) def hist_numba(x, bins): - """Call numba histogram to speed up.""" + """Call numba histogram to speed up.""" return histogram(x, bins) @njit(cache=True, fastmath=True, parallel=False) def distance_grid(lon0, lat0, lon1, lat1): """ - Get distance for every couple of point. + Get distance for every couple of points. :param array lon0: :param array lat0: :param array lon1: :param array lat1: - :return: nan value for far away point, and km for other + :return: nan value for far away points, and km for others :rtype: array """ nb_0 = lon0.shape[0] @@ -164,7 +164,7 @@ def cumsum_by_track(field, track): Cumsum by track.
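The reset-at-each-track behaviour of cumsum_by_track can be illustrated with a minimal sketch (editor's illustration with plain numpy and hypothetical values, not the numba implementation):

import numpy as np

field = np.array([1, 1, 1, 1, 1])
track = np.array([1, 1, 2, 2, 2])
out = np.empty_like(field)
for t in np.unique(track):
    m = track == t
    out[m] = np.cumsum(field[m])  # cumsum restarts on each track
print(out)  # [1 2 1 2 3]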
:param array field: data to sum - :pram array(int) track: id of track to separate data + :param array(int) track: id of trajectories to separate data :return: cumsum with a reset at each start of track :rtype: array """ @@ -192,7 +192,7 @@ def interp2d_geo(x_g, y_g, z_g, m_g, x, y, nearest=False): :param array m_g: Boolean grid, True if value is masked :param array x: coordinate where interpolate z :param array y: coordinate where interpolate z - :param bool nearest: if true we will take nearest pixel + :param bool nearest: if True we will take nearest pixel :return: z interpolated :rtype: array """ @@ -256,17 +256,17 @@ def interp2d_bilinear(x_g, y_g, z_g, m_g, x, y): nb_x = x_g.shape[0] nb_y = y_g.shape[0] is_circular = abs(x_g[-1] % 360 - (x_g[0] - x_step) % 360) < 1e-5 - # Indices which should be never exist + # Indexes that should never exist i0_old, j0_old, masked = -100000000, -10000000, False z = empty(x.shape, dtype=z_g.dtype) for i in prange(x.size): x_ = (x[i] - x_ref) / x_step y_ = (y[i] - y_ref) / y_step i0 = int(floor(x_)) - # To keep original value if wrapping apply to compute xd + # To keep original values if wrapping applied to compute xd i0_ = i0 j0 = int(floor(y_)) - # corner are the same need only a new xd and yd + # corners are the same, only a new xd and yd are needed if i0 != i0_old or j0 != j0_old: i1 = i0 + 1 j1 = j0 + 1 @@ -288,7 +288,7 @@ def interp2d_bilinear(x_g, y_g, z_g, m_g, x, y): z_g[i1, j1], ) masked = False - # Need to be store only on change + # Need to be stored only on change i0_old, j0_old = i0, j0 if masked: z[i] = nan @@ -359,17 +359,17 @@ def flatten_line_matrix(l_matrix): @njit(cache=True) def simplify(x, y, precision=0.1): """ - Will remove all middle/end point which are closer than precision. + Will remove all middle/end points closer than precision. :param array x: :param array y: - :param float precision: if two points have distance inferior to precision with remove next point + :param float precision: if two points have a distance smaller than precision, we remove the next point :return: (x,y) :rtype: (array,array) """ precision2 = precision ** 2 nb = x.shape[0] - # will be True for value keep + # will be True for kept values mask = ones(nb, dtype=bool_) for j in range(0, nb): x_previous, y_previous = x[j], y[j] @@ -423,7 +423,7 @@ def split_line(x, y, i): :param y: array :param i: array of int at each i change, we cut x, y - :return: x and y separate by nan at each i jump + :return: x and y separated by nan at each i jump """ nb_jump = len(where(i[1:] - i[:-1] != 0)[0]) nb_value = x.shape[0] @@ -445,11 +445,11 @@ def split_line(x, y, i): @njit(cache=True) def wrap_longitude(x, y, ref, cut=False): """ - Will wrap contiguous longitude with reference as west bound. + Will wrap contiguous longitude with reference as western boundary. :param array x: :param array y: - :param float ref: longitude of reference, all the new value will be between ref and ref + 360 + :param float ref: longitude of reference, all the new values will be between ref and ref + 360 :param bool cut: if True line will be cut at the bounds :return: lon,lat :rtype: (array,array) @@ -557,7 +557,7 @@ def local_to_coordinates(x, y, lon0, lat0): @njit(cache=True, fastmath=True) def nearest_grd_indice(x, y, x0, y0, xstep, ystep): """ - Get nearest grid indice from a position. + Get nearest grid index from a position.
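For reference, the per-cell weighting used by interp2d_bilinear boils down to the standard four-corner bilinear formula; a minimal sketch with hypothetical corner values (editor's illustration, not the numba code above):

def bilinear(z00, z10, z01, z11, xd, yd):
    # xd, yd are the fractional positions inside the cell, in [0, 1]
    return (
        z00 * (1 - xd) * (1 - yd)
        + z10 * xd * (1 - yd)
        + z01 * (1 - xd) * yd
        + z11 * xd * yd
    )

print(bilinear(0.0, 1.0, 0.0, 1.0, 0.25, 0.5))  # 0.25, field is linear in x here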
:param x: longitude :param y: latitude @@ -575,7 +575,7 @@ @njit(cache=True) def bbox_indice_regular(vertices, x0, y0, xstep, ystep, N, circular, x_size): """ - Get bbox indice of a contour in a regular grid. + Get bbox index of a contour in a regular grid. :param vertices: vertice of contour :param float x0: first grid longitude diff --git a/src/py_eddy_tracker/gui.py b/src/py_eddy_tracker/gui.py index 423ff306..a90a29a6 100644 --- a/src/py_eddy_tracker/gui.py +++ b/src/py_eddy_tracker/gui.py @@ -28,7 +28,7 @@ def __init__(self, *args, **kwargs): class GUIAxes(PlatCarreAxes): """ - Axes which will use full space available + Axes that uses the full space available """ name = "full_axes" diff --git a/src/py_eddy_tracker/observations/groups.py b/src/py_eddy_tracker/observations/groups.py index 5a01d452..bd8ac81d 100644 --- a/src/py_eddy_tracker/observations/groups.py +++ b/src/py_eddy_tracker/observations/groups.py @@ -13,17 +13,17 @@ def get_missing_indices( array_time, array_track, dt=1, flag_untrack=True, indice_untrack=0 ): - """return indices where it misses values + """Return indexes where values are missing :param np.array(int) array_time : array of strictly increasing int representing time - :param np.array(int) array_track: N° track where observation belong - :param int,float dt: theorical timedelta between 2 observation + :param np.array(int) array_track: N° track where observations belong + :param int,float dt: theoretical timedelta between 2 observations :param bool flag_untrack: if True, ignore observations where n°track equal `indice_untrack` - :param int indice_untrack: n° representing where observations are untrack + :param int indice_untrack: n° representing where observations are untracked ex : array_time = np.array([67, 68, 70, 71, 74, 75]) - array_track= np.array([ 1, 1, 1, 1, 1, 1]) + array_track= np.array([ 1, 1, 1, 1, 1, 1]) return : np.array([2, 4, 4]) """ @@ -72,11 +72,11 @@ def fix_next_previous_obs(self): @abstractmethod def get_missing_indices(self, dt): - "find indices where observations is missing" + "Find indexes where observations are missing" pass def filled_by_interpolation(self, mask): - """Filled selected values by interpolation + """Fill selected values by interpolation :param array(bool) mask: True if must be filled by interpolation @@ -102,20 +102,20 @@ ) def insert_virtual(self): - """insert virtual observation on segments where observations were not found""" + """Insert virtual observations on segments where observations are missing""" dt_theorical = median(self.time[1:] - self.time[:-1]) indices = self.get_missing_indices(dt_theorical) logger.info("%d virtual observation will be added", indices.size) - # new observation size + # new observations size size_obs_corrected = self.time.size + indices.size - # correction of indices for new size + # correction of indexes for new size indices_corrected = indices + arange(indices.size) - # creating mask with indices + # creating mask with indexes mask = zeros(size_obs_corrected, dtype=bool) mask[indices_corrected] = 1 @@ -128,12 +128,12 @@ def keep_tracks_by_date(self, date, nb_days): """ - Find tracks which exist at date `date` and lasted at least `nb_days` after. + Find tracks that exist at date `date` and lasted at least `nb_days` after. :param int,float date: date where the tracks must exist - :param int,float nb_days: number of time where the tracks must exist.
Can be negative + :param int,float nb_days: number of times the tracks must exist. Can be negative - If nb_days is negative, it search a tracks which exist at the date, + If nb_days is negative, it searches a track that exists at the date, but existed at least `nb_days` before the date """ diff --git a/src/py_eddy_tracker/observations/network.py b/src/py_eddy_tracker/observations/network.py index 91849955..c21659f0 100644 --- a/src/py_eddy_tracker/observations/network.py +++ b/src/py_eddy_tracker/observations/network.py @@ -77,10 +77,10 @@ def load_contour(self, filename): @njit(cache=True) def fix_next_previous_obs(next_obs, previous_obs, flag_virtual): - """when an observation is virtual, we have to fix the previous and next obs + """When an observation is virtual, we have to fix the previous and next obs - :param np.array(int) next_obs : indice of next observation from network - :param np.array(int previous_obs: indice of previous observation from network + :param np.array(int) next_obs : index of next observation from network + :param np.array(int previous_obs: index of previous observation from network :param np.array(bool) flag_virtual: if observation is virtual or not """ @@ -88,8 +88,8 @@ def fix_next_previous_obs(next_obs, previous_obs, flag_virtual): if not flag_virtual[i_o]: continue - # if there is many virtual side by side, there is some values writted multiple times. - # but it should not be slow + # if there are several consecutive virtuals, some values are written multiple times. + # but it should not be slow next_obs[i_o - 1] = i_o next_obs[i_o] = i_o + 1 previous_obs[i_o] = i_o - 1 @@ -108,16 +108,18 @@ def __init__(self, *args, **kwargs): def find_segments_relative(self, obs, stopped=None, order=1): """ - find all relative segments within an event from an order. + Find all relative segments linked with merging/splitting events at a specific order. - :param int obs: indice of event after the event - :param int stopped: indice of event before the event + :param int obs: index of event after the event + :param int stopped: index of event before the event :param int order: order of relatives accepted - :return: all segments relatives + :return: all relative segments :rtype: EddiesObservations """ + # FIXME : double "event" in the description, please clarify (event = chosen obs?) + # extraction of network where the event is network_id = self.tracks[obs] nw = self.network(network_id) @@ -133,9 +135,9 @@ def find_segments_relative(self, obs, stopped=None, order=1): return nw.relatives([i_obs, i_stopped], order=order) def get_missing_indices(self, dt): - """find indices where observations is missing. + """Find indexes where observations are missing. 
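The worked example in the groups.py docstring above can be reproduced with a minimal numpy sketch (editor's illustration, ignoring the untracked-flag handling): with dt=1, times 69, 72 and 73 are missing, and their insertion indices are [2, 4, 4].

import numpy as np

array_time = np.array([67, 68, 70, 71, 74, 75])
dt = 1
nb_missing = np.diff(array_time) - dt        # [0, 1, 0, 2, 0]
indices = np.repeat(np.arange(1, array_time.size), nb_missing)
print(indices)                               # [2 4 4]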
- As network have all untrack observation in tracknumber `self.NOGROUP`, + As networks have all untracked observations in tracknumber `self.NOGROUP`, we don't compute them :param int,float dt: theorical delta time between 2 observations @@ -195,8 +197,8 @@ def longer_than(self, nb_day_min=-1, nb_day_max=-1): """ Select network on time duration - :param int nb_day_min: Minimal number of day covered by one network, if negative -> not used - :param int nb_day_max: Maximal number of day covered by one network, if negative -> not used + :param int nb_day_min: Minimal number of days covered by one network, if negative -> not used + :param int nb_day_max: Maximal number of days covered by one network, if negative -> not used """ if nb_day_max < 0: nb_day_max = 1000000000000 @@ -227,7 +229,7 @@ def from_split_network(cls, group_dataset, indexs, **kwargs): continue network[field][:] = group_dataset[field][index_order] network.segment[:] = indexs["track"][index_order] - # n & p must be re-index + # n & p must be re-indexed n, p = indexs["next_obs"][index_order], indexs["previous_obs"][index_order] # we add 2 for -1 index return index -1 translate = -ones(index_order.max() + 2, dtype="i4") @@ -243,7 +245,7 @@ def infos(self, label=""): def correct_close_events(self, nb_days_max=20): """ - transform event where + Transform event where segment A split to B, then A merge into B to @@ -255,6 +257,12 @@ :param float nb_days_max: maximum time to search for splitting-merging event """ + # FIXME : we want to change + # segment A splits from segment B, then x days after segment B merges with A + # to + # segment A splits from segment B then x days after segment A merges with B (B will be longer) + # the comments are the wrong way round, but the example works as intended + _time = self.time # segment used to correct and track changes segment = self.segment_track_array.copy() @@ -265,7 +273,7 @@ previous_obs, next_obs = self.previous_obs, self.next_obs - # record for every segments, the slice, indice of next obs & indice of previous obs + # record for every segment the slice, index of next obs & index of previous obs for i, seg, _ in self.iter_on(segment): if i.start == i.stop: continue @@ -284,12 +292,12 @@ n_seg = segment[i_seg_n] - # if segment has splitting + # if segment is split if i_seg_n != -1: seg2_slice, i2_seg_p, i2_seg_n = segments_connexion[n_seg] p2_seg = segment[i2_seg_p] - # if it merge on the first in a certain time + # if it merges on the first in a certain time if (p2_seg == seg_corrected) and ( _time[i_seg_n] - _time[i2_seg_p] < nb_days_max ): @@ -309,11 +317,10 @@ def sort(self, order=("track", "segment", "time")): """ - sort observations + Sort observations - :param tuple order: order or sorting. Passed to :func:`numpy.argsort` + :param tuple order: order of sorting. Given to :func:`numpy.argsort` """ - index_order = self.obs.argsort(order=order) for field in self.elements: self[field][:] = self[field][index_order] @@ -329,17 +336,17 @@ def obs_relative_order(self, i_obs): def find_link(self, i_observations, forward=True, backward=False): """ - find all observations where obs `i_observation` could be + Find all observations where obs `i_observation` could be in future or past.
- if forward=True, search all observation where water + If forward=True, search all observations where water from obs "i_observation" could go - if backward=True, search all observation + If backward=True, search all observations where water from obs `i_observation` could come from :param int,iterable(int) i_observation: - indices of observation. Can be + indexes of observation. Can be int, or iterable of int. :param bool forward, backward: if forward, search observations after obs. @@ -425,7 +432,7 @@ def func_backward(seg, indice): def connexions(self, multi_network=False): """ - create dictionnary for each segments, gives the segments which interact with + Create a dictionary for each segment, giving the segments in interaction with it """ if multi_network: segment = self.segment_track_array @@ -444,7 +451,7 @@ def add_seg(father, child): if i.start == i.stop: continue i_p, i_n = previous_obs[i.start], next_obs[i.stop - 1] - # segment of interaction + # segment in interaction p_seg, n_seg = segment[i_p], segment[i_n] # Where segment are called if i_p != -1: @@ -489,10 +496,12 @@ def relatives(self, obs, order=2): """ Extract the segments at a certain order from multiple observations. - :param iterable,int obs: indices of observation for relatives computation. Can be one observation (int) or collection of observations (iterable(int)) - :param int order: order of relatives wanted. 0 means only observations in obs, 1 means direct relatives, ... + :param iterable,int obs: indexes of observation for relatives computation. + Can be one observation (int) or collection of observations (iterable(int)) + :param int order: order of relatives wanted. + 0 means only observations in obs, 1 means direct relatives (1 interaction event), ... - :return: all segments relatives + :return: all segments' relatives :rtype: EddiesObservations """ segment = self.segment_track_array @@ -559,7 +568,7 @@ def close_network(self, other, nb_obs_min=10, **kwargs): :param self other: Atlas to compare :param int nb_obs_min: Minimal number of overlap for one trajectory :param dict kwargs: keyword arguments for match function - :return: return other atlas reduce to common track with self + :return: return other atlas reduced to common tracks with self .. warning:: It could be a costly operation for huge dataset @@ -662,7 +671,7 @@ def display_timeline( **kwargs, ): """ - Plot a timeline of a network. + Plot the timeline of a network. Must be called on only one network. :param matplotlib.axes.Axes ax: matplotlib axe used to draw @@ -723,7 +732,7 @@ def display_timeline( return mappables def event_timeline(self, ax, field=None, method=None, factor=1, colors_mode="roll"): - """mark events in plot""" + """Mark events in plot""" j = 0 events = dict(spliting=[], merging=[]) @@ -837,11 +846,11 @@ def map_segment(self, method, y, same=True, **kw): def map_network(self, method, y, same=True, return_dict=False, **kw): """ - transform data `y` with method `method` for each track. + Transform data `y` with method `method` for each track. - :param Callable method: method to apply on each tracks + :param Callable method: method to apply on each track :param np.array y: data where to apply method - :param bool same: if True, return array same size from y. else, return list with track edited + :param bool same: if True, return an array with the same size as y.
Else, return a list with the edited tracks :param bool return_dict: if None, mean values are used :param float kw: to multiply field :return: array or dict of result from method for each network """ if same and return_dict: raise NotImplementedError( - "both condition 'same' and 'return_dict' should no be true" + "both conditions 'same' and 'return_dict' should not be true" ) if same: @@ -893,7 +902,7 @@ def scatter_timeline( **kwargs, ): """ - Must be call on only one network + Must be called on only one network """ self.only_one_network() y = (self.segment if yfield is None else self.parse_varname(yfield)) * yfactor @@ -913,7 +922,7 @@ def scatter_timeline( return mappables def event_map(self, ax, **kwargs): - """Add the merging and splitting events """ + """Add the merging and splitting events to a map""" j = 0 mappables = dict() symbol_kw = dict( @@ -957,12 +966,12 @@ def scatter( **kwargs, ): """ - This function will scatter the path of each network, with the merging and splitting events + This function scatters the path of each network, with the merging and splitting events :param matplotlib.axes.Axes ax: matplotlib axe used to draw :param str,array,None name: - variable used to fill the contour, if None all elements have the same color - :param float,None ref: if define use like west bound + variable used to fill the contours, if None all elements have the same color + :param float,None ref: if defined, ref is used as western boundary :param float factor: multiply value by :param list edgecolor_cycle: list of colors :param dict kwargs: look at :py:meth:`matplotlib.axes.Axes.scatter` @@ -1120,7 +1129,7 @@ def spliting_event(self, triplet=False, only_index=False): def dissociate_network(self): """ - Dissociate network with no known interaction (spliting/merging) + Dissociate networks with no known interaction (splitting/merging) """ tags = self.tag_segment(multi_network=True) @@ -1134,7 +1143,7 @@ self.obs.sort(order=("track", "segment", "time"), kind="mergesort") self._index_network = None - # n & p must be re-index + # n & p must be re-indexed n, p = self.next_obs, self.previous_obs # we add 2 for -1 index return index -1 nb_obs = len(self) @@ -1157,17 +1166,17 @@ def __tag_segment(cls, seg, tag, groups, connexions): """ Will set same temporary ID for each connected segment.
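The recursive tagging described here amounts to labelling connected components of the segment graph. An iterative editor's sketch (hypothetical connexions dict, not the library implementation, which recurses):

def tag_segments(nb_seg, connexions):
    groups = [0] * nb_seg
    tag = 0
    for seed in range(nb_seg):
        if groups[seed]:
            continue  # already tagged
        tag += 1
        stack = [seed]
        while stack:
            seg = stack.pop()
            if groups[seg]:
                continue
            groups[seg] = tag
            stack.extend(connexions.get(seg, ()))
    return groups

print(tag_segments(4, {0: [1], 1: [0, 2]}))  # [1, 1, 1, 2]: segment 3 is alone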
- :param int seg: current ID of seg - :param ing tag: temporary ID to set for seg and its connexion - :param array[int] groups: array where tag will be stored - :param dict connexions: gives for one ID of seg all seg connected + :param int seg: current ID of segment + :param int tag: temporary ID to set for segment and its connexion + :param array[int] groups: array where tag is stored + :param dict connexions: gives for one ID of segment all connected segments """ - # If seg are already used we stop recursivity + # If segment is already used we stop recursion if groups[seg] != 0: return - # We set tag for this seg + # We set tag for this segment groups[seg] = tag - # Get all connexions of this seg + # Get all connexions of this segment segs = connexions.get(seg, None) if segs is not None: for seg in segs: @@ -1197,14 +1206,17 @@ def fully_connected(self): return self.tag_segment().shape[0] == 1 def remove_trash(self): + """ + Remove the lonely eddies (only 1 obs in segment, associated segment number is 0) + """ return self.extract_with_mask(self.track != 0) def plot(self, ax, ref=None, color_cycle=None, **kwargs): """ - This function will draw path of each trajectory + This function draws the path of each trajectory :param matplotlib.axes.Axes ax: ax to draw - :param float,int ref: if defined, all coordinates will be wrapped with ref like west boundary + :param float,int ref: if defined, all coordinates are wrapped with ref as western boundary :param dict kwargs: keyword arguments for Axes.plot :return: a list of matplotlib mappables """ @@ -1231,15 +1243,15 @@ def plot(self, ax, ref=None, color_cycle=None, **kwargs): def remove_dead_end(self, nobs=3, ndays=0, recursive=0, mask=None): """ - Remove short segment which didn't connect several segment + Remove short segments that don't connect several segments - :param int nobs: Minimal number of observation to keep segment - :param int ndays: Minimal number of days to keep segment - :param int recursive: Run method N times more - :param int mask: if one or more observation of segment are select by mask, the segment is keep + :param int nobs: Minimal number of observations to keep a segment + :param int ndays: Minimal number of days to keep a segment + :param int recursive: Run method N times more + :param int mask: if one or more observations of the segment are selected by mask, the segment is kept .. warning:: - It will remove short segment which splits than merges with same segment + It will remove short segments that split from and then merge with the same segment """ segments_keep = list() connexions = self.connexions(multi_network=True) @@ -1275,8 +1287,8 @@ def extract_with_period(self, period): """ Extract within a time period - :param (int,int) period: two dates to define the period, must be specify from 1/1/1950 - :return: Return all eddy tracks which are in bounds + :param (int,int) period: two dates to define the period, must be specified from 1/1/1950 + :return: Return all eddy trajectories in period :rtype: NetworkObservations ..
minigallery:: py_eddy_tracker.NetworkObservations.extract_with_period @@ -1313,7 +1325,7 @@ def extract_with_mask(self, mask): logger.warning("Empty dataset will be created") else: logger.info( - f"{nb_obs} observations will be extract ({nb_obs / self.shape[0]:.3%})" + f"{nb_obs} observations will be extracted ({nb_obs / self.shape[0]:.3%})" ) for field in self.obs.dtype.descr: if field in ("next_obs", "previous_obs"): @@ -1403,7 +1415,7 @@ def group_translator(nb, duos): Create a translator with all duos :param int nb: size of translator - :param set((int, int)) duos: set of all group which must be join + :param set((int, int)) duos: set of all groups that must be joined Examples -------- @@ -1424,7 +1436,7 @@ def group_observations(self, **kwargs): for i, filename in enumerate(self.filenames): if display_iteration: print(f"{filename} compared to {self.window} next", end="\r") - # Load observations with function to buffered observations + # Load observations with function to buffer observations xi, yi = self.buffer.load_contour(filename) # Append number of observations by filename nb_obs.append(xi.shape[0]) @@ -1449,7 +1461,7 @@ def build_dataset(self, group, raw_data=True): model = TrackEddiesObservations.load_file(self.filenames[-1], raw_data=raw_data) eddies = TrackEddiesObservations.new_like(model, nb_obs) eddies.sign_type = model.sign_type - # Get new index to re-order observation by group + # Get new index to re-order observations by groups new_i = get_next_index(group) display_iteration = logger.getEffectiveLevel() == logging.INFO elements = eddies.elements @@ -1477,7 +1489,7 @@ def build_dataset(self, group, raw_data=True): @njit(cache=True) def get_next_index(gr): - """Return for each obs index the new position to join all group""" + """Return for each obs index the new position to join all groups""" nb_obs_gr = bincount(gr) i_gr = nb_obs_gr.cumsum() - nb_obs_gr new_index = empty(gr.shape, dtype=uint32) diff --git a/src/py_eddy_tracker/observations/observation.py b/src/py_eddy_tracker/observations/observation.py index 5ee45a28..d6f3c899 100644 --- a/src/py_eddy_tracker/observations/observation.py +++ b/src/py_eddy_tracker/observations/observation.py @@ -224,7 +224,7 @@ def __eq__(self, other): return array_equal(self.obs, other.obs) def get_color(self, i): - """Return colors like a cyclic list""" + """Return colors as a cyclic list""" return self.COLORS[i % self.NB_COLORS] @property @@ -260,7 +260,7 @@ def hist(self, varname, x, bins, percent=False, mean=False, nb=False): :param str,array varname: variable to use to compute stat :param str,array x: variable to use to know in which bins :param array bins: - :param bool percent: normalize by sum of all bins + :param bool percent: normalized by sum of all bins :param bool mean: compute mean by bins :param bool nb: only count by bins :return: value by bins @@ -279,7 +279,7 @@ def hist(self, varname, x, bins, percent=False, mean=False, nb=False): @staticmethod def box_display(value): - """Return value evenly spaced with few numbers""" + """Return values evenly spaced with few numbers""" return "".join([f"{v_:10.2f}" for v_ in value]) def field_table(self): @@ -437,7 +437,7 @@ def add_rotation_type(self): def circle_contour(self, only_virtual=False, factor=1): """ - Set contours as a circles from radius and center data. + Set contours as circles from radius and center data. .. 
minigallery:: py_eddy_tracker.EddiesObservations.circle_contour """ @@ -572,7 +572,7 @@ def iter_on(self, xname, bins=None): i = numba_digitize(x, bins) - 1 # Order by bins i_sort = i.argsort() - # If in reduce mode we will translate i_sort in full array index + # If in reduced mode we will translate i_sort in full array index i_sort_ = translate[i_sort] if test else i_sort # Bound for each bins in sorting view i0, i1, _ = build_index(i[i_sort]) @@ -613,7 +613,7 @@ def align_on(self, other, var_name="time", **kwargs): yield indexs_self, indexs_other, b0_self, b1_self def insert_observations(self, other, index): - """Insert other obs in self at the index.""" + """Insert other obs in self at the given index.""" if not self.coherence(other): raise Exception("Observations with no coherence") insert_size = len(other.obs) @@ -742,10 +742,10 @@ def load_from_zarr( """Load data from zarr. :param str,store filename: path or store to load data - :param bool raw_data: If true load data without apply scale_factor and add_offset - :param None,list(str) remove_vars: List of variable name which will be not loaded + :param bool raw_data: If true load data without scale_factor and add_offset + :param None,list(str) remove_vars: List of variable name that will be not loaded :param None,list(str) include_vars: If defined only this variable will be loaded - :param None,dict indexs: Indexs to laad only a slice of data + :param None,dict indexs: Indexes to load only a slice of data :param int buffer_size: Size of buffer used to load zarr data :param class_kwargs: argument to set up observations class :return: Obsevations selected @@ -764,7 +764,7 @@ def load_from_zarr( nb_obs = getattr(h_zarr, var_list[0]).shape[0] dims = list(cls.zarr_dimension(filename)) if len(dims) == 2 and nb_obs in dims: - # FIXME must be investigate, in zarr no dimensions name (or could be add in attr) + # FIXME must be investigated, in zarr no dimensions name (or could be add in attr) array_dim = dims[1] if nb_obs == dims[0] else dims[0] if indexs is not None and "obs" in indexs: sl = indexs["obs"] @@ -885,7 +885,7 @@ def load_from_netcdf( :param bool raw_data: If true load data without apply scale_factor and add_offset :param None,list(str) remove_vars: List of variable name which will be not loaded :param None,list(str) include_vars: If defined only this variable will be loaded - :param None,dict indexs: Indexs to laad only a slice of data + :param None,dict indexs: Indexes to load only a slice of data :param class_kwargs: argument to set up observations class :return: Obsevations selected :return type: class @@ -1054,7 +1054,7 @@ def propagate( self, previous_obs, current_obs, obs_to_extend, dead_track, nb_next, model ): """ - Filled virtual obs (C). + Fill virtual obs (C). 
:param previous_obs: previous obs from current (A) :param current_obs: previous obs from virtual (B) @@ -1166,7 +1166,7 @@ def re_reference_index(index, ref): :param array,int index: local index to re ref :param slice,array ref: reference could be a slice in this case we juste add start to index - or could be indexs and in this case we need to translate + or could be indexes and in this case we need to translate """ if isinstance(ref, slice): return index + ref.start @@ -1330,18 +1330,18 @@ def solve_conflict(cost): def solve_simultaneous(cost): """Write something (TODO)""" mask = ~cost.mask - # Count number of link by self obs and other obs + # Count number of links by self obs and other obs self_links, other_links = sum_row_column(mask) max_links = max(self_links.max(), other_links.max()) if max_links > 5: logger.warning("One observation have %d links", max_links) - # If some obs have multiple link, we keep only one link by eddy + # If some obs have multiple links, we keep only one link by eddy eddies_separation = 1 < self_links eddies_merge = 1 < other_links test = eddies_separation.any() or eddies_merge.any() if test: - # We extract matrix which contains concflict + # We extract the matrix that contains conflicts obs_linking_to_self = mask[eddies_separation].any(axis=0) obs_linking_to_other = mask[:, eddies_merge].any(axis=1) i_self_keep = where(obs_linking_to_other + eddies_separation)[0] @@ -1364,13 +1364,13 @@ def solve_simultaneous(cost): security_increment = 0 while False in cost_reduce.mask: if security_increment > max_iteration: - # Maybe check if the size decrease if not rise an exception + # Maybe check if the size decreases, if not raise an exception # x_i, y_i = where(-cost_reduce.mask) raise Exception("To many iteration: %d" % security_increment) security_increment += 1 i_min_value = cost_reduce.argmin() i, j = floor(i_min_value / shape[1]).astype(int), i_min_value % shape[1] - # Set to False all link + # Set to False all links mask[i_self_keep[i]] = False mask[:, i_other_keep[j]] = False cost_reduce.mask[i] = True @@ -1384,19 +1384,19 @@ @staticmethod def solve_first(cost, multiple_link=False): mask = ~cost.mask - # Count number of link by self obs and other obs + # Count number of links by self obs and other obs self_links = mask.sum(axis=1) other_links = mask.sum(axis=0) max_links = max(self_links.max(), other_links.max()) if max_links > 5: logger.warning("One observation have %d links", max_links) - # If some obs have multiple link, we keep only one link by eddy + # If some obs have multiple links, we keep only one link by eddy eddies_separation = 1 < self_links eddies_merge = 1 < other_links test = eddies_separation.any() or eddies_merge.any() if test: - # We extract matrix which contains concflict + # We extract the matrix that contains conflicts obs_linking_to_self = mask[eddies_separation].any(axis=0) obs_linking_to_other = mask[:, eddies_merge].any(axis=1) i_self_keep = where(obs_linking_to_other + eddies_separation)[0] @@ -1700,7 +1700,7 @@ def set_global_attr_netcdf(self, h_nc): def mask_from_polygons(self, polygons): """ - Return mask for all observation in one of polygons list + Return mask for all observations in one of the listed polygons :param list((array,array)) polygons: list of x/y array which be used to identify observations """ @@ -1724,7 +1724,7 @@ def extract_with_area(self, area, **kwargs): :param dict area: 4 coordinates in a dictionary to specify bounding box (lower left corner and upper right corner) :param dict kwargs: look at
:py:meth:`extract_with_mask` - :return: Return all eddy tracks which are in bounds + :return: Return all eddy trajectories in bounds :rtype: EddiesObservations .. code-block:: python @@ -1745,7 +1745,7 @@ def time_sub_sample(self, t0, time_step): """ Time sub sampling - :param int,float t0: reference time which will be keep + :param int,float t0: reference time that will be kept :param int,float time_step: keep every observation spaced by time_step """ mask = (self.time - t0) % time_step == 0 @@ -1779,7 +1779,7 @@ def scatter(self, ax, name=None, ref=None, factor=1, **kwargs): :param matplotlib.axes.Axes ax: matplotlib axe used to draw :param str,array,None name: variable used to fill the contour, if None all elements have the same color - :param float,None ref: if define use like west bound + :param float,None ref: if defined, all coordinates are wrapped with ref as western boundary :param float factor: multiply value by :param dict kwargs: look at :py:meth:`matplotlib.axes.Axes.scatter` :return: scatter mappable @@ -1811,7 +1811,7 @@ def filled( """ :param matplotlib.axes.Axes ax: matplotlib axe used to draw :param str,array,None varname: variable used to fill the contours, or an array of same size than obs - :param float,None ref: if define use like west bound? + :param float,None ref: if defined, all coordinates are wrapped with ref as western boundary :param bool intern: if True draw speed contours instead of effective contours :param str cmap: matplotlib colormap name :param int,None lut: Number of colors in the colormap @@ -2271,7 +2271,7 @@ def period(self): @property def nb_days(self): - """Return period days cover by dataset + """Return period in days covered by the dataset :return: Number of days :rtype: int @@ -2282,7 +2282,7 @@ @njit(cache=True) def grid_count_(grid, i, j): """ - Add one to each index + Add 1 to each index """ for i_, j_ in zip(i, j): grid[i_, j_] += 1 @@ -2305,7 +2305,7 @@ def grid_count_pixel_in( y_c, ): """ - Count how many time a pixel is used. + Count how many times a pixel is used. :param array grid: :param array x: x for all contour diff --git a/src/py_eddy_tracker/observations/tracking.py b/src/py_eddy_tracker/observations/tracking.py index 5902462d..ad1847d2 100644 --- a/src/py_eddy_tracker/observations/tracking.py +++ b/src/py_eddy_tracker/observations/tracking.py @@ -77,7 +77,7 @@ def iter_track(self): yield self.index(slice(i0, i0 + nb)) def get_missing_indices(self, dt): - """find indices where observations is missing. + """Find indexes where observations are missing. :param int,float dt: theorical delta time between 2 observations """ @@ -90,7 +90,7 @@ ) def fix_next_previous_obs(self): - """function used after 'insert_virtual', to correct next_obs and + """Function used after 'insert_virtual', to correct next_obs and previous obs.
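grid_count_ above is a plain scatter-add; with numpy alone, np.add.at gives the same result (editor's equivalent sketch: unlike a vectorized grid[i, j] += 1, it handles repeated index pairs correctly):

import numpy as np

grid = np.zeros((3, 3), dtype=int)
i = np.array([0, 0, 2])
j = np.array([1, 1, 2])
np.add.at(grid, (i, j), 1)  # the duplicated (0, 1) pair is counted twice
print(grid[0, 1], grid[2, 2])  # 2 1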
""" @@ -99,7 +99,7 @@ def fix_next_previous_obs(self): @property def nb_tracks(self): """ - Will count and send number of track + Count and return number of track """ if self.__nb_track is None: if len(self) == 0: @@ -150,7 +150,7 @@ def add_distance(self): def distance_to_next(self): """ - :return: array of distance in m, 0 when next obs if from another track + :return: array of distance in m, 0 when next obs is from another track :rtype: array """ d = distance( @@ -166,7 +166,7 @@ def distance_to_next(self): return d_ def normalize_longitude(self): - """Normalize all longitude + """Normalize all longitudes Normalize longitude field and in the same range : - longitude_max @@ -217,7 +217,7 @@ def elements(self): return list(set(elements)) def set_global_attr_netcdf(self, h_nc): - """Set global attr""" + """Set global attributes""" h_nc.title = "Cyclonic" if self.sign_type == -1 else "Anticyclonic" h_nc.Metadata_Conventions = "Unidata Dataset Discovery v1.0" h_nc.comment = "Surface product; mesoscale eddies" @@ -239,9 +239,9 @@ def extract_with_period(self, period, **kwargs): """ Extract within a time period - :param (int,int) period: two dates to define the period, must be specify from 1/1/1950 + :param (int,int) period: two dates to define the period, must be specified from 1/1/1950 :param dict kwargs: look at :py:meth:`extract_with_mask` - :return: Return all eddy tracks which are in bounds + :return: Return all eddy tracks in period :rtype: TrackEddiesObservations .. minigallery:: py_eddy_tracker.TrackEddiesObservations.extract_with_period @@ -264,9 +264,9 @@ def get_azimuth(self, equatorward=False): """ Return azimuth for each track. - Azimuth is computed with first and last observation + Azimuth is computed with first and last observations - :param bool equatorward: If True, Poleward are positive and equatorward negative + :param bool equatorward: If True, Poleward is positive and Equatorward negative :rtype: array """ i0, nb = self.index_from_track, self.nb_obs_by_track @@ -427,7 +427,7 @@ def extract_with_length(self, bounds): track_mask = self.nb_obs_by_track >= b0 else: logger.warning("No valid value for bounds") - raise Exception("One bounds must be positiv") + raise Exception("One bound must be positive") return self.extract_with_mask(track_mask.repeat(self.nb_obs_by_track)) def empty_dataset(self): @@ -474,7 +474,7 @@ def extract_with_mask( :param bool full_path: extract the full trajectory if only one part is selected :param bool remove_incomplete: delete trajectory if not fully selected :param bool compress_id: resample trajectory number to use a smaller range - :param bool reject_virtual: if only virtual are selected, the trajectory is removed + :param bool reject_virtual: if only virtuals are selected, the trajectory is removed :return: same object with the selected observations :rtype: self.__class__ """ @@ -525,10 +525,10 @@ def shape_polygon(self, intern=False): def display_shape(self, ax, ref=None, intern=False, **kwargs): """ - This function will draw the shape of each trajectory + This function draws the shape of each trajectory :param matplotlib.axes.Axes ax: ax to draw - :param float,int ref: if defined all coordinates will be wrapped with ref like west boundary + :param float,int ref: if defined, all coordinates are wrapped with ref as western boundary :param bool intern: If True use speed contour instead of effective contour :param dict kwargs: keyword arguments for Axes.plot :return: matplotlib mappable @@ -557,7 +557,7 @@ def close_tracks(self, other, 
nb_obs_min=10, **kwargs): :param self other: Atlas to compare :param int nb_obs_min: Minimal number of overlap for one trajectory :param dict kwargs: keyword arguments for match function - :return: return other atlas reduce to common track with self + :return: return other atlas reduced to common trajectories with self .. warning:: It could be a costly operation for huge dataset @@ -585,7 +585,7 @@ def plot(self, ax, ref=None, **kwargs): This function will draw path of each trajectory :param matplotlib.axes.Axes ax: ax to draw - :param float,int ref: if defined, all coordinates will be wrapped with ref like west boundary + :param float,int ref: if defined, all coordinates are wrapped with ref as western boundary :param dict kwargs: keyword arguments for Axes.plot :return: matplotlib mappable """ @@ -791,10 +791,10 @@ def track_loess_filter(half_window, x, y, track): """ Apply a loess filter on y field - :param int,float window: parameter of smoother + :param int,float half_window: parameter of smoother :param array_like x: must be growing for each track but could be irregular :param array_like y: field to smooth - :param array_like track: field which allow to separate path + :param array_like track: field that allows to separate path :return: Array smoothed :rtype: array_like From 08d7dc80d5b9ce4f7c1845846215d8e5bdc48fb5 Mon Sep 17 00:00:00 2001 From: CoriPegliasco <66008544+CoriPegliasco@users.noreply.github.com> Date: Wed, 7 Apr 2021 22:18:26 +0200 Subject: [PATCH 017/115] indexes -> indices (#80) --- src/py_eddy_tracker/dataset/grid.py | 2 +- src/py_eddy_tracker/generic.py | 2 +- src/py_eddy_tracker/observations/network.py | 8 ++++---- src/py_eddy_tracker/observations/observation.py | 6 +++--- src/py_eddy_tracker/observations/tracking.py | 4 ++-- 5 files changed, 11 insertions(+), 11 deletions(-) diff --git a/src/py_eddy_tracker/dataset/grid.py b/src/py_eddy_tracker/dataset/grid.py index 14fe9ae3..11227475 100644 --- a/src/py_eddy_tracker/dataset/grid.py +++ b/src/py_eddy_tracker/dataset/grid.py @@ -1275,7 +1275,7 @@ def get_step_in_km(self, lat, wave_length): min_wave_length = max(step_x_km, step_y_km) * 2 if wave_length < min_wave_length: logger.error( - "Wave_length too short for resolution, must be > %d km", + "wave_length too short for resolution, must be > %d km", ceil(min_wave_length), ) raise Exception() diff --git a/src/py_eddy_tracker/generic.py b/src/py_eddy_tracker/generic.py index 6689c8e5..530c2136 100644 --- a/src/py_eddy_tracker/generic.py +++ b/src/py_eddy_tracker/generic.py @@ -50,7 +50,7 @@ def count_consecutive(mask): @njit(cache=True) def reverse_index(index, nb): """ - Compute a list of indexes, which are not in index. + Compute a list of indices, which are not in index. :param array index: index of group which will be set to False :param array nb: Count for each group diff --git a/src/py_eddy_tracker/observations/network.py b/src/py_eddy_tracker/observations/network.py index c21659f0..a2fe8a0d 100644 --- a/src/py_eddy_tracker/observations/network.py +++ b/src/py_eddy_tracker/observations/network.py @@ -135,7 +135,7 @@ def find_segments_relative(self, obs, stopped=None, order=1): return nw.relatives([i_obs, i_stopped], order=order) def get_missing_indices(self, dt): - """Find indexes where observations are missing. + """Find indices where observations are missing. 
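To illustrate what track_loess_filter's arguments mean, here is a deliberately simplified editor's sketch: a plain box average of y inside half_window along x, restricted to one track at a time (the library applies a proper loess weighting, not this stand-in):

import numpy as np

def smooth_by_track(half_window, x, y, track):
    out = np.empty_like(y, dtype=float)
    for i in range(x.size):
        # neighbours on the same track, within half_window along x
        m = (track == track[i]) & (np.abs(x - x[i]) <= half_window)
        out[i] = y[m].mean()
    return out

x = np.array([0, 1, 2, 0, 1], dtype=float)
y = np.array([0.0, 1.0, 0.0, 5.0, 5.0])
track = np.array([1, 1, 1, 2, 2])
print(smooth_by_track(1, x, y, track))  # tracks are smoothed independently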
As network have all untracked observation in tracknumber `self.NOGROUP`, we don't compute them @@ -215,7 +215,7 @@ def longer_than(self, nb_day_min=-1, nb_day_max=-1): @classmethod def from_split_network(cls, group_dataset, indexs, **kwargs): """ - Build a NetworkObservations object with Group dataset and indexes + Build a NetworkObservations object with Group dataset and indices :param TrackEddiesObservations group_dataset: Group dataset :param indexs: result from split_network @@ -346,7 +346,7 @@ def find_link(self, i_observations, forward=True, backward=False): where water from obs `i_observation` could come from :param int,iterable(int) i_observation: - indexes of observation. Can be + indices of observation. Can be int, or iterable of int. :param bool forward, backward: if forward, search observations after obs. @@ -496,7 +496,7 @@ def relatives(self, obs, order=2): """ Extract the segments at a certain order from multiple observations. - :param iterable,int obs: indexes of observation for relatives computation. + :param iterable,int obs: indices of observation for relatives computation. Can be one observation (int) or collection of observations (iterable(int)) :param int order: order of relatives wanted. 0 means only observations in obs, 1 means direct relatives (1 interaction event), ... diff --git a/src/py_eddy_tracker/observations/observation.py b/src/py_eddy_tracker/observations/observation.py index d6f3c899..173f6c56 100644 --- a/src/py_eddy_tracker/observations/observation.py +++ b/src/py_eddy_tracker/observations/observation.py @@ -594,7 +594,7 @@ def iter_on(self, xname, bins=None): def align_on(self, other, var_name="time", **kwargs): """ - Align the time indexes of two datasets. + Align the time indices of two datasets. .. minigallery:: py_eddy_tracker.EddiesObservations.align_on """ @@ -1135,7 +1135,7 @@ def match( :param bool intern: if True, speed contour is used (default = effective contour) :param float cmin: 0 < cmin < 1, return only couples with score >= cmin :param dict kwargs: look at :py:meth:`vertice_overlap` - :return: return the indexes of the eddies in self coupled with eddies in + :return: return the indices of the eddies in self coupled with eddies in other and their associated score :rtype: (array(int), array(int), array(float)) @@ -1166,7 +1166,7 @@ def re_reference_index(index, ref): :param array,int index: local index to re ref :param slice,array ref: reference could be a slice in this case we juste add start to index - or could be indexes and in this case we need to translate + or could be indices and in this case we need to translate """ if isinstance(ref, slice): return index + ref.start diff --git a/src/py_eddy_tracker/observations/tracking.py b/src/py_eddy_tracker/observations/tracking.py index ad1847d2..b632270c 100644 --- a/src/py_eddy_tracker/observations/tracking.py +++ b/src/py_eddy_tracker/observations/tracking.py @@ -77,7 +77,7 @@ def iter_track(self): yield self.index(slice(i0, i0 + nb)) def get_missing_indices(self, dt): - """Find indexes where observations are missing. + """Find indices where observations are missing. 
:param int,float dt: theorical delta time between 2 observations """ @@ -618,7 +618,7 @@ def split_network(self, intern=True, **kwargs): # Initialisation # To store the id of the segments, the backward and forward cost associations ids["track"], ids["previous_cost"], ids["next_cost"] = 0, 0, 0 - # To store the indexes of the backward and forward observations associated + # To store the indices of the backward and forward observations associated ids["previous_obs"], ids["next_obs"] = -1, -1 # At the end, ids["previous_obs"] == -1 means the start of a non-split segment # and ids["next_obs"] == -1 means the end of a non-merged segment From 4bf81470ee01603612f6f23063ac498a7d1877fe Mon Sep 17 00:00:00 2001 From: AntSimi <36040805+AntSimi@users.noreply.github.com> Date: Mon, 12 Apr 2021 23:31:40 +0200 Subject: [PATCH 018/115] Add information about speed profile in an example --- examples/01_general_things/pet_storage.py | 57 +++++++++++++++++-- .../01_general_things/pet_storage.ipynb | 57 ++++++++++++++++++- 2 files changed, 105 insertions(+), 9 deletions(-) diff --git a/examples/01_general_things/pet_storage.py b/examples/01_general_things/pet_storage.py index 9f0ec61e..ccd01f1c 100644 --- a/examples/01_general_things/pet_storage.py +++ b/examples/01_general_things/pet_storage.py @@ -16,8 +16,10 @@ """ import py_eddy_tracker_sample +from matplotlib import pyplot as plt +from numpy import arange, outer -from py_eddy_tracker.data import get_demo_path, get_remote_demo_sample +from py_eddy_tracker.data import get_demo_path from py_eddy_tracker.observations.network import NetworkObservations from py_eddy_tracker.observations.observation import EddiesObservations, Table from py_eddy_tracker.observations.tracking import TrackEddiesObservations @@ -58,6 +60,53 @@ # --------------- # All contours are stored on the same number of points, and are resampled if needed with an algorithm to be stored as objects +# %% +# Speed profile storage +# --------------------- +# Speed profile is an interpolation of the speed mean along each contour. +# For each contour included in an eddy, we compute the mean of speed along the contour, +# and then we interpolate the speed mean array on a fixed size array. +# +# Several fields are available to understand "uavg_profile" : +# 0. - num_contours : Number of contours in eddies, must be equal to amplitude divided by isoline step +# 1. - height_inner_contour : height of inner contour used +# 2. - height_max_speed_contour : height of max speed contour used +# 3. - height_external_contour : height of outer contour used +# +# Last value of "uavg_profile" is for inner contour and first value for outer contour.
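A minimal sketch of the fixed-size resampling described above (editor's illustration with plain numpy and hypothetical values; the stored "uavg_profile" is produced by the library itself):

from numpy import interp, linspace

mean_speed = [0.12, 0.30, 0.22, 0.15]  # one mean speed per contour, first value for the outer contour
nb_fixed = 16
profile = interp(
    linspace(0, 1, nb_fixed), linspace(0, 1, len(mean_speed)), mean_speed
)
print(profile.size)  # 16, whatever the number of contours in the eddy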
+ +# Observations selection of "uavg_profile" with high number of contour(Eddy with high amplitude) +e = eddies_collections.extract_with_mask(eddies_collections.num_contours > 15) + +# %% + +# Raw display of profiles with more than 15 contours +ax = plt.subplot(111) +_ = ax.plot(e.uavg_profile.T, lw=0.5) + +# %% + +# Profile from inner to outter +ax = plt.subplot(111) +ax.plot(e.uavg_profile[:, ::-1].T, lw=0.5) +_ = ax.set_xlabel("From inner to outter contour"), ax.set_ylabel("Speed (m/s)") + +# %% + +# If we normalize indice of contour to set speed contour to 1 and inner contour to 0 +ax = plt.subplot(111) +h_in = e.height_inner_contour +h_s = e.height_max_speed_contour +h_e = e.height_external_contour +r = (h_e - h_in) / (h_s - h_in) +nb_pt = e.uavg_profile.shape[1] +# Create an x array for each profile +x = outer(arange(nb_pt) / nb_pt, r) + +ax.plot(x, e.uavg_profile[:, ::-1].T, lw=0.5) +_ = ax.set_xlabel("From inner to outter contour"), ax.set_ylabel("Speed (m/s)") + + # %% # Trajectories # ------------ @@ -86,11 +135,7 @@ # - next_obs : Index of the next observation in the full dataset, if -1 there are no next observation (the segment ends) # - previous_cost : Result of the cost function (1 is a good association, 0 is bad) with previous observation # - next_cost : Result of the cost function (1 is a good association, 0 is bad) with next observation -eddies_network = NetworkObservations.load_file( - get_remote_demo_sample( - "eddies_med_adt_allsat_dt2018_err70_filt500_order1/Anticyclonic_network.nc" - ) -) +eddies_network = NetworkObservations.load_file(get_demo_path("network_med.nc")) eddies_network.field_table() # %% diff --git a/notebooks/python_module/01_general_things/pet_storage.ipynb b/notebooks/python_module/01_general_things/pet_storage.ipynb index 4b4a6630..250591ee 100644 --- a/notebooks/python_module/01_general_things/pet_storage.ipynb +++ b/notebooks/python_module/01_general_things/pet_storage.ipynb @@ -26,7 +26,7 @@ }, "outputs": [], "source": [ - "import py_eddy_tracker_sample\n\nfrom py_eddy_tracker.data import get_demo_path, get_remote_demo_sample\nfrom py_eddy_tracker.observations.network import NetworkObservations\nfrom py_eddy_tracker.observations.observation import EddiesObservations, Table\nfrom py_eddy_tracker.observations.tracking import TrackEddiesObservations" + "import py_eddy_tracker_sample\nfrom matplotlib import pyplot as plt\nfrom numpy import arange, outer\n\nfrom py_eddy_tracker.data import get_demo_path\nfrom py_eddy_tracker.observations.network import NetworkObservations\nfrom py_eddy_tracker.observations.observation import EddiesObservations, Table\nfrom py_eddy_tracker.observations.tracking import TrackEddiesObservations" ] }, { @@ -108,6 +108,57 @@ "## Contour storage\nAll contours are stored on the same number of points, and are resampled if needed with an algorithm to be stored as objects\n\n" ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Speed profile storage\nSpeed profile is an interpolation of speed mean along each contour.\nFor each contour included in eddy, we compute mean of speed along the contour,\nand after we interpolate speed mean array on a fixed size array.\n\nSeveral field are available to understand \"uavg_profile\" :\n 0. - num_contours : Number of contour in eddies, must be equal to amplitude divide by isoline step\n 1. - height_inner_contour : height of inner contour used\n 2. - height_max_speed_contour : height of max speed contour used\n 3. 
- height_external_contour : height of outter contour used\n\nLast value of \"uavg_profile\" is for inner contour and first value for outter contour.\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "# Observations selection of \"uavg_profile\" with high number of contour(Eddy with high amplitude)\ne = eddies_collections.extract_with_mask(eddies_collections.num_contours > 15)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "# Raw display of profiles with more than 15 contours\nax = plt.subplot(111)\n_ = ax.plot(e.uavg_profile.T, lw=0.5)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "# Profile from inner to outter\nax = plt.subplot(111)\nax.plot(e.uavg_profile[:, ::-1].T, lw=0.5)\n_ = ax.set_xlabel(\"From inner to outter contour\"), ax.set_ylabel(\"Speed (m/s)\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "# If we normalize indice of contour to set speed contour to 1 and inner contour to 0\nax = plt.subplot(111)\nh_in = e.height_inner_contour\nh_s = e.height_max_speed_contour\nh_e = e.height_external_contour\nr = (h_e - h_in) / (h_s - h_in)\nnb_pt = e.uavg_profile.shape[1]\n# Create an x array for each profile\nx = outer(arange(nb_pt) / nb_pt, r)\n\nax.plot(x, e.uavg_profile[:, ::-1].T, lw=0.5)\n_ = ax.set_xlabel(\"From inner to outter contour\"), ax.set_ylabel(\"Speed (m/s)\")" + ] + }, { "cell_type": "markdown", "metadata": {}, @@ -141,7 +192,7 @@ }, "outputs": [], "source": [ - "eddies_network = NetworkObservations.load_file(\n get_remote_demo_sample(\n \"eddies_med_adt_allsat_dt2018_err70_filt500_order1/Anticyclonic_network.nc\"\n )\n)\neddies_network.field_table()" + "eddies_network = NetworkObservations.load_file(get_demo_path(\"network_med.nc\"))\neddies_network.field_table()" ] }, { @@ -179,7 +230,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.9.2" } }, "nbformat": 4, From b91c786b0045018115ff6596e05bf7329ccc3cf4 Mon Sep 17 00:00:00 2001 From: AntSimi <36040805+AntSimi@users.noreply.github.com> Date: Wed, 28 Apr 2021 21:55:48 +0200 Subject: [PATCH 019/115] Add information about method --- examples/07_cube_manipulation/pet_fsle_med.py | 4 + examples/16_network/pet_ioannou_2017_case.py | 10 +- .../01_general_things/pet_storage.ipynb | 2 +- .../07_cube_manipulation/pet_fsle_med.ipynb | 4 +- .../14_generic_tools/pet_fit_contour.ipynb | 12 +-- .../16_network/pet_follow_particle.ipynb | 6 +- .../16_network/pet_ioannou_2017_case.ipynb | 92 ++++++++++++++++++- 7 files changed, 112 insertions(+), 18 deletions(-) diff --git a/examples/07_cube_manipulation/pet_fsle_med.py b/examples/07_cube_manipulation/pet_fsle_med.py index 46c5fdcc..b4a51265 100644 --- a/examples/07_cube_manipulation/pet_fsle_med.py +++ b/examples/07_cube_manipulation/pet_fsle_med.py @@ -26,6 +26,10 @@ # %% # ADT in med # ---------- +# :py:meth:`~py_eddy_tracker.dataset.grid.GridCollection.from_netcdf_cube` method is +# made for data stores in time cube, you could use also +# :py:meth:`~py_eddy_tracker.dataset.grid.GridCollection.from_netcdf_list` method to +# load data-cube from multiple file. 
c = GridCollection.from_netcdf_cube( get_demo_path("dt_med_allsat_phy_l4_2005T2.nc"), "longitude", diff --git a/examples/16_network/pet_ioannou_2017_case.py b/examples/16_network/pet_ioannou_2017_case.py index 2bffbb31..768f0c88 100644 --- a/examples/16_network/pet_ioannou_2017_case.py +++ b/examples/16_network/pet_ioannou_2017_case.py @@ -21,7 +21,7 @@ from py_eddy_tracker.gui import GUI_AXES from py_eddy_tracker.observations.network import NetworkObservations -from py_eddy_tracker.generic import coordinates_to_local, local_to_coordinates +from py_eddy_tracker.generic import coordinates_to_local from py_eddy_tracker.poly import fit_ellipse # %% @@ -217,22 +217,22 @@ def update_axes(ax, mappable=None): # Theta ax = timeline_axes() m = close_to_i3.scatter_timeline(ax, theta_, vmin=-pi / 2, vmax=pi / 2, cmap="hsv") -cb = update_axes(ax, m["scatter"]) +_ = update_axes(ax, m["scatter"]) # %% # a ax = timeline_axes() m = close_to_i3.scatter_timeline(ax, a_ * 1e-3, vmin=0, vmax=80, cmap="Spectral_r") -cb = update_axes(ax, m["scatter"]) +_ = update_axes(ax, m["scatter"]) # %% # b ax = timeline_axes() m = close_to_i3.scatter_timeline(ax, b_ * 1e-3, vmin=0, vmax=80, cmap="Spectral_r") -cb = update_axes(ax, m["scatter"]) +_ = update_axes(ax, m["scatter"]) # %% # a/b ax = timeline_axes() m = close_to_i3.scatter_timeline(ax, a_ / b_, vmin=1, vmax=2, cmap="Spectral_r") -cb = update_axes(ax, m["scatter"]) +_ = update_axes(ax, m["scatter"]) diff --git a/notebooks/python_module/01_general_things/pet_storage.ipynb b/notebooks/python_module/01_general_things/pet_storage.ipynb index 250591ee..fa8d1a55 100644 --- a/notebooks/python_module/01_general_things/pet_storage.ipynb +++ b/notebooks/python_module/01_general_things/pet_storage.ipynb @@ -40,7 +40,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "Eddies files (zarr or netcdf) could be loaded with ```load_file``` method:\n\n" + "Eddies files (zarr or netcdf) can be loaded with ```load_file``` method:\n\n" ] }, { diff --git a/notebooks/python_module/07_cube_manipulation/pet_fsle_med.ipynb b/notebooks/python_module/07_cube_manipulation/pet_fsle_med.ipynb index e821df6d..4f2e1467 100644 --- a/notebooks/python_module/07_cube_manipulation/pet_fsle_med.ipynb +++ b/notebooks/python_module/07_cube_manipulation/pet_fsle_med.ipynb @@ -33,7 +33,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## ADT in med\n\n" + "## ADT in med\n:py:meth:`~py_eddy_tracker.dataset.grid.GridCollection.from_netcdf_cube` method is\nmade for data stores in time cube, you could use also \n:py:meth:`~py_eddy_tracker.dataset.grid.GridCollection.from_netcdf_list` method to\nload data-cube from multiple file.\n\n" ] }, { @@ -172,7 +172,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.9.2" } }, "nbformat": 4, diff --git a/notebooks/python_module/14_generic_tools/pet_fit_contour.ipynb b/notebooks/python_module/14_generic_tools/pet_fit_contour.ipynb index 4cec72b2..5306fa0c 100644 --- a/notebooks/python_module/14_generic_tools/pet_fit_contour.ipynb +++ b/notebooks/python_module/14_generic_tools/pet_fit_contour.ipynb @@ -26,7 +26,7 @@ }, "outputs": [], "source": [ - "from matplotlib import pyplot as plt\nfrom numpy import cos, linspace, radians, sin\n\nfrom py_eddy_tracker import data\nfrom py_eddy_tracker.generic import coordinates_to_local, local_to_coordinates\nfrom py_eddy_tracker.observations.observation import EddiesObservations\nfrom py_eddy_tracker.poly import fit_circle_, fit_ellips" + "from 
matplotlib import pyplot as plt\nfrom numpy import cos, linspace, radians, sin\n\nfrom py_eddy_tracker import data\nfrom py_eddy_tracker.generic import coordinates_to_local, local_to_coordinates\nfrom py_eddy_tracker.observations.observation import EddiesObservations\nfrom py_eddy_tracker.poly import fit_circle_, fit_ellipse" ] }, { @@ -51,7 +51,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "Function to draw circle or ellips from parameter\n\n" + "Function to draw circle or ellipse from parameter\n\n" ] }, { @@ -62,14 +62,14 @@ }, "outputs": [], "source": [ - "def build_circle(x0, y0, r):\n angle = radians(linspace(0, 360, 50))\n x_norm, y_norm = cos(angle), sin(angle)\n return local_to_coordinates(x_norm * r, y_norm * r, x0, y0)\n\n\ndef build_ellips(x0, y0, a, b, theta):\n angle = radians(linspace(0, 360, 50))\n x = a * cos(theta) * cos(angle) - b * sin(theta) * sin(angle)\n y = a * sin(theta) * cos(angle) + b * cos(theta) * sin(angle)\n return local_to_coordinates(x, y, x0, y0)" + "def build_circle(x0, y0, r):\n angle = radians(linspace(0, 360, 50))\n x_norm, y_norm = cos(angle), sin(angle)\n return local_to_coordinates(x_norm * r, y_norm * r, x0, y0)\n\n\ndef build_ellipse(x0, y0, a, b, theta):\n angle = radians(linspace(0, 360, 50))\n x = a * cos(theta) * cos(angle) - b * sin(theta) * sin(angle)\n y = a * sin(theta) * cos(angle) + b * cos(theta) * sin(angle)\n return local_to_coordinates(x, y, x0, y0)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "Plot fitted circle or ellips on stored contour\n\n" + "Plot fitted circle or ellipse on stored contour\n\n" ] }, { @@ -80,7 +80,7 @@ }, "outputs": [], "source": [ - "xs, ys = a.contour_lon_s, a.contour_lat_s\n\nfig = plt.figure(figsize=(15, 15))\n\nj = 1\nfor i in range(0, 800, 30):\n x, y = xs[i], ys[i]\n x0_, y0_ = x.mean(), y.mean()\n x_, y_ = coordinates_to_local(x, y, x0_, y0_)\n ax = fig.add_subplot(4, 4, j)\n ax.grid(), ax.set_aspect(\"equal\")\n ax.plot(x, y, label=\"store\", color=\"black\")\n x0, y0, a, b, theta = fit_ellips(x_, y_)\n x0, y0 = local_to_coordinates(x0, y0, x0_, y0_)\n ax.plot(*build_ellips(x0, y0, a, b, theta), label=\"ellips\", color=\"green\")\n x0, y0, radius, shape_error = fit_circle_(x_, y_)\n x0, y0 = local_to_coordinates(x0, y0, x0_, y0_)\n ax.plot(*build_circle(x0, y0, radius), label=\"circle\", color=\"red\", lw=0.5)\n if j == 16:\n break\n j += 1" + "xs, ys = a.contour_lon_s, a.contour_lat_s\n\nfig = plt.figure(figsize=(15, 15))\n\nj = 1\nfor i in range(0, 800, 30):\n x, y = xs[i], ys[i]\n x0_, y0_ = x.mean(), y.mean()\n x_, y_ = coordinates_to_local(x, y, x0_, y0_)\n ax = fig.add_subplot(4, 4, j)\n ax.grid(), ax.set_aspect(\"equal\")\n ax.plot(x, y, label=\"store\", color=\"black\")\n x0, y0, a, b, theta = fit_ellipse(x_, y_)\n x0, y0 = local_to_coordinates(x0, y0, x0_, y0_)\n ax.plot(*build_ellipse(x0, y0, a, b, theta), label=\"ellipse\", color=\"green\")\n x0, y0, radius, shape_error = fit_circle_(x_, y_)\n x0, y0 = local_to_coordinates(x0, y0, x0_, y0_)\n ax.plot(*build_circle(x0, y0, radius), label=\"circle\", color=\"red\", lw=0.5)\n if j == 16:\n break\n j += 1" ] } ], @@ -100,7 +100,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.9.2" } }, "nbformat": 4, diff --git a/notebooks/python_module/16_network/pet_follow_particle.ipynb b/notebooks/python_module/16_network/pet_follow_particle.ipynb index 30c85a49..28d0048d 100644 --- a/notebooks/python_module/16_network/pet_follow_particle.ipynb +++ 
b/notebooks/python_module/16_network/pet_follow_particle.ipynb @@ -37,7 +37,7 @@ }, "outputs": [], "source": [ - "class VideoAnimation(FuncAnimation):\n def _repr_html_(self, *args, **kwargs):\n \"\"\"To get video in html and have a player\"\"\"\n content = self.to_html5_video()\n return re.sub(\n r'width=\"[0-9]*\"\\sheight=\"[0-9]*\"', 'width=\"100%\" height=\"100%\"', content\n )\n\n def save(self, *args, **kwargs):\n if args[0].endswith(\"gif\"):\n # In this case gif is use to create thumbnail which are not use but consume same time than video\n # So we create an empty file, to save time\n with open(args[0], \"w\") as _:\n pass\n return\n return super().save(*args, **kwargs)" + "class VideoAnimation(FuncAnimation):\n def _repr_html_(self, *args, **kwargs):\n \"\"\"To get video in html and have a player\"\"\"\n content = self.to_html5_video()\n return re.sub(\n r'width=\"[0-9]*\"\\sheight=\"[0-9]*\"', 'width=\"100%\" height=\"100%\"', content\n )\n\n def save(self, *args, **kwargs):\n if args[0].endswith(\"gif\"):\n # In this case gif is used to create thumbnail which are not used but consumes same time than video\n # So we create an empty file, to save time\n with open(args[0], \"w\") as _:\n pass\n return\n return super().save(*args, **kwargs)" ] }, { @@ -120,7 +120,7 @@ }, "outputs": [], "source": [ - "def advect(x, y, c, t0, delta_t):\n \"\"\"\n Advect particle from t0 to t0 + delta_t, with data cube.\n \"\"\"\n kw = dict(nb_step=6, time_step=86400 / 6)\n if delta_t < 0:\n kw[\"backward\"] = True\n delta_t = -delta_t\n p = c.advect(x, y, \"u\", \"v\", t_init=t0, **kw)\n for _ in range(delta_t):\n t, x, y = p.__next__()\n return t, x, y\n\n\ndef particle_candidate(x, y, c, eddies, t_start, i_target, pct, **kwargs):\n # Obs from initial time\n m_start = eddies.time == t_start\n e = eddies.extract_with_mask(m_start)\n # to be able to get global index\n translate_start = where(m_start)[0]\n # Identify particle in eddies(only in core)\n i_start = e.contains(x, y, intern=True)\n m = i_start != -1\n x, y, i_start = x[m], y[m], i_start[m]\n # Advect\n t_end, x, y = advect(x, y, c, t_start, **kwargs)\n # eddies at last date\n m_end = eddies.time == t_end / 86400\n e_end = eddies.extract_with_mask(m_end)\n # to be able to get global index\n translate_end = where(m_end)[0]\n # Id eddies for each alive particle(in core and extern)\n i_end = e_end.contains(x, y)\n # compute matrix and filled target array\n get_matrix(i_start, i_end, translate_start, translate_end, i_target, pct)\n\n\n@njit(cache=True)\ndef get_matrix(i_start, i_end, translate_start, translate_end, i_target, pct):\n nb_start, nb_end = translate_start.size, translate_end.size\n # Matrix which will store count for every couple\n count = zeros((nb_start, nb_end), dtype=nb_types.int32)\n # Number of particle in each origin observation\n ref = zeros(nb_start, dtype=nb_types.int32)\n # For each particle\n for i in range(i_start.size):\n i_end_ = i_end[i]\n i_start_ = i_start[i]\n if i_end_ != -1:\n count[i_start_, i_end_] += 1\n ref[i_start_] += 1\n for i in range(nb_start):\n for j in range(nb_end):\n pct_ = count[i, j]\n # If there are particle from i to j\n if pct_ != 0:\n # Get percent\n pct_ = pct_ / ref[i] * 100.0\n # Get indices in full dataset\n i_, j_ = translate_start[i], translate_end[j]\n pct_0 = pct[i_, 0]\n if pct_ > pct_0:\n pct[i_, 1] = pct_0\n pct[i_, 0] = pct_\n i_target[i_, 1] = i_target[i_, 0]\n i_target[i_, 0] = j_\n elif pct_ > pct[i_, 1]:\n pct[i_, 1] = pct_\n i_target[i_, 1] = j_\n return i_target, pct" + "def 
advect(x, y, c, t0, delta_t):\n \"\"\"\n Advect particle from t0 to t0 + delta_t, with data cube.\n \"\"\"\n kw = dict(nb_step=6, time_step=86400 / 6)\n if delta_t < 0:\n kw[\"backward\"] = True\n delta_t = -delta_t\n p = c.advect(x, y, \"u\", \"v\", t_init=t0, **kw)\n for _ in range(delta_t):\n t, x, y = p.__next__()\n return t, x, y\n\n\ndef particle_candidate(x, y, c, eddies, t_start, i_target, pct, **kwargs):\n # Obs from initial time\n m_start = eddies.time == t_start\n e = eddies.extract_with_mask(m_start)\n # to be able to get global index\n translate_start = where(m_start)[0]\n # Identify particle in eddies (only in core)\n i_start = e.contains(x, y, intern=True)\n m = i_start != -1\n x, y, i_start = x[m], y[m], i_start[m]\n # Advect\n t_end, x, y = advect(x, y, c, t_start, **kwargs)\n # eddies at last date\n m_end = eddies.time == t_end / 86400\n e_end = eddies.extract_with_mask(m_end)\n # to be able to get global index\n translate_end = where(m_end)[0]\n # Id eddies for each alive particle (in core and extern)\n i_end = e_end.contains(x, y)\n # compute matrix and fill target array\n get_matrix(i_start, i_end, translate_start, translate_end, i_target, pct)\n\n\n@njit(cache=True)\ndef get_matrix(i_start, i_end, translate_start, translate_end, i_target, pct):\n nb_start, nb_end = translate_start.size, translate_end.size\n # Matrix which will store count for every couple\n count = zeros((nb_start, nb_end), dtype=nb_types.int32)\n # Number of particles in each origin observation\n ref = zeros(nb_start, dtype=nb_types.int32)\n # For each particle\n for i in range(i_start.size):\n i_end_ = i_end[i]\n i_start_ = i_start[i]\n if i_end_ != -1:\n count[i_start_, i_end_] += 1\n ref[i_start_] += 1\n for i in range(nb_start):\n for j in range(nb_end):\n pct_ = count[i, j]\n # If there are particles from i to j\n if pct_ != 0:\n # Get percent\n pct_ = pct_ / ref[i] * 100.0\n # Get indices in full dataset\n i_, j_ = translate_start[i], translate_end[j]\n pct_0 = pct[i_, 0]\n if pct_ > pct_0:\n pct[i_, 1] = pct_0\n pct[i_, 0] = pct_\n i_target[i_, 1] = i_target[i_, 0]\n i_target[i_, 0] = j_\n elif pct_ > pct[i_, 1]:\n pct[i_, 1] = pct_\n i_target[i_, 1] = j_\n return i_target, pct" ] }, { @@ -169,7 +169,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.9.2" } }, "nbformat": 4, diff --git a/notebooks/python_module/16_network/pet_ioannou_2017_case.ipynb b/notebooks/python_module/16_network/pet_ioannou_2017_case.ipynb index 743b753f..9b3d40d6 100644 --- a/notebooks/python_module/16_network/pet_ioannou_2017_case.ipynb +++ b/notebooks/python_module/16_network/pet_ioannou_2017_case.ipynb @@ -26,7 +26,7 @@ }, "outputs": [], "source": [ - "import re\nfrom datetime import datetime, timedelta\n\nfrom matplotlib import colors\nfrom matplotlib import pyplot as plt\nfrom matplotlib.animation import FuncAnimation\nfrom matplotlib.ticker import FuncFormatter\nfrom numpy import arange, where\n\nfrom py_eddy_tracker.appli.gui import Anim\nfrom py_eddy_tracker.data import get_demo_path\nfrom py_eddy_tracker.gui import GUI_AXES\nfrom py_eddy_tracker.observations.network import NetworkObservations" + "import re\nfrom datetime import datetime, timedelta\n\nfrom matplotlib import colors\nfrom matplotlib import pyplot as plt\nfrom matplotlib.animation import FuncAnimation\nfrom matplotlib.ticker import FuncFormatter\nfrom numpy import arange, where, array, pi\n\nfrom py_eddy_tracker.appli.gui import Anim\nfrom py_eddy_tracker.data import 
get_demo_path\nfrom py_eddy_tracker.gui import GUI_AXES\nfrom py_eddy_tracker.observations.network import NetworkObservations\n\nfrom py_eddy_tracker.generic import coordinates_to_local\nfrom py_eddy_tracker.poly import fit_ellipse"
      ]
    },
    {
@@ -230,6 +230,96 @@
      "source": [
        "ax = timeline_axes()\nm = close_to_i3.scatter_timeline(ax, \"shape_error_e\", vmin=14, vmax=70, **kw)\ncb = update_axes(ax, m[\"scatter\"])\ncb.set_label(\"Effective shape error\")"
      ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Rotation angle\nFor each obs, fit an ellipse to the contour, with theta the angle from the x-axis,\na the semi-axis in the x direction and b the semi-axis in the y direction\n\n"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "theta_ = list()\na_ = list()\nb_ = list()\nfor obs in close_to_i3:\n    x, y = obs[\"contour_lon_s\"], obs[\"contour_lat_s\"]\n    x0_, y0_ = x.mean(), y.mean()\n    x_, y_ = coordinates_to_local(x, y, x0_, y0_)\n    x0, y0, a, b, theta = fit_ellipse(x_, y_)\n    theta_.append(theta)\n    a_.append(a)\n    b_.append(b)\na_ = array(a_)\nb_ = array(b_)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Theta\n\n"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "ax = timeline_axes()\nm = close_to_i3.scatter_timeline(ax, theta_, vmin=-pi / 2, vmax=pi / 2, cmap=\"hsv\")\n_ = update_axes(ax, m[\"scatter\"])"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "a\n\n"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "ax = timeline_axes()\nm = close_to_i3.scatter_timeline(ax, a_ * 1e-3, vmin=0, vmax=80, cmap=\"Spectral_r\")\n_ = update_axes(ax, m[\"scatter\"])"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "b\n\n"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "ax = timeline_axes()\nm = close_to_i3.scatter_timeline(ax, b_ * 1e-3, vmin=0, vmax=80, cmap=\"Spectral_r\")\n_ = update_axes(ax, m[\"scatter\"])"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "a/b\n\n"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": false
+   },
+   "outputs": [],
+   "source": [
+    "ax = timeline_axes()\nm = close_to_i3.scatter_timeline(ax, a_ / b_, vmin=1, vmax=2, cmap=\"Spectral_r\")\n_ = update_axes(ax, m[\"scatter\"])"
+   ]
+  }
 ],
 "metadata": {
From 81eaf1574cf0170527367cd21e887b3789b236c2 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Cl=C3=A9ment?= <49512274+ludwigVonKoopa@users.noreply.github.com>
Date: Wed, 12 May 2021 14:30:40 +0200
Subject: [PATCH 020/115] add coherence function, and minor corrections

add shift_files to GridCollection
change heigth to height
correct FIXME in function documentation
correct tests
---
 src/py_eddy_tracker/dataset/grid.py         |  10 +
 src/py_eddy_tracker/observations/network.py | 311 +++++++++++++++++-
 .../observations/observation.py             |   2 +-
 3 files changed, 308 insertions(+), 15 deletions(-)

diff --git a/src/py_eddy_tracker/dataset/grid.py b/src/py_eddy_tracker/dataset/grid.py
index 11227475..ea602cfa 100644
--- a/src/py_eddy_tracker/dataset/grid.py
+++ b/src/py_eddy_tracker/dataset/grid.py
@@ -2264,6 +2264,16 @@ def from_netcdf_list(cls, filenames, t, x_name, y_name, indexs=None, heigth=None
new.datasets.append((t, d))
         return new
 
+    def shift_files(self, t, filename, x_name, y_name, indexs, heigth):
+        """Add next file to the list and remove the oldest"""
+
+        self.datasets = self.datasets[1:]
+
+        d = RegularGridDataset(filename, x_name, y_name, indexs=indexs)
+        if heigth is not None:
+            d.add_uv(heigth)
+        self.datasets.append((t, d))
+
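+        # A sketch of the intended rolling-window use (file names and julian
+        # days are hypothetical, one file per day):
+        #
+        #     fname = lambda d: f"/tmp/dt_global_allsat_phy_l4_{d}.nc"
+        #     days = list(range(20000, 20017))
+        #     c = GridCollection.from_netcdf_list(
+        #         [fname(d) for d in days], days, "longitude", "latitude"
+        #     )
+        #     for day in range(20017, 20100):
+        #         # the oldest dataset is dropped, the newest appended
+        #         c.shift_files(day, fname(day), "longitude", "latitude", None, None)
+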
     def interp(self, grid_name, t, lons, lats, method="bilinear"):
         """
         Compute z over lons, lats
diff --git a/src/py_eddy_tracker/observations/network.py b/src/py_eddy_tracker/observations/network.py
index 58f926a1..5fe0727a 100644
--- a/src/py_eddy_tracker/observations/network.py
+++ b/src/py_eddy_tracker/observations/network.py
@@ -6,6 +6,7 @@
 from glob import glob
 
 from numba import njit
+from numba import types as nb_types
 from numpy import (
     arange,
     array,
@@ -20,13 +21,16 @@
     unique,
     where,
     zeros,
+    meshgrid,
 )
+import zarr
 
 from ..generic import build_index, wrap_longitude
-from ..poly import bbox_intersection, vertice_overlap
+from ..poly import bbox_intersection, vertice_overlap, group_obs
 from .groups import GroupEddiesObservations, get_missing_indices
 from .observation import EddiesObservations
 from .tracking import TrackEddiesObservations, track_loess_filter, track_median_filter
+from ..dataset.grid import GridCollection
 
 logger = logging.getLogger("pet")
 
@@ -97,6 +101,109 @@ def fix_next_previous_obs(next_obs, previous_obs, flag_virtual):
             previous_obs[i_o + 1] = i_o
 
 
+def advect(x, y, c, t0, delta_t):
+    """
+    Advect particle from t0 to t0 + delta_t, with data cube.
+
+    :param np.array(float) x: longitude of particles
+    :param np.array(float) y: latitude of particles
+    :param `~py_eddy_tracker.dataset.grid.GridCollection` c: GridCollection with speed for particles
+    :param int t0: julian day of advection start
+    :param int delta_t: number of days to advect
+    """
+
+    kw = dict(nb_step=6, time_step=86400 / 6)
+    if delta_t < 0:
+        kw["backward"] = True
+        delta_t = -delta_t
+    p = c.advect(x, y, "u", "v", t_init=t0, **kw)
+    for _ in range(delta_t):
+        t, x, y = p.__next__()
+    return t, x, y
+
+
+def particle_candidate(x, y, c, eddies, t_start, i_target, pct, **kwargs):
+    """Select particles within eddies, advect them, return target observation and associated percentages
+
+    :param np.array(float) x: longitude of particles
+    :param np.array(float) y: latitude of particles
+    :param `~py_eddy_tracker.dataset.grid.GridCollection` c: GridCollection with speed for particles
+    :param NetworkObservations eddies: NetworkObservations considered
+    :param int t_start: julian day of the advection
+    :param np.array(int) i_target: corresponding obs where particles are advected
+    :param np.array(int) pct: corresponding percentage of advected particles
+    :param dict kwargs: dict of params given to `advect`
+    """
+
+    # Obs from initial time
+    m_start = eddies.time == t_start
+
+    e = eddies.extract_with_mask(m_start)
+    # to be able to get global index
+    translate_start = where(m_start)[0]
+    # Identify particle in eddies (only in core)
+    i_start = e.contains(x, y, intern=True)
+    m = i_start != -1
+
+    x, y, i_start = x[m], y[m], i_start[m]
+    # Advect
+    t_end, x, y = advect(x, y, c, t_start, **kwargs)
+    # eddies at last date
+    m_end = eddies.time == t_end / 86400
+    e_end = eddies.extract_with_mask(m_end)
+    # to be able to get global index
+    translate_end = where(m_end)[0]
+    # Id eddies for each alive particle (in core and extern)
+    i_end = e_end.contains(x, y)
+    # compute matrix and fill target array
+    get_matrix(i_start, i_end, translate_start, translate_end, i_target, pct)
+
+
+@njit(cache=True)
+def get_matrix(i_start, i_end, translate_start, translate_end, i_target, pct):
+    """Compute target observation and associated percentages
+
+    :param np.array(int) i_start: indices of associated contours at starting advection day
+    :param np.array(int) i_end: indices of associated contours after advection
+    :param np.array(int) translate_start: corresponding global indices at starting advection day
+    :param np.array(int) translate_end: corresponding global indices after advection
+    :param np.array(int) i_target: corresponding obs where particles are advected
+    :param np.array(int) pct: corresponding percentage of advected particles
+    """
+
+    nb_start, nb_end = translate_start.size, translate_end.size
+    # Matrix which will store count for every couple
+    count = zeros((nb_start, nb_end), dtype=nb_types.int32)
+    # Number of particles in each origin observation
+    ref = zeros(nb_start, dtype=nb_types.int32)
+    # For each particle
+    for i in range(i_start.size):
+        i_end_ = i_end[i]
+        i_start_ = i_start[i]
+        if i_end_ != -1:
+            count[i_start_, i_end_] += 1
+        ref[i_start_] += 1
+    for i in range(nb_start):
+        for j in range(nb_end):
+            pct_ = count[i, j]
+            # If there are particles from i to j
+            if pct_ != 0:
+                # Get percent
+                pct_ = pct_ / ref[i] * 100.0
+                # Get indices in full dataset
+                i_, j_ = translate_start[i], translate_end[j]
+                pct_0 = pct[i_, 0]
+                if pct_ > pct_0:
+                    pct[i_, 1] = pct_0
+                    pct[i_, 0] = pct_
+                    i_target[i_, 1] = i_target[i_, 0]
+                    i_target[i_, 0] = j_
+                elif pct_ > pct[i_, 1]:
+                    pct[i_, 1] = pct_
+                    i_target[i_, 1] = j_
+    return i_target, pct
+
+
 class NetworkObservations(GroupEddiesObservations):
 
     __slots__ = ("_index_network",)
@@ -109,17 +216,16 @@ def __init__(self, *args, **kwargs):
 
     def find_segments_relative(self, obs, stopped=None, order=1):
         """
-        Find all relative segments linked with merging/splitting events at a specific order.
+        Find all relative segments from obs linked with merging/splitting events at a specific order.
 
-        :param int obs: index of event after the event
-        :param int stopped: index of event before the event
+        :param int obs: index of observation after the event
+        :param int stopped: index of observation before the event
         :param int order: order of relatives accepted
-
        :return: all relative segments
        :rtype: EddiesObservations
        """
-        # FIXME : double "event" in the description, please clarify (event = chosen obs?)

        # extraction of network where the event is
        network_id = self.tracks[obs]
@@ -247,23 +353,17 @@ def infos(self, label=""):
     def correct_close_events(self, nb_days_max=20):
         """
         Transform event where
-            segment A split to B, then A merge into B
+            segment A splits from segment B, then x days after segment B merges with A
 
         to
-            segment A split to B, then B merge to A
+            segment A splits from segment B, then x days after segment A merges with B (B will be longer)
 
-        these events are filtered with `nb_days_max`, which the event have to take place in less than `nb_days_max`
+        These events must last less than `nb_days_max` days to be changed.
:param float nb_days_max: maximum time to search for splitting-merging event
         """
-        # FIXME : we want to change
-        # segment A splits from segment B, then x days after segment B merges with A
-        # to
-        # segment A splits from segment B then x days after segement A merges with B (B will be longer)
-        # comments are in the wrong way but the example works as wanted
-
         _time = self.time
         # segment used to correct and track changes
         segment = self.segment_track_array.copy()
@@ -1340,6 +1440,189 @@ def extract_with_mask(self, mask):
                 new.previous_obs[:] = translate[p]
         return new
 
+    def analysis_coherence(
+        self,
+        date_function,
+        uv_params,
+        advection_mode="both",
+        dt_advect=14,
+        step_mesh=1.0 / 50,
+        output_name=None,
+        dissociate_network=False,
+        correct_close_events=0,
+        remove_dead_end=0,
+    ):
+
+        """Global function to analyse segment coherence, with network preprocessing"""
+
+        if dissociate_network:
+            self.dissociate_network()
+
+        if correct_close_events > 0:
+            self.correct_close_events(nb_days_max=correct_close_events)
+
+        if remove_dead_end > 0:
+            network_clean = self.remove_dead_end(nobs=0, ndays=remove_dead_end)
+        else:
+            network_clean = self
+
+        res = network_clean.segment_coherence(
+            date_function=date_function,
+            uv_params=uv_params,
+            advection_mode=advection_mode,
+            output_name=output_name,
+            dt_advect=dt_advect,
+            step_mesh=step_mesh,
+        )
+
+        return network_clean, res
+
+    def segment_coherence(
+        self,
+        date_function,
+        uv_params,
+        advection_mode="both",
+        dt_advect=14,
+        step_mesh=1.0 / 50,
+        output_name=None,
+    ):
+
+        """
+        Percentage of particles and their targets after forward and/or backward advection from a specific eddy.
+
+        :param callable date_function: python function that takes an `int` (julian day) and returns the
+            data filename associated with the date
+            ex:
+                def date2file(julian_day):
+                    date = datetime.timedelta(days=julian_day) + datetime.datetime(1950, 1, 1)
+
+                    return f"/tmp/dt_global_allsat_phy_l4_{date.strftime('%Y%m%d')}.nc"
+
+        :param dict uv_params: dict of parameters used by
+            :py:meth:`~py_eddy_tracker.dataset.grid.GridCollection.from_netcdf_list`
+        :param str advection_mode: "backward", "forward" or "both"
+        :param int dt_advect: days for advection
+        :param float step_mesh: step for particle mesh in degrees
+        :param str output_name: if not None, name of the zarr file to save. Otherwise, data is not saved
+        """
+
+        if advection_mode in ["both", "forward"]:
+            itf_final = -ones((self.obs.size, 2), dtype="i4")
+            ptf_final = zeros((self.obs.size, 2), dtype="i1")
+
+        if advection_mode in ["both", "backward"]:
+            itb_final = -ones((self.obs.size, 2), dtype="i4")
+            ptb_final = zeros((self.obs.size, 2), dtype="i1")
+
+        for slice_track, b0, _ in self.iter_on(self.track):
+            if b0 == 0:
+                continue
+
+            sub_networks = self.network(b0)
+
+            # find extremum to create a mesh of particles
+            lon = sub_networks.contour_lon_s
+            lonMin = lon.min() - 0.1
+            lonMax = lon.max() + 0.1
+
+            lat = sub_networks.contour_lat_s
+            latMin = lat.min() - 0.1
+            latMax = lat.max() + 0.1
+
+            x0, y0 = meshgrid(
+                arange(lonMin, lonMax, step_mesh), arange(latMin, latMax, step_mesh)
+            )
+            x0, y0 = x0.reshape(-1), y0.reshape(-1)
+            _, i = group_obs(x0, y0, 1, 360)
+            x0, y0 = x0[i], y0[i]
+
+            t_start, t_end = sub_networks.period
+            shape = (sub_networks.obs.size, 2)
+
+            if advection_mode in ["both", "forward"]:
+
+                # first dates to load.
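+                # (i.e. julian days t_start - 1 .. t_start + dt_advect + 1,
+                # one file per day returned by date_function)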
+                dates = arange(t_start - 1, t_start + dt_advect + 2)
+                # files associated with dates
+                first_files = [date_function(x) for x in dates]
+
+                c = GridCollection.from_netcdf_list(first_files, dates, **uv_params)
+
+                i_target_f = -ones(shape, dtype="i4")
+                pct_target_f = zeros(shape, dtype="i1")
+
+                for _t in range(t_start, t_end - dt_advect + 1):
+                    t_shift = _t + dt_advect + 2
+
+                    # add next date to GridCollection and delete last date
+                    c.shift_files(t_shift, date_function(int(t_shift)), **uv_params)
+                    particle_candidate(
+                        x0,
+                        y0,
+                        c,
+                        sub_networks,
+                        _t,
+                        i_target_f,
+                        pct_target_f,
+                        delta_t=dt_advect,
+                    )
+
+                itf_final[slice_track] = i_target_f
+                ptf_final[slice_track] = pct_target_f
+
+            if advection_mode in ["both", "backward"]:
+
+                # first dates to load.
+                dates = arange(t_start - 1, t_start + dt_advect + 2)
+                # files associated with dates
+                first_files = [date_function(x) for x in dates]
+
+                c = GridCollection.from_netcdf_list(first_files, dates, **uv_params)
+
+                i_target_b = -ones(shape, dtype="i4")
+                pct_target_b = zeros(shape, dtype="i1")
+
+                for _t in range(t_start + dt_advect + 1, t_end + 1):
+                    t_shift = _t + 1
+
+                    # add next date to GridCollection and delete last date
+                    c.shift_files(t_shift, date_function(int(t_shift)), **uv_params)
+                    particle_candidate(
+                        x0,
+                        y0,
+                        c,
+                        self,
+                        _t,
+                        i_target_b,
+                        pct_target_b,
+                        delta_t=-dt_advect,
+                    )
+
+                itb_final[slice_track] = i_target_b
+                ptb_final[slice_track] = pct_target_b
+
+        if output_name is not None:
+            zg = zarr.open(output_name, "w")
+
+            # zarr compression parameters
+            params_seg = dict()
+            params_pct = dict()
+
+        res = []
+        if advection_mode in ["forward", "both"]:
+            res = res + [itf_final, ptf_final]
+            if output_name is not None:
+                zg.array("i_target_forward", itf_final, **params_seg)
+                zg.array("pct_target_forward", ptf_final, **params_pct)
+
+        if advection_mode in ["backward", "both"]:
+            res = res + [itb_final, ptb_final]
+            if output_name is not None:
+                zg.array("i_target_backward", itb_final, **params_seg)
+                zg.array("pct_target_backward", ptb_final, **params_pct)
+
+        return res
+
 
 class Network:
     __slots__ = (
diff --git a/src/py_eddy_tracker/observations/observation.py b/src/py_eddy_tracker/observations/observation.py
index 173f6c56..3d91ad42 100644
--- a/src/py_eddy_tracker/observations/observation.py
+++ b/src/py_eddy_tracker/observations/observation.py
@@ -2045,7 +2045,7 @@ def is_convex(self, intern=False):
 
     def contains(self, x, y, intern=False):
         """
-        Return index of contour which contain (x,y)
+        Return index of contour containing (x,y)
 
         :param array x: longitude
         :param array y: latitude
From 13177b657e3f37b88e734a0aac1c8254d5547fca Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Cl=C3=A9ment?= <49512274+ludwigVonKoopa@users.noreply.github.com>
Date: Wed, 12 May 2021 14:31:29 +0200
Subject: [PATCH 021/115] correct bug when indices are on the edge of data

---
 src/py_eddy_tracker/dataset/grid.py | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/src/py_eddy_tracker/dataset/grid.py b/src/py_eddy_tracker/dataset/grid.py
index ea602cfa..6337e136 100644
--- a/src/py_eddy_tracker/dataset/grid.py
+++ b/src/py_eddy_tracker/dataset/grid.py
@@ -2551,6 +2551,11 @@ def get_uv_quad(i0, j0, u, v, m, nb_x=0):
     i1, j1 = i0 + 1, j0 + 1
     if nb_x != 0:
         i1 %= nb_x
+    i_max, j_max = m.shape
+
+    # if i1 >= i_max or j1 >= j_max:
+    #     return True, nan, nan, nan, nan, nan, nan, nan, nan
+
     if m[i0, j0] or m[i0, j1] or m[i1, j0] or m[i1, j1]:
         return True, nan, nan, nan, nan, nan, nan, nan, nan
     # Extract value for u and v
From b5c31016a085bd47c7e281cdff5c2a7e0bc7e392 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Cl=C3=A9ment?= <49512274+ludwigVonKoopa@users.noreply.github.com>
Date: Fri, 14 May 2021 13:48:26 +0200
Subject: [PATCH 022/115] corrections for merge request

move particle_candidate in groups
add default values for shift_files
mistake with get_uv_quad correction still in comments
change delta_t to n_days
correct whitespaces
---
 examples/16_network/pet_follow_particle.py  |  80 +-----------
 src/py_eddy_tracker/dataset/grid.py         |   6 +-
 src/py_eddy_tracker/observations/groups.py  | 109 ++++++++++++++++-
 src/py_eddy_tracker/observations/network.py | 128 +++-----------------
 4 files changed, 127 insertions(+), 196 deletions(-)

diff --git a/examples/16_network/pet_follow_particle.py b/examples/16_network/pet_follow_particle.py
index 0c4be55d..b4dfe343 100644
--- a/examples/16_network/pet_follow_particle.py
+++ b/examples/16_network/pet_follow_particle.py
@@ -17,6 +17,7 @@
 from py_eddy_tracker.data import get_demo_path
 from py_eddy_tracker.dataset.grid import GridCollection
 from py_eddy_tracker.observations.network import NetworkObservations
+from py_eddy_tracker.observations.groups import particle_candidate
 from py_eddy_tracker.poly import group_obs
 
 start_logger().setLevel("ERROR")
@@ -124,81 +125,6 @@
 
 ani = VideoAnimation(a.fig, update, frames=arange(20200, 20269, step), interval=200)
 
-# %%
-# In which observations are the particle
-# --------------------------------------
-def advect(x, y, c, t0, delta_t):
-    """
-    Advect particle from t0 to t0 + delta_t, with data cube.
-    """
-    kw = dict(nb_step=6, time_step=86400 / 6)
-    if delta_t < 0:
-        kw["backward"] = True
-        delta_t = -delta_t
-    p = c.advect(x, y, "u", "v", t_init=t0, **kw)
-    for _ in range(delta_t):
-        t, x, y = p.__next__()
-    return t, x, y
-
-
-def particle_candidate(x, y, c, eddies, t_start, i_target, pct, **kwargs):
-    # Obs from initial time
-    m_start = eddies.time == t_start
-    e = eddies.extract_with_mask(m_start)
-    # to be able to get global index
-    translate_start = where(m_start)[0]
-    # Identify particle in eddies (only in core)
-    i_start = e.contains(x, y, intern=True)
-    m = i_start != -1
-    x, y, i_start = x[m], y[m], i_start[m]
-    # Advect
-    t_end, x, y = advect(x, y, c, t_start, **kwargs)
-    # eddies at last date
-    m_end = eddies.time == t_end / 86400
-    e_end = eddies.extract_with_mask(m_end)
-    # to be able to get global index
-    translate_end = where(m_end)[0]
-    # Id eddies for each alive particle (in core and extern)
-    i_end = e_end.contains(x, y)
-    # compute matrix and fill target array
-    get_matrix(i_start, i_end, translate_start, translate_end, i_target, pct)
-
-
-@njit(cache=True)
-def get_matrix(i_start, i_end, translate_start, translate_end, i_target, pct):
-    nb_start, nb_end = translate_start.size, translate_end.size
-    # Matrix which will store count for every couple
-    count = zeros((nb_start, nb_end), dtype=nb_types.int32)
-    # Number of particles in each origin observation
-    ref = zeros(nb_start, dtype=nb_types.int32)
-    # For each particle
-    for i in range(i_start.size):
-        i_end_ = i_end[i]
-        i_start_ = i_start[i]
-        if i_end_ != -1:
-            count[i_start_, i_end_] += 1
-        ref[i_start_] += 1
-    for i in range(nb_start):
-        for j in range(nb_end):
-            pct_ = count[i, j]
-            # If there are particles from i to j
-            if pct_ != 0:
-                # Get percent
-                pct_ = pct_ / ref[i] * 100.0
-                # Get indices in full dataset
-                i_, j_ = translate_start[i], translate_end[j]
-                pct_0 = pct[i_, 0]
-                if pct_ > pct_0:
-                    pct[i_, 1] = pct_0
-                    pct[i_, 0] = pct_
- 
i_target[i_, 1] = i_target[i_, 0] - i_target[i_, 0] = j_ - elif pct_ > pct[i_, 1]: - pct[i_, 1] = pct_ - i_target[i_, 1] = j_ - return i_target, pct - - # %% # Particle advection # ^^^^^^^^^^^^^^^^^^ @@ -217,12 +143,12 @@ def get_matrix(i_start, i_end, translate_start, translate_end, i_target, pct): # Forward run i_target_f, pct_target_f = -ones(shape, dtype="i4"), zeros(shape, dtype="i1") for t in range(t_start, t_end - dt): - particle_candidate(x0, y0, c, n, t, i_target_f, pct_target_f, delta_t=dt) + particle_candidate(x0, y0, c, n, t, i_target_f, pct_target_f, n_days=dt) # Backward run i_target_b, pct_target_b = -ones(shape, dtype="i4"), zeros(shape, dtype="i1") for t in range(t_start + dt, t_end): - particle_candidate(x0, y0, c, n, t, i_target_b, pct_target_b, delta_t=-dt) + particle_candidate(x0, y0, c, n, t, i_target_b, pct_target_b, n_days=-dt) # %% fig = plt.figure(figsize=(10, 10)) diff --git a/src/py_eddy_tracker/dataset/grid.py b/src/py_eddy_tracker/dataset/grid.py index 6337e136..28fa8526 100644 --- a/src/py_eddy_tracker/dataset/grid.py +++ b/src/py_eddy_tracker/dataset/grid.py @@ -2264,7 +2264,7 @@ def from_netcdf_list(cls, filenames, t, x_name, y_name, indexs=None, heigth=None new.datasets.append((t, d)) return new - def shift_files(self, t, filename, x_name, y_name, indexs, heigth): + def shift_files(self, t, filename, x_name, y_name, indexs=None, heigth=None): """Add next file to the list and remove the oldest""" self.datasets = self.datasets[1:] @@ -2553,8 +2553,8 @@ def get_uv_quad(i0, j0, u, v, m, nb_x=0): i1 %= nb_x i_max, j_max = m.shape - # if i1 >= i_max or j1 >= j_max: - # return True, nan, nan, nan, nan, nan, nan, nan, nan + if i1 >= i_max or j1 >= j_max: + return True, nan, nan, nan, nan, nan, nan, nan, nan if m[i0, j0] or m[i0, j1] or m[i1, j0] or m[i1, j1]: return True, nan, nan, nan, nan, nan, nan, nan, nan diff --git a/src/py_eddy_tracker/observations/groups.py b/src/py_eddy_tracker/observations/groups.py index bd8ac81d..835101ff 100644 --- a/src/py_eddy_tracker/observations/groups.py +++ b/src/py_eddy_tracker/observations/groups.py @@ -1,8 +1,8 @@ import logging from abc import ABC, abstractmethod -from numba import njit -from numpy import arange, int32, interp, median, zeros +from numba import njit, types as nb_types +from numpy import arange, int32, interp, median, zeros, where from .observation import EddiesObservations @@ -65,6 +65,111 @@ def get_missing_indices( return indices + +def advect(x, y, c, t0, n_days): + """ + Advect particle from t0 to t0 + n_days, with data cube. 
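+    A negative ``n_days`` runs the advection backward in time.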
+
+    :param np.array(float) x: longitude of particles
+    :param np.array(float) y: latitude of particles
+    :param `~py_eddy_tracker.dataset.grid.GridCollection` c: GridCollection with speed for particles
+    :param int t0: julian day of advection start
+    :param int n_days: number of days to advect
+    """
+
+    kw = dict(nb_step=6, time_step=86400 / 6)
+    if n_days < 0:
+        kw["backward"] = True
+        n_days = -n_days
+    p = c.advect(x, y, "u", "v", t_init=t0, **kw)
+    for _ in range(n_days):
+        t, x, y = p.__next__()
+    return t, x, y
+
+
+def particle_candidate(x, y, c, eddies, t_start, i_target, pct, **kwargs):
+    """Select particles within eddies, advect them, return target observation and associated percentages
+
+    :param np.array(float) x: longitude of particles
+    :param np.array(float) y: latitude of particles
+    :param `~py_eddy_tracker.dataset.grid.GridCollection` c: GridCollection with speed for particles
+    :param GroupEddiesObservations eddies: GroupEddiesObservations considered
+    :param int t_start: julian day of the advection
+    :param np.array(int) i_target: corresponding obs where particles are advected
+    :param np.array(int) pct: corresponding percentage of advected particles
+    :param dict kwargs: dict of params given to `advect`
+
+    """
+
+    # Obs from initial time
+    m_start = eddies.time == t_start
+
+    e = eddies.extract_with_mask(m_start)
+    # to be able to get global index
+    translate_start = where(m_start)[0]
+    # Identify particle in eddies (only in core)
+    i_start = e.contains(x, y, intern=True)
+    m = i_start != -1
+
+    x, y, i_start = x[m], y[m], i_start[m]
+    # Advect
+    t_end, x, y = advect(x, y, c, t_start, **kwargs)
+    # eddies at last date
+    m_end = eddies.time == t_end / 86400
+    e_end = eddies.extract_with_mask(m_end)
+    # to be able to get global index
+    translate_end = where(m_end)[0]
+    # Id eddies for each alive particle (in core and extern)
+    i_end = e_end.contains(x, y)
+    # compute matrix and fill target array
+    get_matrix(i_start, i_end, translate_start, translate_end, i_target, pct)
+
+
+@njit(cache=True)
+def get_matrix(i_start, i_end, translate_start, translate_end, i_target, pct):
+    """Compute target observation and associated percentages
+
+    :param np.array(int) i_start: indices of associated contours at starting advection day
+    :param np.array(int) i_end: indices of associated contours after advection
+    :param np.array(int) translate_start: corresponding global indices at starting advection day
+    :param np.array(int) translate_end: corresponding global indices after advection
+    :param np.array(int) i_target: corresponding obs where particles are advected
+    :param np.array(int) pct: corresponding percentage of advected particles
+    """
+
+    nb_start, nb_end = translate_start.size, translate_end.size
+    # Matrix which will store count for every couple
+    count = zeros((nb_start, nb_end), dtype=nb_types.int32)
+    # Number of particles in each origin observation
+    ref = zeros(nb_start, dtype=nb_types.int32)
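+    # Bookkeeping example (hypothetical numbers): if 60 % of the particles
+    # seeded in origin i reach target j and 25 % reach target k, the loops
+    # below leave i_target[i] == (j, k) and pct[i] == (60, 25); any later
+    # candidate must beat pct[i, 1] to enter this top-two ranking.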
+    # For each particle
+    for i in range(i_start.size):
+        i_end_ = i_end[i]
+        i_start_ = i_start[i]
+        if i_end_ != -1:
+            count[i_start_, i_end_] += 1
+        ref[i_start_] += 1
+    for i in range(nb_start):
+        for j in range(nb_end):
+            pct_ = count[i, j]
+            # If there are particles from i to j
+            if pct_ != 0:
+                # Get percent
+                pct_ = pct_ / ref[i] * 100.0
+                # Get indices in full dataset
+                i_, j_ = translate_start[i], translate_end[j]
+                pct_0 = pct[i_, 0]
+                if pct_ > pct_0:
+                    pct[i_, 1] = pct_0
+                    pct[i_, 0] = pct_
+                    i_target[i_, 1] = i_target[i_, 0]
+                    i_target[i_, 0] = j_
+                elif pct_ > pct[i_, 1]:
+                    pct[i_, 1] = pct_
+                    i_target[i_, 1] = j_
+    return i_target, pct
+
+
 class GroupEddiesObservations(EddiesObservations, ABC):
     @abstractmethod
     def fix_next_previous_obs(self):
diff --git a/src/py_eddy_tracker/observations/network.py b/src/py_eddy_tracker/observations/network.py
index 5fe0727a..7292e7c2 100644
--- a/src/py_eddy_tracker/observations/network.py
+++ b/src/py_eddy_tracker/observations/network.py
@@ -27,7 +27,7 @@
 
 from ..generic import build_index, wrap_longitude
 from ..poly import bbox_intersection, vertice_overlap, group_obs
-from .groups import GroupEddiesObservations, get_missing_indices
+from .groups import GroupEddiesObservations, get_missing_indices, particle_candidate
 from .observation import EddiesObservations
 from .tracking import TrackEddiesObservations, track_loess_filter, track_median_filter
 from ..dataset.grid import GridCollection
@@ -101,109 +101,6 @@ def fix_next_previous_obs(next_obs, previous_obs, flag_virtual):
             previous_obs[i_o + 1] = i_o
 
 
-def advect(x, y, c, t0, delta_t):
-    """
-    Advect particle from t0 to t0 + delta_t, with data cube.
-
-    :param np.array(float) x: longitude of particles
-    :param np.array(float) y: latitude of particles
-    :param `~py_eddy_tracker.dataset.grid.GridCollection` c: GridCollection with speed for particles
-    :param int t0: julian day of advection start
-    :param int delta_t: number of days to advect
-    """
-
-    kw = dict(nb_step=6, time_step=86400 / 6)
-    if delta_t < 0:
-        kw["backward"] = True
-        delta_t = -delta_t
-    p = c.advect(x, y, "u", "v", t_init=t0, **kw)
-    for _ in range(delta_t):
-        t, x, y = p.__next__()
-    return t, x, y
-
-
-def particle_candidate(x, y, c, eddies, t_start, i_target, pct, **kwargs):
-    """Select particles within eddies, advect them, return target observation and associated percentages
-
-    :param np.array(float) x: longitude of particles
-    :param np.array(float) y: latitude of particles
-    :param `~py_eddy_tracker.dataset.grid.GridCollection` c: GridCollection with speed for particles
-    :param NetworkObservations eddies: NetworkObservations considered
-    :param int t_start: julian day of the advection
-    :param np.array(int) i_target: corresponding obs where particles are advected
-    :param np.array(int) pct: corresponding percentage of advected particles
-    :param dict kwargs: dict of params given to `advect`
-    """
-
-    # Obs from initial time
-    m_start = eddies.time == t_start
-
-    e = eddies.extract_with_mask(m_start)
-    # to be able to get global index
-    translate_start = where(m_start)[0]
-    # Identify particle in eddies (only in core)
-    i_start = e.contains(x, y, intern=True)
-    m = i_start != -1
-
-    x, y, i_start = x[m], y[m], i_start[m]
-    # Advect
-    t_end, x, y = advect(x, y, c, t_start, **kwargs)
-    # eddies at last date
-    m_end = eddies.time == t_end / 86400
-    e_end = eddies.extract_with_mask(m_end)
-    # to be able to get global index
-    translate_end = where(m_end)[0]
-    # Id eddies for each alive particle (in core and extern)
-    i_end = e_end.contains(x, y)
-    # compute matrix and fill target array
-    get_matrix(i_start, i_end, translate_start, translate_end, i_target, pct)
-
-
-@njit(cache=True)
-def get_matrix(i_start, i_end, translate_start, translate_end, i_target, pct):
-    """Compute target observation and associated percentages
-
-    :param np.array(int) i_start: indices of associated contours at starting advection day
-    :param np.array(int) i_end: indices of associated contours after advection
-    :param np.array(int) translate_start: corresponding global indices at starting advection day
-    :param np.array(int) translate_end: corresponding global indices after advection
-    :param np.array(int) i_target: corresponding obs where particles are advected
-    :param np.array(int) pct: corresponding percentage of advected particles
-    """
-
-    nb_start, nb_end = translate_start.size, translate_end.size
-    # Matrix which will store count for every couple
-    count = zeros((nb_start, nb_end), dtype=nb_types.int32)
-    # Number of particles in each origin observation
-    ref = zeros(nb_start, dtype=nb_types.int32)
-    # For each particle
-    for i in range(i_start.size):
-        i_end_ = i_end[i]
-        i_start_ = i_start[i]
-        if i_end_ != -1:
-            count[i_start_, i_end_] += 1
-        ref[i_start_] += 1
-    for i in range(nb_start):
-        for j in range(nb_end):
-            pct_ = count[i, j]
-            # If there are particles from i to j
-            if pct_ != 0:
-                # Get percent
-                pct_ = pct_ / ref[i] * 100.0
-                # Get indices in full dataset
-                i_, j_ = translate_start[i], translate_end[j]
-                pct_0 = pct[i_, 0]
-                if pct_ > pct_0:
-                    pct[i_, 1] = pct_0
-                    pct[i_, 0] = pct_
-                    i_target[i_, 1] = i_target[i_, 0]
-                    i_target[i_, 0] = j_
-                elif pct_ > pct[i_, 1]:
-                    pct[i_, 1] = pct_
-                    i_target[i_, 1] = j_
-    return i_target, pct
-
-
 class NetworkObservations(GroupEddiesObservations):
 
     __slots__ = ("_index_network",)
@@ -221,12 +118,10 @@ def find_segments_relative(self, obs, stopped=None, order=1):
         :param int obs: index of observation after the event
         :param int stopped: index of observation before the event
         :param int order: order of relatives accepted
-
         :return: all relative segments
         :rtype: EddiesObservations
         """
-
         # extraction of network where the event is
         network_id = self.tracks[obs]
         nw = self.network(network_id)
@@ -1491,19 +1386,24 @@ def segment_coherence(
         Percentage of particles and their targets after forward and/or backward advection from a specific eddy.
 
         :param callable date_function: python function that takes an `int` (julian day) and returns the
-            data filename associated with the date
-            ex:
-                def date2file(julian_day):
-                    date = datetime.timedelta(days=julian_day) + datetime.datetime(1950, 1, 1)
-
-                    return f"/tmp/dt_global_allsat_phy_l4_{date.strftime('%Y%m%d')}.nc"
-
+            data filename associated with the date (see note)
         :param dict uv_params: dict of parameters used by
             :py:meth:`~py_eddy_tracker.dataset.grid.GridCollection.from_netcdf_list`
         :param str advection_mode: "backward", "forward" or "both"
         :param int dt_advect: days for advection
         :param float step_mesh: step for particle mesh in degrees
         :param str output_name: if not None, name of the zarr file to save. Otherwise, data is not saved
+        :return: list of 2 or 4 arrays (depending on forward, backward or both) with segment matches, and percents
+
+        .. note:: the param `date_function` should be something like:
+
+        .. code-block:: python
+
+            def date2file(julian_day):
+                date = datetime.timedelta(days=julian_day) + datetime.datetime(1950, 1, 1)
+
+                return f"/tmp/dt_global_allsat_phy_l4_{date.strftime('%Y%m%d')}.nc"
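+
+            # a matching uv_params for such files could be (illustrative only):
+            uv_params = dict(x_name="longitude", y_name="latitude", heigth="adt")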
+
         """
 
         if advection_mode in ["both", "forward"]:
@@ -1591,7 +1491,7 @@ def date2file(julian_day):
                         x0,
                         y0,
                         c,
-                        self,
+                        sub_networks,
                         _t,
                         i_target_b,
From 006adc5f0b300e93a84333198945f91d4fb2cb0a Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Cl=C3=A9ment?= <49512274+ludwigVonKoopa@users.noreply.github.com>
Date: Fri, 14 May 2021 13:48:53 +0200
Subject: [PATCH 023/115] correction for documentation

---
 src/py_eddy_tracker/generic.py              | 5 +++--
 src/py_eddy_tracker/observations/network.py | 6 +++---
 src/py_eddy_tracker/poly.py                 | 1 +
 3 files changed, 7 insertions(+), 5 deletions(-)

diff --git a/src/py_eddy_tracker/generic.py b/src/py_eddy_tracker/generic.py
index 530c2136..283b4b9e 100644
--- a/src/py_eddy_tracker/generic.py
+++ b/src/py_eddy_tracker/generic.py
@@ -70,8 +70,9 @@ def build_index(groups):
     :param array groups: array that contains groups to be separated
     :return: (first_index of each group, last_index of each group, value to shift groups)
     :rtype: (array, array, int)
-    Examples
-    --------
+
+    :Example:
+
     >>> build_index(array((1, 1, 3, 4, 4)))
     (array([0, 2, 2, 3]), array([2, 2, 3, 5]), 1)
     """
diff --git a/src/py_eddy_tracker/observations/network.py b/src/py_eddy_tracker/observations/network.py
index 7292e7c2..685b3e42 100644
--- a/src/py_eddy_tracker/observations/network.py
+++ b/src/py_eddy_tracker/observations/network.py
@@ -221,7 +221,7 @@ def from_split_network(cls, group_dataset, indexs, **kwargs):
 
     :param TrackEddiesObservations group_dataset: Group dataset
     :param indexs: result from split_network
-    return NetworkObservations
+    :return: NetworkObservations
     """
     index_order = indexs.argsort(order=("group", "track", "time"))
     network = cls.new_like(group_dataset, len(group_dataset), **kwargs)
@@ -1598,8 +1598,8 @@ def group_translator(nb, duos):
 
         :param int nb: size of translator
         :param set((int, int)) duos: set of all groups that must be joined
 
-        Examples
-        --------
+        :Example:
+
         >>> NetworkObservations.group_translator(5, ((0, 1), (0, 2), (1, 3)))
         [3, 3, 3, 3, 5]
diff --git a/src/py_eddy_tracker/poly.py b/src/py_eddy_tracker/poly.py
index fd4ae9c4..fc36185b 100644
--- a/src/py_eddy_tracker/poly.py
+++ b/src/py_eddy_tracker/poly.py
@@ -717,6 +717,7 @@ def visvalingam(x, y, fixed_size=18):
     """Polygon simplification with visvalingam algorithm
     X, Y are considered like a polygon, the next point after the last
     one is the first one
+
     :param array x:
     :param array y:
     :param int fixed_size: array size of out
From b3f66bcf5faf2379e62c72cdfb5e07a62ddbf56e Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Cl=C3=A9ment?= <49512274+ludwigVonKoopa@users.noreply.github.com>
Date: Sun, 16 May 2021 15:34:43 +0200
Subject: [PATCH 024/115] minor corrections

sorting imports
adding notebooks for binder
adding kwargs to shift_files
correction of new warnings
done
---
 examples/16_network/pet_follow_particle.py    |   6 +-
 examples/16_network/pet_ioannou_2017_case.py  |   5 +-
 .../01_general_things/pet_storage.ipynb       |   2 +-
 .../pet_contour_circle.ipynb                  |   2 +-
 .../pet_display_id.ipynb                      |   2 +-
 .../pet_eddy_detection.ipynb                  |   2 +-
 .../pet_eddy_detection_ACC.ipynb              |   2 +-
 .../pet_eddy_detection_gulf_stream.ipynb      |   2 +-
 .../pet_filter_and_detection.ipynb            |   2 +-
 .../pet_interp_grid_on_dataset.ipynb          |   2 +-
 .../pet_radius_vs_area.ipynb                  |   2 +-
 .../pet_shape_gallery.ipynb                   |   2 +-
 .../pet_sla_and_adt.ipynb                     |   2 +-
.../06_grid_manipulation/pet_advect.ipynb | 2 +- .../06_grid_manipulation/pet_filter.ipynb | 2 +- .../pet_hide_pixel_out_eddies.ipynb | 2 +- .../06_grid_manipulation/pet_lavd.ipynb | 2 +- .../pet_okubo_weiss.ipynb | 2 +- .../07_cube_manipulation/pet_cube.ipynb | 2 +- .../07_cube_manipulation/pet_fsle_med.ipynb | 4 +- .../pet_lavd_detection.ipynb | 2 +- .../pet_display_field.ipynb | 2 +- .../pet_display_track.ipynb | 2 +- .../pet_one_track.ipynb | 2 +- .../pet_run_a_tracking.ipynb | 2 +- .../pet_select_track_across_area.ipynb | 2 +- .../pet_track_anim.ipynb | 2 +- .../pet_track_anim_matplotlib_animation.ipynb | 2 +- .../pet_birth_and_death.ipynb | 2 +- .../pet_center_count.ipynb | 2 +- .../pet_geographic_stats.ipynb | 2 +- .../10_tracking_diagnostics/pet_groups.ipynb | 2 +- .../10_tracking_diagnostics/pet_histo.ipynb | 2 +- .../pet_lifetime.ipynb | 2 +- .../pet_normalised_lifetime.ipynb | 2 +- .../pet_pixel_used.ipynb | 2 +- .../pet_propagation.ipynb | 2 +- .../pet_SST_collocation.ipynb | 2 +- .../14_generic_tools/pet_fit_contour.ipynb | 2 +- .../14_generic_tools/pet_visvalingam.ipynb | 2 +- .../python_module/16_network/pet_atlas.ipynb | 2 +- .../16_network/pet_follow_particle.ipynb | 24 +------ .../16_network/pet_group_anim.ipynb | 2 +- .../16_network/pet_ioannou_2017_case.ipynb | 4 +- .../16_network/pet_relative.ipynb | 2 +- .../16_network/pet_replay_segmentation.ipynb | 2 +- .../16_network/pet_segmentation_anim.ipynb | 2 +- .../16_network/pet_something_cool.ipynb | 65 +++++++++++++++++++ src/py_eddy_tracker/dataset/grid.py | 4 +- src/py_eddy_tracker/observations/groups.py | 6 +- src/py_eddy_tracker/observations/network.py | 8 +-- 51 files changed, 127 insertions(+), 83 deletions(-) create mode 100644 notebooks/python_module/16_network/pet_something_cool.ipynb diff --git a/examples/16_network/pet_follow_particle.py b/examples/16_network/pet_follow_particle.py index b4dfe343..e5451daa 100644 --- a/examples/16_network/pet_follow_particle.py +++ b/examples/16_network/pet_follow_particle.py @@ -8,16 +8,14 @@ from matplotlib import colors from matplotlib import pyplot as plt from matplotlib.animation import FuncAnimation -from numba import njit -from numba import types as nb_types -from numpy import arange, meshgrid, ones, unique, where, zeros +from numpy import arange, meshgrid, ones, unique, zeros from py_eddy_tracker import start_logger from py_eddy_tracker.appli.gui import Anim from py_eddy_tracker.data import get_demo_path from py_eddy_tracker.dataset.grid import GridCollection -from py_eddy_tracker.observations.network import NetworkObservations from py_eddy_tracker.observations.groups import particle_candidate +from py_eddy_tracker.observations.network import NetworkObservations from py_eddy_tracker.poly import group_obs start_logger().setLevel("ERROR") diff --git a/examples/16_network/pet_ioannou_2017_case.py b/examples/16_network/pet_ioannou_2017_case.py index 768f0c88..bbe26e3f 100644 --- a/examples/16_network/pet_ioannou_2017_case.py +++ b/examples/16_network/pet_ioannou_2017_case.py @@ -14,14 +14,13 @@ from matplotlib import pyplot as plt from matplotlib.animation import FuncAnimation from matplotlib.ticker import FuncFormatter -from numpy import arange, where, array, pi +from numpy import arange, array, pi, where from py_eddy_tracker.appli.gui import Anim from py_eddy_tracker.data import get_demo_path +from py_eddy_tracker.generic import coordinates_to_local from py_eddy_tracker.gui import GUI_AXES from py_eddy_tracker.observations.network import NetworkObservations - -from 
py_eddy_tracker.generic import coordinates_to_local from py_eddy_tracker.poly import fit_ellipse # %% diff --git a/notebooks/python_module/01_general_things/pet_storage.ipynb b/notebooks/python_module/01_general_things/pet_storage.ipynb index fa8d1a55..a56e4def 100644 --- a/notebooks/python_module/01_general_things/pet_storage.ipynb +++ b/notebooks/python_module/01_general_things/pet_storage.ipynb @@ -230,7 +230,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.2" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/02_eddy_identification/pet_contour_circle.ipynb b/notebooks/python_module/02_eddy_identification/pet_contour_circle.ipynb index 36989357..2d924387 100644 --- a/notebooks/python_module/02_eddy_identification/pet_contour_circle.ipynb +++ b/notebooks/python_module/02_eddy_identification/pet_contour_circle.ipynb @@ -82,7 +82,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/02_eddy_identification/pet_display_id.ipynb b/notebooks/python_module/02_eddy_identification/pet_display_id.ipynb index 6d40974f..d59f9e15 100644 --- a/notebooks/python_module/02_eddy_identification/pet_display_id.ipynb +++ b/notebooks/python_module/02_eddy_identification/pet_display_id.ipynb @@ -129,7 +129,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/02_eddy_identification/pet_eddy_detection.ipynb b/notebooks/python_module/02_eddy_identification/pet_eddy_detection.ipynb index fb6c17f8..7469b034 100644 --- a/notebooks/python_module/02_eddy_identification/pet_eddy_detection.ipynb +++ b/notebooks/python_module/02_eddy_identification/pet_eddy_detection.ipynb @@ -291,7 +291,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/02_eddy_identification/pet_eddy_detection_ACC.ipynb b/notebooks/python_module/02_eddy_identification/pet_eddy_detection_ACC.ipynb index c2a3648d..6ac75cee 100644 --- a/notebooks/python_module/02_eddy_identification/pet_eddy_detection_ACC.ipynb +++ b/notebooks/python_module/02_eddy_identification/pet_eddy_detection_ACC.ipynb @@ -161,7 +161,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/02_eddy_identification/pet_eddy_detection_gulf_stream.ipynb b/notebooks/python_module/02_eddy_identification/pet_eddy_detection_gulf_stream.ipynb index c39bc011..49024327 100644 --- a/notebooks/python_module/02_eddy_identification/pet_eddy_detection_gulf_stream.ipynb +++ b/notebooks/python_module/02_eddy_identification/pet_eddy_detection_gulf_stream.ipynb @@ -273,7 +273,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/02_eddy_identification/pet_filter_and_detection.ipynb b/notebooks/python_module/02_eddy_identification/pet_filter_and_detection.ipynb index 63e763ff..381aa8f6 100644 --- a/notebooks/python_module/02_eddy_identification/pet_filter_and_detection.ipynb +++ b/notebooks/python_module/02_eddy_identification/pet_filter_and_detection.ipynb @@ -176,7 +176,7 @@ "name": 
"python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/02_eddy_identification/pet_interp_grid_on_dataset.ipynb b/notebooks/python_module/02_eddy_identification/pet_interp_grid_on_dataset.ipynb index 94e61b30..0cfdc9a8 100644 --- a/notebooks/python_module/02_eddy_identification/pet_interp_grid_on_dataset.ipynb +++ b/notebooks/python_module/02_eddy_identification/pet_interp_grid_on_dataset.ipynb @@ -111,7 +111,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/02_eddy_identification/pet_radius_vs_area.ipynb b/notebooks/python_module/02_eddy_identification/pet_radius_vs_area.ipynb index c70f7dd6..03eba8bf 100644 --- a/notebooks/python_module/02_eddy_identification/pet_radius_vs_area.ipynb +++ b/notebooks/python_module/02_eddy_identification/pet_radius_vs_area.ipynb @@ -107,7 +107,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/02_eddy_identification/pet_shape_gallery.ipynb b/notebooks/python_module/02_eddy_identification/pet_shape_gallery.ipynb index ffa58c1f..0ef03f6f 100644 --- a/notebooks/python_module/02_eddy_identification/pet_shape_gallery.ipynb +++ b/notebooks/python_module/02_eddy_identification/pet_shape_gallery.ipynb @@ -100,7 +100,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/02_eddy_identification/pet_sla_and_adt.ipynb b/notebooks/python_module/02_eddy_identification/pet_sla_and_adt.ipynb index efbfcc76..9b8b3951 100644 --- a/notebooks/python_module/02_eddy_identification/pet_sla_and_adt.ipynb +++ b/notebooks/python_module/02_eddy_identification/pet_sla_and_adt.ipynb @@ -223,7 +223,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/06_grid_manipulation/pet_advect.ipynb b/notebooks/python_module/06_grid_manipulation/pet_advect.ipynb index b660df52..bceed074 100644 --- a/notebooks/python_module/06_grid_manipulation/pet_advect.ipynb +++ b/notebooks/python_module/06_grid_manipulation/pet_advect.ipynb @@ -262,7 +262,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.2" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/06_grid_manipulation/pet_filter.ipynb b/notebooks/python_module/06_grid_manipulation/pet_filter.ipynb index 74a266c2..2d6a7d3a 100644 --- a/notebooks/python_module/06_grid_manipulation/pet_filter.ipynb +++ b/notebooks/python_module/06_grid_manipulation/pet_filter.ipynb @@ -215,7 +215,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/06_grid_manipulation/pet_hide_pixel_out_eddies.ipynb b/notebooks/python_module/06_grid_manipulation/pet_hide_pixel_out_eddies.ipynb index c9bca31e..f30076fa 100644 --- a/notebooks/python_module/06_grid_manipulation/pet_hide_pixel_out_eddies.ipynb +++ b/notebooks/python_module/06_grid_manipulation/pet_hide_pixel_out_eddies.ipynb @@ -111,7 +111,7 @@ "name": "python", "nbconvert_exporter": 
"python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/06_grid_manipulation/pet_lavd.ipynb b/notebooks/python_module/06_grid_manipulation/pet_lavd.ipynb index 67983cec..a5ca088c 100644 --- a/notebooks/python_module/06_grid_manipulation/pet_lavd.ipynb +++ b/notebooks/python_module/06_grid_manipulation/pet_lavd.ipynb @@ -201,7 +201,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.2" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/06_grid_manipulation/pet_okubo_weiss.ipynb b/notebooks/python_module/06_grid_manipulation/pet_okubo_weiss.ipynb index b410be0a..ca4998ee 100644 --- a/notebooks/python_module/06_grid_manipulation/pet_okubo_weiss.ipynb +++ b/notebooks/python_module/06_grid_manipulation/pet_okubo_weiss.ipynb @@ -201,7 +201,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/07_cube_manipulation/pet_cube.ipynb b/notebooks/python_module/07_cube_manipulation/pet_cube.ipynb index a8ed7f1b..22cf3158 100644 --- a/notebooks/python_module/07_cube_manipulation/pet_cube.ipynb +++ b/notebooks/python_module/07_cube_manipulation/pet_cube.ipynb @@ -158,7 +158,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.2" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/07_cube_manipulation/pet_fsle_med.ipynb b/notebooks/python_module/07_cube_manipulation/pet_fsle_med.ipynb index 4f2e1467..a90c3b9f 100644 --- a/notebooks/python_module/07_cube_manipulation/pet_fsle_med.ipynb +++ b/notebooks/python_module/07_cube_manipulation/pet_fsle_med.ipynb @@ -33,7 +33,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## ADT in med\n:py:meth:`~py_eddy_tracker.dataset.grid.GridCollection.from_netcdf_cube` method is\nmade for data stores in time cube, you could use also \n:py:meth:`~py_eddy_tracker.dataset.grid.GridCollection.from_netcdf_list` method to\nload data-cube from multiple file.\n\n" + "## ADT in med\n:py:meth:`~py_eddy_tracker.dataset.grid.GridCollection.from_netcdf_cube` method is\nmade for data stores in time cube, you could use also\n:py:meth:`~py_eddy_tracker.dataset.grid.GridCollection.from_netcdf_list` method to\nload data-cube from multiple file.\n\n" ] }, { @@ -172,7 +172,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.2" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/07_cube_manipulation/pet_lavd_detection.ipynb b/notebooks/python_module/07_cube_manipulation/pet_lavd_detection.ipynb index f4e5f77e..bd197c57 100644 --- a/notebooks/python_module/07_cube_manipulation/pet_lavd_detection.ipynb +++ b/notebooks/python_module/07_cube_manipulation/pet_lavd_detection.ipynb @@ -194,7 +194,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.2" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/08_tracking_manipulation/pet_display_field.ipynb b/notebooks/python_module/08_tracking_manipulation/pet_display_field.ipynb index bf924b36..6e43e9a4 100644 --- a/notebooks/python_module/08_tracking_manipulation/pet_display_field.ipynb +++ b/notebooks/python_module/08_tracking_manipulation/pet_display_field.ipynb @@ -82,7 +82,7 @@ "name": "python", "nbconvert_exporter": "python", 
"pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/08_tracking_manipulation/pet_display_track.ipynb b/notebooks/python_module/08_tracking_manipulation/pet_display_track.ipynb index 1af7b49a..c98e53f0 100644 --- a/notebooks/python_module/08_tracking_manipulation/pet_display_track.ipynb +++ b/notebooks/python_module/08_tracking_manipulation/pet_display_track.ipynb @@ -118,7 +118,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/08_tracking_manipulation/pet_one_track.ipynb b/notebooks/python_module/08_tracking_manipulation/pet_one_track.ipynb index 2749f7e9..95595a7a 100644 --- a/notebooks/python_module/08_tracking_manipulation/pet_one_track.ipynb +++ b/notebooks/python_module/08_tracking_manipulation/pet_one_track.ipynb @@ -93,7 +93,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/08_tracking_manipulation/pet_run_a_tracking.ipynb b/notebooks/python_module/08_tracking_manipulation/pet_run_a_tracking.ipynb index e8871283..d0a2e5b0 100644 --- a/notebooks/python_module/08_tracking_manipulation/pet_run_a_tracking.ipynb +++ b/notebooks/python_module/08_tracking_manipulation/pet_run_a_tracking.ipynb @@ -154,7 +154,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/08_tracking_manipulation/pet_select_track_across_area.ipynb b/notebooks/python_module/08_tracking_manipulation/pet_select_track_across_area.ipynb index 5ba0d481..8e64b680 100644 --- a/notebooks/python_module/08_tracking_manipulation/pet_select_track_across_area.ipynb +++ b/notebooks/python_module/08_tracking_manipulation/pet_select_track_across_area.ipynb @@ -100,7 +100,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/08_tracking_manipulation/pet_track_anim.ipynb b/notebooks/python_module/08_tracking_manipulation/pet_track_anim.ipynb index 041c8987..65768145 100644 --- a/notebooks/python_module/08_tracking_manipulation/pet_track_anim.ipynb +++ b/notebooks/python_module/08_tracking_manipulation/pet_track_anim.ipynb @@ -82,7 +82,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/08_tracking_manipulation/pet_track_anim_matplotlib_animation.ipynb b/notebooks/python_module/08_tracking_manipulation/pet_track_anim_matplotlib_animation.ipynb index 9f77dbae..6d7fcc2e 100644 --- a/notebooks/python_module/08_tracking_manipulation/pet_track_anim_matplotlib_animation.ipynb +++ b/notebooks/python_module/08_tracking_manipulation/pet_track_anim_matplotlib_animation.ipynb @@ -93,7 +93,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/10_tracking_diagnostics/pet_birth_and_death.ipynb b/notebooks/python_module/10_tracking_diagnostics/pet_birth_and_death.ipynb index d9a2ef2b..635c6b5a 100644 --- a/notebooks/python_module/10_tracking_diagnostics/pet_birth_and_death.ipynb +++ 
b/notebooks/python_module/10_tracking_diagnostics/pet_birth_and_death.ipynb @@ -144,7 +144,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/10_tracking_diagnostics/pet_center_count.ipynb b/notebooks/python_module/10_tracking_diagnostics/pet_center_count.ipynb index b6bb15bd..753cd625 100644 --- a/notebooks/python_module/10_tracking_diagnostics/pet_center_count.ipynb +++ b/notebooks/python_module/10_tracking_diagnostics/pet_center_count.ipynb @@ -118,7 +118,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/10_tracking_diagnostics/pet_geographic_stats.ipynb b/notebooks/python_module/10_tracking_diagnostics/pet_geographic_stats.ipynb index 3e884552..df495703 100644 --- a/notebooks/python_module/10_tracking_diagnostics/pet_geographic_stats.ipynb +++ b/notebooks/python_module/10_tracking_diagnostics/pet_geographic_stats.ipynb @@ -118,7 +118,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/10_tracking_diagnostics/pet_groups.ipynb b/notebooks/python_module/10_tracking_diagnostics/pet_groups.ipynb index 85e32c6a..9f06e010 100644 --- a/notebooks/python_module/10_tracking_diagnostics/pet_groups.ipynb +++ b/notebooks/python_module/10_tracking_diagnostics/pet_groups.ipynb @@ -136,7 +136,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/10_tracking_diagnostics/pet_histo.ipynb b/notebooks/python_module/10_tracking_diagnostics/pet_histo.ipynb index 851c6ca4..81809d8b 100644 --- a/notebooks/python_module/10_tracking_diagnostics/pet_histo.ipynb +++ b/notebooks/python_module/10_tracking_diagnostics/pet_histo.ipynb @@ -82,7 +82,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/10_tracking_diagnostics/pet_lifetime.ipynb b/notebooks/python_module/10_tracking_diagnostics/pet_lifetime.ipynb index 4a3ff0af..ed8c0295 100644 --- a/notebooks/python_module/10_tracking_diagnostics/pet_lifetime.ipynb +++ b/notebooks/python_module/10_tracking_diagnostics/pet_lifetime.ipynb @@ -82,7 +82,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/10_tracking_diagnostics/pet_normalised_lifetime.ipynb b/notebooks/python_module/10_tracking_diagnostics/pet_normalised_lifetime.ipynb index 867e081f..a53f2d3a 100644 --- a/notebooks/python_module/10_tracking_diagnostics/pet_normalised_lifetime.ipynb +++ b/notebooks/python_module/10_tracking_diagnostics/pet_normalised_lifetime.ipynb @@ -111,7 +111,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/10_tracking_diagnostics/pet_pixel_used.ipynb b/notebooks/python_module/10_tracking_diagnostics/pet_pixel_used.ipynb index 81bed372..23f830d6 100644 --- a/notebooks/python_module/10_tracking_diagnostics/pet_pixel_used.ipynb +++ 
b/notebooks/python_module/10_tracking_diagnostics/pet_pixel_used.ipynb @@ -118,7 +118,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/10_tracking_diagnostics/pet_propagation.ipynb b/notebooks/python_module/10_tracking_diagnostics/pet_propagation.ipynb index e0d1f2d2..9792f8f4 100644 --- a/notebooks/python_module/10_tracking_diagnostics/pet_propagation.ipynb +++ b/notebooks/python_module/10_tracking_diagnostics/pet_propagation.ipynb @@ -118,7 +118,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/12_external_data/pet_SST_collocation.ipynb b/notebooks/python_module/12_external_data/pet_SST_collocation.ipynb index 05b0413c..b30682a1 100644 --- a/notebooks/python_module/12_external_data/pet_SST_collocation.ipynb +++ b/notebooks/python_module/12_external_data/pet_SST_collocation.ipynb @@ -226,7 +226,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/14_generic_tools/pet_fit_contour.ipynb b/notebooks/python_module/14_generic_tools/pet_fit_contour.ipynb index 5306fa0c..a46a7e22 100644 --- a/notebooks/python_module/14_generic_tools/pet_fit_contour.ipynb +++ b/notebooks/python_module/14_generic_tools/pet_fit_contour.ipynb @@ -100,7 +100,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.2" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/14_generic_tools/pet_visvalingam.ipynb b/notebooks/python_module/14_generic_tools/pet_visvalingam.ipynb index 0183abde..69e49b57 100644 --- a/notebooks/python_module/14_generic_tools/pet_visvalingam.ipynb +++ b/notebooks/python_module/14_generic_tools/pet_visvalingam.ipynb @@ -75,7 +75,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/16_network/pet_atlas.ipynb b/notebooks/python_module/16_network/pet_atlas.ipynb index ee8f1934..31e3580f 100644 --- a/notebooks/python_module/16_network/pet_atlas.ipynb +++ b/notebooks/python_module/16_network/pet_atlas.ipynb @@ -363,7 +363,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.2" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/16_network/pet_follow_particle.ipynb b/notebooks/python_module/16_network/pet_follow_particle.ipynb index 28d0048d..6be13adf 100644 --- a/notebooks/python_module/16_network/pet_follow_particle.ipynb +++ b/notebooks/python_module/16_network/pet_follow_particle.ipynb @@ -26,7 +26,7 @@ }, "outputs": [], "source": [ - "import re\n\nfrom matplotlib import colors\nfrom matplotlib import pyplot as plt\nfrom matplotlib.animation import FuncAnimation\nfrom numba import njit\nfrom numba import types as nb_types\nfrom numpy import arange, meshgrid, ones, unique, where, zeros\n\nfrom py_eddy_tracker import start_logger\nfrom py_eddy_tracker.appli.gui import Anim\nfrom py_eddy_tracker.data import get_demo_path\nfrom py_eddy_tracker.dataset.grid import GridCollection\nfrom py_eddy_tracker.observations.network import NetworkObservations\nfrom py_eddy_tracker.poly import group_obs\n\nstart_logger().setLevel(\"ERROR\")" + 
"import re\n\nfrom matplotlib import colors\nfrom matplotlib import pyplot as plt\nfrom matplotlib.animation import FuncAnimation\nfrom numpy import arange, meshgrid, ones, unique, zeros\n\nfrom py_eddy_tracker import start_logger\nfrom py_eddy_tracker.appli.gui import Anim\nfrom py_eddy_tracker.data import get_demo_path\nfrom py_eddy_tracker.dataset.grid import GridCollection\nfrom py_eddy_tracker.observations.groups import particle_candidate\nfrom py_eddy_tracker.observations.network import NetworkObservations\nfrom py_eddy_tracker.poly import group_obs\n\nstart_logger().setLevel(\"ERROR\")" ] }, { @@ -105,24 +105,6 @@ "cmap = colors.ListedColormap(list(n.COLORS), name=\"from_list\", N=n.segment.max() + 1)\na = Anim(\n n,\n intern=False,\n figsize=(12, 6),\n nb_step=1,\n dpi=60,\n field_color=\"segment\",\n field_txt=\"segment\",\n cmap=cmap,\n)\na.fig.suptitle(\"\"), a.ax.set_xlim(24, 36), a.ax.set_ylim(30, 36)\na.txt.set_position((25, 31))\n\nstep = 0.25\nkw_p = dict(nb_step=2, time_step=86400 * step * 0.5, t_init=t_snapshot - 2 * step)\n\nmappables = dict()\nparticules = c.advect(x, y, \"u\", \"v\", **kw_p)\nfilament = c.filament(x_f, y_f, \"u\", \"v\", **kw_p, filament_size=3)\nkw = dict(ls=\"\", marker=\".\", markersize=0.25)\nfor k in index_:\n m = k == index\n mappables[k] = a.ax.plot([], [], color=cmap(k), **kw)[0]\nm_filament = a.ax.plot([], [], lw=0.25, color=\"gray\")[0]\n\n\ndef update(frame):\n tt, xt, yt = particules.__next__()\n for k, mappable in mappables.items():\n m = index == k\n mappable.set_data(xt[m], yt[m])\n tt, xt, yt = filament.__next__()\n m_filament.set_data(xt, yt)\n if frame % 1 == 0:\n a.func_animation(frame)\n\n\nani = VideoAnimation(a.fig, update, frames=arange(20200, 20269, step), interval=200)" ] }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## In which observations are the particle\n\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "def advect(x, y, c, t0, delta_t):\n \"\"\"\n Advect particle from t0 to t0 + delta_t, with data cube.\n \"\"\"\n kw = dict(nb_step=6, time_step=86400 / 6)\n if delta_t < 0:\n kw[\"backward\"] = True\n delta_t = -delta_t\n p = c.advect(x, y, \"u\", \"v\", t_init=t0, **kw)\n for _ in range(delta_t):\n t, x, y = p.__next__()\n return t, x, y\n\n\ndef particle_candidate(x, y, c, eddies, t_start, i_target, pct, **kwargs):\n # Obs from initial time\n m_start = eddies.time == t_start\n e = eddies.extract_with_mask(m_start)\n # to be able to get global index\n translate_start = where(m_start)[0]\n # Identify particle in eddies (only in core)\n i_start = e.contains(x, y, intern=True)\n m = i_start != -1\n x, y, i_start = x[m], y[m], i_start[m]\n # Advect\n t_end, x, y = advect(x, y, c, t_start, **kwargs)\n # eddies at last date\n m_end = eddies.time == t_end / 86400\n e_end = eddies.extract_with_mask(m_end)\n # to be able to get global index\n translate_end = where(m_end)[0]\n # Id eddies for each alive particle (in core and extern)\n i_end = e_end.contains(x, y)\n # compute matrix and fill target array\n get_matrix(i_start, i_end, translate_start, translate_end, i_target, pct)\n\n\n@njit(cache=True)\ndef get_matrix(i_start, i_end, translate_start, translate_end, i_target, pct):\n nb_start, nb_end = translate_start.size, translate_end.size\n # Matrix which will store count for every couple\n count = zeros((nb_start, nb_end), dtype=nb_types.int32)\n # Number of particles in each origin observation\n ref = 
zeros(nb_start, dtype=nb_types.int32)\n # For each particle\n for i in range(i_start.size):\n i_end_ = i_end[i]\n i_start_ = i_start[i]\n if i_end_ != -1:\n count[i_start_, i_end_] += 1\n ref[i_start_] += 1\n for i in range(nb_start):\n for j in range(nb_end):\n pct_ = count[i, j]\n # If there are particles from i to j\n if pct_ != 0:\n # Get percent\n pct_ = pct_ / ref[i] * 100.0\n # Get indices in full dataset\n i_, j_ = translate_start[i], translate_end[j]\n pct_0 = pct[i_, 0]\n if pct_ > pct_0:\n pct[i_, 1] = pct_0\n pct[i_, 0] = pct_\n i_target[i_, 1] = i_target[i_, 0]\n i_target[i_, 0] = j_\n elif pct_ > pct[i_, 1]:\n pct[i_, 1] = pct_\n i_target[i_, 1] = j_\n return i_target, pct" - ] - }, { "cell_type": "markdown", "metadata": {}, @@ -138,7 +120,7 @@ }, "outputs": [], "source": [ - "step = 1 / 60.0\n\nx, y = meshgrid(arange(24, 36, step), arange(31, 36, step))\nx0, y0 = x.reshape(-1), y.reshape(-1)\n# Pre-order to speed up\n_, i = group_obs(x0, y0, 1, 360)\nx0, y0 = x0[i], y0[i]\n\nt_start, t_end = n.period\ndt = 14\n\nshape = (n.obs.size, 2)\n# Forward run\ni_target_f, pct_target_f = -ones(shape, dtype=\"i4\"), zeros(shape, dtype=\"i1\")\nfor t in range(t_start, t_end - dt):\n particle_candidate(x0, y0, c, n, t, i_target_f, pct_target_f, delta_t=dt)\n\n# Backward run\ni_target_b, pct_target_b = -ones(shape, dtype=\"i4\"), zeros(shape, dtype=\"i1\")\nfor t in range(t_start + dt, t_end):\n particle_candidate(x0, y0, c, n, t, i_target_b, pct_target_b, delta_t=-dt)" + "step = 1 / 60.0\n\nx, y = meshgrid(arange(24, 36, step), arange(31, 36, step))\nx0, y0 = x.reshape(-1), y.reshape(-1)\n# Pre-order to speed up\n_, i = group_obs(x0, y0, 1, 360)\nx0, y0 = x0[i], y0[i]\n\nt_start, t_end = n.period\ndt = 14\n\nshape = (n.obs.size, 2)\n# Forward run\ni_target_f, pct_target_f = -ones(shape, dtype=\"i4\"), zeros(shape, dtype=\"i1\")\nfor t in range(t_start, t_end - dt):\n particle_candidate(x0, y0, c, n, t, i_target_f, pct_target_f, n_days=dt)\n\n# Backward run\ni_target_b, pct_target_b = -ones(shape, dtype=\"i4\"), zeros(shape, dtype=\"i1\")\nfor t in range(t_start + dt, t_end):\n particle_candidate(x0, y0, c, n, t, i_target_b, pct_target_b, n_days=-dt)" ] }, { @@ -169,7 +151,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.2" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/16_network/pet_group_anim.ipynb b/notebooks/python_module/16_network/pet_group_anim.ipynb index ffb9dd17..7129259c 100644 --- a/notebooks/python_module/16_network/pet_group_anim.ipynb +++ b/notebooks/python_module/16_network/pet_group_anim.ipynb @@ -205,7 +205,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/16_network/pet_ioannou_2017_case.ipynb b/notebooks/python_module/16_network/pet_ioannou_2017_case.ipynb index 9b3d40d6..788e94ca 100644 --- a/notebooks/python_module/16_network/pet_ioannou_2017_case.ipynb +++ b/notebooks/python_module/16_network/pet_ioannou_2017_case.ipynb @@ -26,7 +26,7 @@ }, "outputs": [], "source": [ - "import re\nfrom datetime import datetime, timedelta\n\nfrom matplotlib import colors\nfrom matplotlib import pyplot as plt\nfrom matplotlib.animation import FuncAnimation\nfrom matplotlib.ticker import FuncFormatter\nfrom numpy import arange, where, array, pi\n\nfrom py_eddy_tracker.appli.gui import Anim\nfrom py_eddy_tracker.data import get_demo_path\nfrom py_eddy_tracker.gui import 
GUI_AXES\nfrom py_eddy_tracker.observations.network import NetworkObservations\n\nfrom py_eddy_tracker.generic import coordinates_to_local\nfrom py_eddy_tracker.poly import fit_ellipse" + "import re\nfrom datetime import datetime, timedelta\n\nfrom matplotlib import colors\nfrom matplotlib import pyplot as plt\nfrom matplotlib.animation import FuncAnimation\nfrom matplotlib.ticker import FuncFormatter\nfrom numpy import arange, array, pi, where\n\nfrom py_eddy_tracker.appli.gui import Anim\nfrom py_eddy_tracker.data import get_demo_path\nfrom py_eddy_tracker.generic import coordinates_to_local\nfrom py_eddy_tracker.gui import GUI_AXES\nfrom py_eddy_tracker.observations.network import NetworkObservations\nfrom py_eddy_tracker.poly import fit_ellipse" ] }, { @@ -338,7 +338,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.2" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/16_network/pet_relative.ipynb b/notebooks/python_module/16_network/pet_relative.ipynb index cee4010a..9f3fd3d9 100644 --- a/notebooks/python_module/16_network/pet_relative.ipynb +++ b/notebooks/python_module/16_network/pet_relative.ipynb @@ -539,7 +539,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.2" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/16_network/pet_replay_segmentation.ipynb b/notebooks/python_module/16_network/pet_replay_segmentation.ipynb index 48f4955b..7c632138 100644 --- a/notebooks/python_module/16_network/pet_replay_segmentation.ipynb +++ b/notebooks/python_module/16_network/pet_replay_segmentation.ipynb @@ -172,7 +172,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.2" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/16_network/pet_segmentation_anim.ipynb b/notebooks/python_module/16_network/pet_segmentation_anim.ipynb index ae36381c..05c68873 100644 --- a/notebooks/python_module/16_network/pet_segmentation_anim.ipynb +++ b/notebooks/python_module/16_network/pet_segmentation_anim.ipynb @@ -147,7 +147,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.2" + "version": "3.7.9" } }, "nbformat": 4, diff --git a/notebooks/python_module/16_network/pet_something_cool.ipynb b/notebooks/python_module/16_network/pet_something_cool.ipynb new file mode 100644 index 00000000..158852f9 --- /dev/null +++ b/notebooks/python_module/16_network/pet_something_cool.ipynb @@ -0,0 +1,65 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "%matplotlib inline" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "\n# essai\n\non tente des trucs\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "import cartopy.crs as ccrs\nimport cartopy.feature as cfeature\nimport numpy as np\nfrom matplotlib import pyplot as plt\n\nfrom py_eddy_tracker.observations.network import NetworkObservations\n\n\ndef rect_from_extent(extent):\n rect_lon = [extent[0], extent[1], extent[1], extent[0], extent[0]]\n rect_lat = [extent[2], extent[2], extent[3], extent[3], extent[2]]\n return rect_lon, rect_lat\n\n\ndef indice_from_extent(lon, lat, extent):\n mask = (lon > extent[0]) * (lon < extent[1]) * (lat > extent[2]) * (lat < extent[3])\n return 
np.where(mask)[0]\n\n\nfichier = \"/data/adelepoulle/work/Eddies/20201217_network_build/big_network.nc\"\nnetwork = NetworkObservations.load_file(fichier)\nsub_network = network.network(1078566)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "# extent_begin = [0, 2, -50, -15]\n# extent_end = [-42, -35, -40, -10]\n\nextent_begin = [2, 22, -50, -30]\ni_obs_begin = indice_from_extent(\n sub_network.longitude, sub_network.latitude, extent_begin\n)\nnetwork_begin = sub_network.find_link(i_obs_begin)\ntime_mini = network_begin.time.min()\ntime_maxi = network_begin.time.max()\n\nextent_end = [-52, -45, -37, -33]\ni_obs_end = indice_from_extent(\n (network_begin.longitude + 180) % 360 - 180, network_begin.latitude, extent_end\n)\nnetwork_end = network_begin.find_link(i_obs_end, forward=False, backward=True)\n\n\ndatasets = [network_begin, network_end]\nextents = [extent_begin, extent_end]\nfig, (ax1, ax2) = plt.subplots(\n 2, 1, figsize=(10, 9), dpi=140, subplot_kw={\"projection\": ccrs.PlateCarree()}\n)\n\nfor ax, dataset, extent in zip([ax1, ax2], datasets, extents):\n sca = dataset.scatter(\n ax,\n name=\"time\",\n cmap=\"Spectral_r\",\n label=\"observation dans le temps\",\n vmin=time_mini,\n vmax=time_maxi,\n )\n\n x, y = rect_from_extent(extent)\n ax.fill(x, y, color=\"grey\", alpha=0.3, label=\"observations choisies\")\n # ax.plot(x, y, marker='o')\n\n ax.legend()\n\n gridlines = ax.gridlines(\n alpha=0.2, color=\"black\", linestyle=\"dotted\", draw_labels=True, dms=True\n )\n\n gridlines.left_labels = False\n gridlines.top_labels = False\n\n ax.coastlines()\n ax.add_feature(cfeature.LAND)\n ax.add_feature(cfeature.LAKES, zorder=10)\n ax.add_feature(cfeature.BORDERS, lw=0.25)\n ax.add_feature(cfeature.OCEAN, alpha=0.2)\n\n\nax1.set_title(\n \"Recherche du d\u00e9placement de l'eau dans les eddies \u00e0 travers les observations choisies\"\n)\nax2.set_title(\"Recherche de la provenance de l'eau \u00e0 travers les observations choisies\")\nax2.set_extent(ax1.get_extent(), ccrs.PlateCarree())\n\nfig.subplots_adjust(right=0.87, left=0.02)\ncbar_ax = fig.add_axes([0.90, 0.1, 0.02, 0.8])\ncbar = fig.colorbar(sca[\"scatter\"], cax=cbar_ax, orientation=\"vertical\")\n_ = cbar.set_label(\"time (jj)\", rotation=270, labelpad=-65)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.9" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} \ No newline at end of file diff --git a/src/py_eddy_tracker/dataset/grid.py b/src/py_eddy_tracker/dataset/grid.py index 28fa8526..bd9e70d3 100644 --- a/src/py_eddy_tracker/dataset/grid.py +++ b/src/py_eddy_tracker/dataset/grid.py @@ -2264,12 +2264,12 @@ def from_netcdf_list(cls, filenames, t, x_name, y_name, indexs=None, heigth=None new.datasets.append((t, d)) return new - def shift_files(self, t, filename, x_name, y_name, indexs=None, heigth=None): + def shift_files(self, t, filename, heigth=None, **rgd_kwargs): """Add next file to the list and remove the oldest""" self.datasets = self.datasets[1:] - d = RegularGridDataset(filename, x_name, y_name, indexs=indexs) + d = RegularGridDataset(filename, **rgd_kwargs) if heigth is not None: d.add_uv(heigth) 
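self.datasets.append((t, d))

With **rgd_kwargs, every keyword other than heigth is now forwarded verbatim to RegularGridDataset. Below is a minimal sketch of a call under the new signature; the file names, the "adt" height variable and the index bounds are invented for illustration, while shift_files, from_netcdf_list, x_name, y_name, indexs and heigth (spelled as in the API) come from the hunks above.

from py_eddy_tracker.dataset.grid import GridCollection

# rolling two-day window of regular grids
c = GridCollection.from_netcdf_list(
    ["day1.nc", "day2.nc"], t=[0, 1], x_name="longitude", y_name="latitude"
)
# push a third day in, dropping the oldest one
c.shift_files(
    2,
    "day3.nc",
    heigth="adt",  # hypothetical height variable, used by add_uv
    # everything below is forwarded untouched to RegularGridDataset
    x_name="longitude",
    y_name="latitude",
    indexs=dict(latitude=slice(100, 220)),
)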
diff --git a/src/py_eddy_tracker/observations/groups.py b/src/py_eddy_tracker/observations/groups.py
index 835101ff..c0924cb3 100644
--- a/src/py_eddy_tracker/observations/groups.py
+++ b/src/py_eddy_tracker/observations/groups.py
@@ -1,8 +1,9 @@
 import logging
 from abc import ABC, abstractmethod

-from numba import njit, types as nb_types
-from numpy import arange, int32, interp, median, zeros, where
+from numba import njit
+from numba import types as nb_types
+from numpy import arange, int32, interp, median, where, zeros

 from .observation import EddiesObservations

@@ -65,7 +66,6 @@ def get_missing_indices(
     return indices

-
 def advect(x, y, c, t0, n_days):
     """
     Advect particle from t0 to t0 + n_days, with data cube.
diff --git a/src/py_eddy_tracker/observations/network.py b/src/py_eddy_tracker/observations/network.py
index 685b3e42..0e5b9576 100644
--- a/src/py_eddy_tracker/observations/network.py
+++ b/src/py_eddy_tracker/observations/network.py
@@ -5,6 +5,7 @@
 import logging
 from glob import glob

+import zarr
 from numba import njit
 from numba import types as nb_types
 from numpy import (
@@ -15,22 +16,21 @@
     concatenate,
     empty,
     in1d,
+    meshgrid,
     ones,
     uint16,
     uint32,
     unique,
     where,
     zeros,
-    meshgrid,
 )
-import zarr

+from ..dataset.grid import GridCollection
 from ..generic import build_index, wrap_longitude
-from ..poly import bbox_intersection, vertice_overlap, group_obs
+from ..poly import bbox_intersection, group_obs, vertice_overlap
 from .groups import GroupEddiesObservations, get_missing_indices, particle_candidate
 from .observation import EddiesObservations
 from .tracking import TrackEddiesObservations, track_loess_filter, track_median_filter
-from ..dataset.grid import GridCollection

 logger = logging.getLogger("pet")

From d76da6ac1af0a0d5ab37d9d2c11c5ebba2dfc71e Mon Sep 17 00:00:00 2001
From: AntSimi <36040805+AntSimi@users.noreply.github.com>
Date: Mon, 17 May 2021 21:20:17 +0200
Subject: [PATCH 025/115] Add test to solve one problem of #88

---
 src/py_eddy_tracker/observations/observation.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/src/py_eddy_tracker/observations/observation.py b/src/py_eddy_tracker/observations/observation.py
index 3d91ad42..0dc4ed69 100644
--- a/src/py_eddy_tracker/observations/observation.py
+++ b/src/py_eddy_tracker/observations/observation.py
@@ -2056,7 +2056,8 @@ def contains(self, x, y, intern=False):
         xname, yname = self.intern(intern)
         m = ~(isnan(x) + isnan(y))
         i = -ones(x.shape, dtype="i4")
-        i[m] = poly_indexs(x[m], y[m], self[xname], self[yname])
+        if x.size != 0:
+            i[m] = poly_indexs(x[m], y[m], self[xname], self[yname])
         return i

     def inside(self, x, y, intern=False):

From efb8faa9b6157b60f0017c4502f41de75e38a992 Mon Sep 17 00:00:00 2001
From: AntSimi <36040805+AntSimi@users.noreply.github.com>
Date: Thu, 20 May 2021 15:35:12 +0200
Subject: [PATCH 026/115] Solve issue #88

---
 src/py_eddy_tracker/generic.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/src/py_eddy_tracker/generic.py b/src/py_eddy_tracker/generic.py
index 283b4b9e..a48c0e2f 100644
--- a/src/py_eddy_tracker/generic.py
+++ b/src/py_eddy_tracker/generic.py
@@ -89,8 +89,7 @@ def build_index(groups):
         first_index[group - i0 + 1 : next_group - i0 + 1] = i + 1
     last_index = zeros(amplitude, dtype=numba_types.int_)
     last_index[:-1] = first_index[1:]
-    # + 2 because we iterate only until -2 and we want upper bound ( 1 + 1)
-    last_index[-1] = i + 2
+    last_index[-1] = len(groups)
     return first_index, last_index, i0
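A worked example of what the fixed build_index is expected to return; the input array is invented for illustration and the expected values are derived by hand from the hunk above rather than taken from the project's test suite.

from numpy import array

from py_eddy_tracker.generic import build_index

groups = array([2, 2, 3, 5, 5, 5])  # sorted group identifiers
first, last, i0 = build_index(groups)
# members of group g are groups[first[g - i0]:last[g - i0]]
# expected: i0 == 2, first == [0, 2, 3, 3], last == [2, 3, 3, 6]
# len(groups) is the exclusive end of the last bucket however the loop
# exits, while the old "i + 2" depended on the value of the loop variable
assert last[-1] == len(groups)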
From 3d705bec3cc46969c62f6416b1ae64061d9b6da3 Mon Sep 17 00:00:00 2001
From: Cori Pegliasco
Date: Wed, 26 May 2021 10:16:42 +0200
Subject: [PATCH 027/115] - change parameter in particle_candidate
 - add test for mask in contains
 - minor English fixes

---
 src/py_eddy_tracker/observations/network.py | 4 ++--
 src/py_eddy_tracker/observations/observation.py | 3 ++-
 src/py_eddy_tracker/poly.py | 4 ++--
 3 files changed, 6 insertions(+), 5 deletions(-)

diff --git a/src/py_eddy_tracker/observations/network.py b/src/py_eddy_tracker/observations/network.py
index 0e5b9576..4f9af0b3 100644
--- a/src/py_eddy_tracker/observations/network.py
+++ b/src/py_eddy_tracker/observations/network.py
@@ -1464,7 +1464,7 @@ def date2file(julian_day):
                 _t,
                 i_target_f,
                 pct_target_f,
-                delta_t=dt_advect,
+                n_days=dt_advect,
             )
             itf_final[slice_track] = i_target_f
@@ -1495,7 +1495,7 @@ def date2file(julian_day):
                 _t,
                 i_target_b,
                 pct_target_b,
-                delta_t=-dt_advect,
+                n_days=-dt_advect,
             )
             itb_final[slice_track] = i_target_b
diff --git a/src/py_eddy_tracker/observations/observation.py b/src/py_eddy_tracker/observations/observation.py
index 0dc4ed69..054ba81a 100644
--- a/src/py_eddy_tracker/observations/observation.py
+++ b/src/py_eddy_tracker/observations/observation.py
@@ -2056,7 +2056,8 @@ def contains(self, x, y, intern=False):
         xname, yname = self.intern(intern)
         m = ~(isnan(x) + isnan(y))
         i = -ones(x.shape, dtype="i4")
-        if x.size != 0:
+
+        if x.size != 0 and True in m:
             i[m] = poly_indexs(x[m], y[m], self[xname], self[yname])
         return i
diff --git a/src/py_eddy_tracker/poly.py b/src/py_eddy_tracker/poly.py
index fc36185b..0f0271ee 100644
--- a/src/py_eddy_tracker/poly.py
+++ b/src/py_eddy_tracker/poly.py
@@ -854,8 +854,8 @@ def poly_indexs(x_p, y_p, x_c, y_c):
     """
     Index of contour for each postion inside a contour, -1 in case of no contour

-    :param array x_p: longitude to test (must be define, no nan)
-    :param array y_p: latitude to test (must be define, no nan)
+    :param array x_p: longitude to test (must be defined, no nan)
+    :param array y_p: latitude to test (must be defined, no nan)
     :param array x_c: longitude of contours
     :param array y_c: latitude of contours
     """

From 918fbf2529e2aa87e6d68fce95745b4add3e6a42 Mon Sep 17 00:00:00 2001
From: Cori Pegliasco
Date: Wed, 26 May 2021 11:17:41 +0200
Subject: [PATCH 028/115] Apply pull request corrections

---
 src/py_eddy_tracker/observations/observation.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/py_eddy_tracker/observations/observation.py b/src/py_eddy_tracker/observations/observation.py
index 054ba81a..557c0279 100644
--- a/src/py_eddy_tracker/observations/observation.py
+++ b/src/py_eddy_tracker/observations/observation.py
@@ -2057,7 +2057,7 @@ def contains(self, x, y, intern=False):
         m = ~(isnan(x) + isnan(y))
         i = -ones(x.shape, dtype="i4")

-        if x.size != 0 and True in m:
+        if x.size != 0 and m.any():
             i[m] = poly_indexs(x[m], y[m], self[xname], self[yname])
         return i

From 0d9980802a6e995284d4dc65608a8fc908a16988 Mon Sep 17 00:00:00 2001
From: AntSimi <36040805+AntSimi@users.noreply.github.com>
Date: Mon, 31 May 2021 22:01:28 +0200
Subject: [PATCH 029/115] Replace log2 with log in FSLE

---
 examples/07_cube_manipulation/pet_fsle_med.py | 6 ++---
 .../07_cube_manipulation/pet_fsle_med.ipynb | 24 +++++++++----------
 2 files changed, 15 insertions(+), 15 deletions(-)
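The move from log2 to natural log only rescales the exponent by a constant, as the sketch below shows; x and dt are arbitrary values chosen for illustration, only the 1 / (2 * dt) prefactor and the two numpy functions appear in the diff that follows. Because the example maps 1 / fsle, the reciprocals grow by 1 / log(2), roughly 1.44, under the new convention, which is consistent with the colorbar floor moving from -15 to -20 below.

from numpy import isclose, log, log2

x, dt = 250.0, 1.0  # arbitrary separation term and time step
old = 1 / (2 * dt) * log2(x)  # exponent in bits per unit time
new = 1 / (2 * dt) * log(x)  # exponent in nats per unit time
# change of base: log(x) == log2(x) * log(2)
assert isclose(new, old * log(2))

diff --git a/examples/07_cube_manipulation/pet_fsle_med.py b/examples/07_cube_manipulation/pet_fsle_med.py
index b4a51265..cc221cf7 100644
--- a/examples/07_cube_manipulation/pet_fsle_med.py
+++ 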
b/examples/07_cube_manipulation/pet_fsle_med.py @@ -14,7 +14,7 @@ from matplotlib import pyplot as plt from numba import njit -from numpy import arange, arctan2, empty, isnan, log2, ma, meshgrid, ones, pi, zeros +from numpy import arange, arctan2, empty, isnan, log, ma, meshgrid, ones, pi, zeros from py_eddy_tracker import start_logger from py_eddy_tracker.data import get_demo_path @@ -71,7 +71,7 @@ def check_p(x, y, flse, theta, m_set, m, dt, dist_init=0.02, dist_max=0.6): s2 = ((dxn + dye) ** 2 + (dxe - dyn) ** 2) * ( (dxn - dye) ** 2 + (dxe + dyn) ** 2 ) - flse[i] = 1 / (2 * dt) * log2(1 / (2 * dist_init ** 2) * (s1 + s2 ** 0.5)) + flse[i] = 1 / (2 * dt) * log(1 / (2 * dist_init ** 2) * (s1 + s2 ** 0.5)) theta[i] = arctan2(at1, at2 + s2) * 180 / pi # To know where value are set m_set[i] = False @@ -180,7 +180,7 @@ def build_triplet(x, y, step=0.02): ax.set_xlim(-6, 36.5), ax.set_ylim(30, 46) ax.set_aspect("equal") ax.set_title("Finite size lyapunov exponent", weight="bold") -kw = dict(cmap="viridis_r", vmin=-15, vmax=0) +kw = dict(cmap="viridis_r", vmin=-20, vmax=0) m = fsle_custom.display(ax, 1 / fsle_custom.grid("fsle"), **kw) ax.grid() _ = plt.colorbar(m, cax=fig.add_axes([0.94, 0.05, 0.01, 0.9])) diff --git a/notebooks/python_module/07_cube_manipulation/pet_fsle_med.ipynb b/notebooks/python_module/07_cube_manipulation/pet_fsle_med.ipynb index a90c3b9f..8ee136b3 100644 --- a/notebooks/python_module/07_cube_manipulation/pet_fsle_med.ipynb +++ b/notebooks/python_module/07_cube_manipulation/pet_fsle_med.ipynb @@ -15,7 +15,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\n# FSLE experiment in med\n\nExample to build Finite Size Lyapunov Exponents, parameter values must be adapted for your case.\n\nExample use a method similar to `AVISO flse`_\n\n https://www.aviso.altimetry.fr/en/data/products/value-added-products/\n fsle-finite-size-lyapunov-exponents/fsle-description.html\n" + "\nFSLE experiment in med\n======================\n\nExample to build Finite Size Lyapunov Exponents, parameter values must be adapted for your case.\n\nExample use a method similar to `AVISO flse`_\n\n https://www.aviso.altimetry.fr/en/data/products/value-added-products/\n fsle-finite-size-lyapunov-exponents/fsle-description.html\n" ] }, { @@ -26,14 +26,14 @@ }, "outputs": [], "source": [ - "from matplotlib import pyplot as plt\nfrom numba import njit\nfrom numpy import arange, arctan2, empty, isnan, log2, ma, meshgrid, ones, pi, zeros\n\nfrom py_eddy_tracker import start_logger\nfrom py_eddy_tracker.data import get_demo_path\nfrom py_eddy_tracker.dataset.grid import GridCollection, RegularGridDataset\n\nstart_logger().setLevel(\"ERROR\")" + "from matplotlib import pyplot as plt\nfrom numba import njit\nfrom numpy import arange, arctan2, empty, isnan, log, ma, meshgrid, ones, pi, zeros\n\nfrom py_eddy_tracker import start_logger\nfrom py_eddy_tracker.data import get_demo_path\nfrom py_eddy_tracker.dataset.grid import GridCollection, RegularGridDataset\n\nstart_logger().setLevel(\"ERROR\")" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "## ADT in med\n:py:meth:`~py_eddy_tracker.dataset.grid.GridCollection.from_netcdf_cube` method is\nmade for data stores in time cube, you could use also\n:py:meth:`~py_eddy_tracker.dataset.grid.GridCollection.from_netcdf_list` method to\nload data-cube from multiple file.\n\n" + "ADT in med\n----------\n:py:meth:`~py_eddy_tracker.dataset.grid.GridCollection.from_netcdf_cube` method is\nmade for data stores in time cube, you could use also 
\n:py:meth:`~py_eddy_tracker.dataset.grid.GridCollection.from_netcdf_list` method to\nload data-cube from multiple file.\n\n" ] }, { @@ -51,7 +51,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## Methods to compute FSLE\n\n" + "Methods to compute FSLE\n-----------------------\n\n" ] }, { @@ -62,14 +62,14 @@ }, "outputs": [], "source": [ - "@njit(cache=True, fastmath=True)\ndef check_p(x, y, flse, theta, m_set, m, dt, dist_init=0.02, dist_max=0.6):\n \"\"\"\n Check if distance between eastern or northern particle to center particle is bigger than `dist_max`\n \"\"\"\n nb_p = x.shape[0] // 3\n delta = dist_max ** 2\n for i in range(nb_p):\n i0 = i * 3\n i_n = i0 + 1\n i_e = i0 + 2\n # If particle already set, we skip\n if m[i0] or m[i_n] or m[i_e]:\n continue\n # Distance with north\n dxn, dyn = x[i0] - x[i_n], y[i0] - y[i_n]\n dn = dxn ** 2 + dyn ** 2\n # Distance with east\n dxe, dye = x[i0] - x[i_e], y[i0] - y[i_e]\n de = dxe ** 2 + dye ** 2\n\n if dn >= delta or de >= delta:\n s1 = dn + de\n at1 = 2 * (dxe * dxn + dye * dyn)\n at2 = de - dn\n s2 = ((dxn + dye) ** 2 + (dxe - dyn) ** 2) * (\n (dxn - dye) ** 2 + (dxe + dyn) ** 2\n )\n flse[i] = 1 / (2 * dt) * log2(1 / (2 * dist_init ** 2) * (s1 + s2 ** 0.5))\n theta[i] = arctan2(at1, at2 + s2) * 180 / pi\n # To know where value are set\n m_set[i] = False\n # To stop particle advection\n m[i0], m[i_n], m[i_e] = True, True, True\n\n\n@njit(cache=True)\ndef build_triplet(x, y, step=0.02):\n \"\"\"\n Triplet building for each position we add east and north point with defined step\n \"\"\"\n nb_x = x.shape[0]\n x_ = empty(nb_x * 3, dtype=x.dtype)\n y_ = empty(nb_x * 3, dtype=y.dtype)\n for i in range(nb_x):\n i0 = i * 3\n i_n, i_e = i0 + 1, i0 + 2\n x__, y__ = x[i], y[i]\n x_[i0], y_[i0] = x__, y__\n x_[i_n], y_[i_n] = x__, y__ + step\n x_[i_e], y_[i_e] = x__ + step, y__\n return x_, y_" + "@njit(cache=True, fastmath=True)\ndef check_p(x, y, flse, theta, m_set, m, dt, dist_init=0.02, dist_max=0.6):\n \"\"\"\n Check if distance between eastern or northern particle to center particle is bigger than `dist_max`\n \"\"\"\n nb_p = x.shape[0] // 3\n delta = dist_max ** 2\n for i in range(nb_p):\n i0 = i * 3\n i_n = i0 + 1\n i_e = i0 + 2\n # If particle already set, we skip\n if m[i0] or m[i_n] or m[i_e]:\n continue\n # Distance with north\n dxn, dyn = x[i0] - x[i_n], y[i0] - y[i_n]\n dn = dxn ** 2 + dyn ** 2\n # Distance with east\n dxe, dye = x[i0] - x[i_e], y[i0] - y[i_e]\n de = dxe ** 2 + dye ** 2\n\n if dn >= delta or de >= delta:\n s1 = dn + de\n at1 = 2 * (dxe * dxn + dye * dyn)\n at2 = de - dn\n s2 = ((dxn + dye) ** 2 + (dxe - dyn) ** 2) * (\n (dxn - dye) ** 2 + (dxe + dyn) ** 2\n )\n flse[i] = 1 / (2 * dt) * log(1 / (2 * dist_init ** 2) * (s1 + s2 ** 0.5))\n theta[i] = arctan2(at1, at2 + s2) * 180 / pi\n # To know where value are set\n m_set[i] = False\n # To stop particle advection\n m[i0], m[i_n], m[i_e] = True, True, True\n\n\n@njit(cache=True)\ndef build_triplet(x, y, step=0.02):\n \"\"\"\n Triplet building for each position we add east and north point with defined step\n \"\"\"\n nb_x = x.shape[0]\n x_ = empty(nb_x * 3, dtype=x.dtype)\n y_ = empty(nb_x * 3, dtype=y.dtype)\n for i in range(nb_x):\n i0 = i * 3\n i_n, i_e = i0 + 1, i0 + 2\n x__, y__ = x[i], y[i]\n x_[i0], y_[i0] = x__, y__\n x_[i_n], y_[i_n] = x__, y__ + step\n x_[i_e], y_[i_e] = x__ + step, y__\n return x_, y_" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "## Settings\n\n" + "Settings\n--------\n\n" ] }, { @@ -87,7 +87,7 @@ "cell_type": 
"markdown", "metadata": {}, "source": [ - "## Particles\n\n" + "Particles\n---------\n\n" ] }, { @@ -105,7 +105,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## FSLE\n\n" + "FSLE\n----\n\n" ] }, { @@ -123,7 +123,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## Display FSLE\n\n" + "Display FSLE\n------------\n\n" ] }, { @@ -134,14 +134,14 @@ }, "outputs": [], "source": [ - "fig = plt.figure(figsize=(13, 5), dpi=150)\nax = fig.add_axes([0.03, 0.03, 0.90, 0.94])\nax.set_xlim(-6, 36.5), ax.set_ylim(30, 46)\nax.set_aspect(\"equal\")\nax.set_title(\"Finite size lyapunov exponent\", weight=\"bold\")\nkw = dict(cmap=\"viridis_r\", vmin=-15, vmax=0)\nm = fsle_custom.display(ax, 1 / fsle_custom.grid(\"fsle\"), **kw)\nax.grid()\n_ = plt.colorbar(m, cax=fig.add_axes([0.94, 0.05, 0.01, 0.9]))" + "fig = plt.figure(figsize=(13, 5), dpi=150)\nax = fig.add_axes([0.03, 0.03, 0.90, 0.94])\nax.set_xlim(-6, 36.5), ax.set_ylim(30, 46)\nax.set_aspect(\"equal\")\nax.set_title(\"Finite size lyapunov exponent\", weight=\"bold\")\nkw = dict(cmap=\"viridis_r\", vmin=-20, vmax=0)\nm = fsle_custom.display(ax, 1 / fsle_custom.grid(\"fsle\"), **kw)\nax.grid()\n_ = plt.colorbar(m, cax=fig.add_axes([0.94, 0.05, 0.01, 0.9]))" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "## Display Theta\n\n" + "Display Theta\n-------------\n\n" ] }, { @@ -172,7 +172,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.9" + "version": "3.7.7" } }, "nbformat": 4, From 92c326230f7a9ca8c53147f78890a8c7321de8f5 Mon Sep 17 00:00:00 2001 From: CoriPegliasco <66008544+CoriPegliasco@users.noreply.github.com> Date: Thu, 17 Jun 2021 21:36:41 +0200 Subject: [PATCH 030/115] - add comments + english (#93) --- share/tracking.yaml | 6 +++--- src/py_eddy_tracker/dataset/grid.py | 5 ++++- src/py_eddy_tracker/eddy_feature.py | 2 +- src/py_eddy_tracker/generic.py | 2 +- 4 files changed, 9 insertions(+), 6 deletions(-) diff --git a/share/tracking.yaml b/share/tracking.yaml index 0f8766b8..b9c98488 100644 --- a/share/tracking.yaml +++ b/share/tracking.yaml @@ -8,6 +8,6 @@ PATHS: TRACK_DURATION_MIN: 4 VIRTUAL_LENGTH_MAX: 0 -#CLASS: -# MODULE: py_eddy_tracker.featured_tracking.old_tracker_reference -# CLASS: CheltonTracker +CLASS: + MODULE: py_eddy_tracker.featured_tracking.area_tracker + CLASS: AreaTracker diff --git a/src/py_eddy_tracker/dataset/grid.py b/src/py_eddy_tracker/dataset/grid.py index bd9e70d3..a2237c61 100644 --- a/src/py_eddy_tracker/dataset/grid.py +++ b/src/py_eddy_tracker/dataset/grid.py @@ -617,7 +617,10 @@ def eddy_identification( :param float,None precision: Truncate values at the defined precision in m :param str force_height_unit: Unit used for height unit :param str force_speed_unit: Unit used for speed unit - :param dict kwargs: Argument given to amplitude + :param dict kwargs: Arguments given to amplitude (mle, nb_step_min, nb_step_to_be_mle). 
+ Look at :py:meth:`py_eddy_tracker.eddy_feature.Amplitude` + The amplitude threshold is given by `step*nb_step_min` + :return: Return a list of 2 elements: Anticyclones and Cyclones :rtype: py_eddy_tracker.observations.observation.EddiesObservations diff --git a/src/py_eddy_tracker/eddy_feature.py b/src/py_eddy_tracker/eddy_feature.py index 037beb35..f6db848b 100644 --- a/src/py_eddy_tracker/eddy_feature.py +++ b/src/py_eddy_tracker/eddy_feature.py @@ -65,7 +65,7 @@ def __init__( :param float contour_height: :param array data: :param float interval: - :param int mle: maximum number of local maxima in contour + :param int mle: maximum number of local extrema in contour :param int nb_step_min: number of intervals to consider an eddy :param int nb_step_to_be_mle: number of intervals to be considered as an another maxima """ diff --git a/src/py_eddy_tracker/generic.py b/src/py_eddy_tracker/generic.py index a48c0e2f..94cf321f 100644 --- a/src/py_eddy_tracker/generic.py +++ b/src/py_eddy_tracker/generic.py @@ -309,7 +309,7 @@ def uniform_resample(x_val, y_val, num_fac=2, fixed_size=None): :param array_like x_val: input x contour coordinates :param array_like y_val: input y contour coordinates :param int num_fac: factor to increase lengths of output coordinates - :param int,None fixed_size: if define, it will used to set sampling + :param int,None fixed_size: if defined, will be used to set sampling """ nb = x_val.shape[0] # Get distances From 6bce49abe40204aa5628e490bf288d66880e9dd6 Mon Sep 17 00:00:00 2001 From: AntSimi <36040805+AntSimi@users.noreply.github.com> Date: Fri, 18 Jun 2021 09:12:13 +0200 Subject: [PATCH 031/115] typo and allow to choose netcdf format at output --- .../pet_eddy_detection_ACC.py | 13 +++------- examples/06_grid_manipulation/pet_lavd.py | 11 +++----- examples/07_cube_manipulation/pet_fsle_med.py | 2 +- .../16_network/pet_replay_segmentation.py | 8 +----- examples/16_network/pet_segmentation_anim.py | 3 +-- src/py_eddy_tracker/__init__.py | 13 +++------- src/py_eddy_tracker/appli/eddies.py | 15 +++-------- src/py_eddy_tracker/appli/network.py | 4 +-- src/py_eddy_tracker/observations/groups.py | 1 - src/py_eddy_tracker/observations/network.py | 25 +++++-------------- .../observations/observation.py | 24 ++++-------------- src/py_eddy_tracker/observations/tracking.py | 5 +--- tests/test_grid.py | 10 +------- 13 files changed, 30 insertions(+), 104 deletions(-) diff --git a/examples/02_eddy_identification/pet_eddy_detection_ACC.py b/examples/02_eddy_identification/pet_eddy_detection_ACC.py index c799a45e..e6c5e381 100644 --- a/examples/02_eddy_identification/pet_eddy_detection_ACC.py +++ b/examples/02_eddy_identification/pet_eddy_detection_ACC.py @@ -65,8 +65,7 @@ def set_fancy_labels(fig, ticklabelsize=14, labelsize=14, labelweight="semibold" y_name="latitude", # Manual area subset indexs=dict( - latitude=slice(100 - margin, 220 + margin), - longitude=slice(0, 230 + margin), + latitude=slice(100 - margin, 220 + margin), longitude=slice(0, 230 + margin), ), ) g_raw = RegularGridDataset(**kw_data) @@ -188,16 +187,10 @@ def set_fancy_labels(fig, ticklabelsize=14, labelsize=14, labelweight="semibold" ax.set_ylabel("With filter") ax.plot( - a_[field][i_a] * factor, - a[field][j_a] * factor, - "r.", - label="Anticyclonic", + a_[field][i_a] * factor, a[field][j_a] * factor, "r.", label="Anticyclonic", ) ax.plot( - c_[field][i_c] * factor, - c[field][j_c] * factor, - "b.", - label="Cyclonic", + c_[field][i_c] * factor, c[field][j_c] * factor, "b.", label="Cyclonic", ) 
ax.set_aspect("equal"), ax.grid() ax.plot((0, 1000), (0, 1000), "g") diff --git a/examples/06_grid_manipulation/pet_lavd.py b/examples/06_grid_manipulation/pet_lavd.py index ed21738f..d96c0b06 100644 --- a/examples/06_grid_manipulation/pet_lavd.py +++ b/examples/06_grid_manipulation/pet_lavd.py @@ -142,8 +142,9 @@ def update(i_frame): kw_video = dict(frames=arange(nb_time), interval=1000.0 / step_by_day / 2, blit=True) fig, ax, txt = start_ax(dpi=60) -x_g_, y_g_ = arange(0 - step / 2, 36 + step / 2, step), arange( - 28 - step / 2, 46 + step / 2, step +x_g_, y_g_ = ( + arange(0 - step / 2, 36 + step / 2, step), + arange(28 - step / 2, 46 + step / 2, step), ) # pcolorfast will be faster than pcolormesh, we could use pcolorfast due to x and y are regular pcolormesh = ax.pcolorfast(x_g_, y_g_, lavd, **kw_vorticity) @@ -158,11 +159,7 @@ def update(i_frame): # Format LAVD data lavd = RegularGridDataset.with_array( coordinates=("lon", "lat"), - datas=dict( - lavd=lavd.T, - lon=x_g, - lat=y_g, - ), + datas=dict(lavd=lavd.T, lon=x_g, lat=y_g,), centered=True, ) diff --git a/examples/07_cube_manipulation/pet_fsle_med.py b/examples/07_cube_manipulation/pet_fsle_med.py index cc221cf7..b128286a 100644 --- a/examples/07_cube_manipulation/pet_fsle_med.py +++ b/examples/07_cube_manipulation/pet_fsle_med.py @@ -27,7 +27,7 @@ # ADT in med # ---------- # :py:meth:`~py_eddy_tracker.dataset.grid.GridCollection.from_netcdf_cube` method is -# made for data stores in time cube, you could use also +# made for data stores in time cube, you could use also # :py:meth:`~py_eddy_tracker.dataset.grid.GridCollection.from_netcdf_list` method to # load data-cube from multiple file. c = GridCollection.from_netcdf_cube( diff --git a/examples/16_network/pet_replay_segmentation.py b/examples/16_network/pet_replay_segmentation.py index 757854d5..d6b4568b 100644 --- a/examples/16_network/pet_replay_segmentation.py +++ b/examples/16_network/pet_replay_segmentation.py @@ -149,13 +149,7 @@ def get_obs(dataset): n_.median_filter(15, "time", "latitude") kw["s"] = (n_.radius_e * 1e-3) ** 2 / 30 ** 2 * 20 m = n_.scatter_timeline( - ax, - "shape_error_e", - vmin=14, - vmax=70, - **kw, - yfield="lon", - method="all", + ax, "shape_error_e", vmin=14, vmax=70, **kw, yfield="lon", method="all", ) ax.set_ylabel("Longitude") cb = update_axes(ax, m["scatter"]) diff --git a/examples/16_network/pet_segmentation_anim.py b/examples/16_network/pet_segmentation_anim.py index 340163a1..503229e7 100644 --- a/examples/16_network/pet_segmentation_anim.py +++ b/examples/16_network/pet_segmentation_anim.py @@ -96,8 +96,7 @@ def update(i_frame): indices_frames = INDICES[i_frame] mappable_CONTOUR.set_data( - e.contour_lon_e[indices_frames], - e.contour_lat_e[indices_frames], + e.contour_lon_e[indices_frames], e.contour_lat_e[indices_frames], ) mappable_CONTOUR.set_color(cmap.colors[tr[indices_frames] % len(cmap.colors)]) return (mappable_tracks,) diff --git a/src/py_eddy_tracker/__init__.py b/src/py_eddy_tracker/__init__.py index 46946e77..d5db40f6 100644 --- a/src/py_eddy_tracker/__init__.py +++ b/src/py_eddy_tracker/__init__.py @@ -409,20 +409,14 @@ def parse_args(self, *args, **kwargs): nc_name="previous_cost", nc_type="float32", nc_dims=("obs",), - nc_attr=dict( - long_name="Previous cost for previous observation", - comment="", - ), + nc_attr=dict(long_name="Previous cost for previous observation", comment="",), ), next_cost=dict( attr_name=None, nc_name="next_cost", nc_type="float32", nc_dims=("obs",), - nc_attr=dict( - long_name="Next cost for next 
observation", - comment="", - ), + nc_attr=dict(long_name="Next cost for next observation", comment="",), ), n=dict( attr_name=None, @@ -633,8 +627,7 @@ def parse_args(self, *args, **kwargs): nc_type="f4", nc_dims=("obs",), nc_attr=dict( - long_name="Log base 10 background chlorophyll", - units="Log(Chl/[mg/m^3])", + long_name="Log base 10 background chlorophyll", units="Log(Chl/[mg/m^3])", ), ), year=dict( diff --git a/src/py_eddy_tracker/appli/eddies.py b/src/py_eddy_tracker/appli/eddies.py index d5a727f9..d30ef259 100644 --- a/src/py_eddy_tracker/appli/eddies.py +++ b/src/py_eddy_tracker/appli/eddies.py @@ -367,8 +367,7 @@ def track( logger.info("Longer track saved have %d obs", c.nb_obs_by_tracks.max()) logger.info( - "The mean length is %d observations for long track", - c.nb_obs_by_tracks.mean(), + "The mean length is %d observations for long track", c.nb_obs_by_tracks.mean(), ) long_track.write_file(**kw_write) @@ -378,14 +377,7 @@ def track( def get_group( - dataset1, - dataset2, - index1, - index2, - score, - invalid=2, - low=10, - high=60, + dataset1, dataset2, index1, index2, score, invalid=2, low=10, high=60, ): group1, group2 = dict(), dict() m_valid = (score * 100) >= invalid @@ -494,8 +486,7 @@ def get_values(v, dataset): ] labels = dict( - high=f"{high:0.0f} <= high", - low=f"{invalid:0.0f} <= low < {low:0.0f}", + high=f"{high:0.0f} <= high", low=f"{invalid:0.0f} <= low < {low:0.0f}", ) keys = [labels.get(key, key) for key in list(gr_ref.values())[0].keys()] diff --git a/src/py_eddy_tracker/appli/network.py b/src/py_eddy_tracker/appli/network.py index 5c4cdcaf..c1a752ee 100644 --- a/src/py_eddy_tracker/appli/network.py +++ b/src/py_eddy_tracker/appli/network.py @@ -76,9 +76,7 @@ def subset_network(): help="Remove short dead end, first is for minimal obs number and second for minimal segment time to keep", ) parser.add_argument( - "--remove_trash", - action="store_true", - help="Remove trash (network id == 0)", + "--remove_trash", action="store_true", help="Remove trash (network id == 0)", ) parser.add_argument( "-p", diff --git a/src/py_eddy_tracker/observations/groups.py b/src/py_eddy_tracker/observations/groups.py index c0924cb3..e77c81fe 100644 --- a/src/py_eddy_tracker/observations/groups.py +++ b/src/py_eddy_tracker/observations/groups.py @@ -100,7 +100,6 @@ def particle_candidate(x, y, c, eddies, t_start, i_target, pct, **kwargs): :params dict kwargs: dict of params given to `advect` """ - # Obs from initial time m_start = eddies.time == t_start diff --git a/src/py_eddy_tracker/observations/network.py b/src/py_eddy_tracker/observations/network.py index 4f9af0b3..a8b1ebc0 100644 --- a/src/py_eddy_tracker/observations/network.py +++ b/src/py_eddy_tracker/observations/network.py @@ -679,13 +679,7 @@ def display_timeline( """ self.only_one_network() j = 0 - line_kw = dict( - ls="-", - marker="+", - markersize=6, - zorder=1, - lw=3, - ) + line_kw = dict(ls="-", marker="+", markersize=6, zorder=1, lw=3,) line_kw.update(kwargs) mappables = dict(lines=list()) @@ -918,10 +912,7 @@ def event_map(self, ax, **kwargs): """Add the merging and splitting events to a map""" j = 0 mappables = dict() - symbol_kw = dict( - markersize=10, - color="k", - ) + symbol_kw = dict(markersize=10, color="k",) symbol_kw.update(kwargs) symbol_kw_split = symbol_kw.copy() symbol_kw_split["markersize"] += 4 @@ -950,13 +941,7 @@ def event_map(self, ax, **kwargs): return mappables def scatter( - self, - ax, - name="time", - factor=1, - ref=None, - edgecolor_cycle=None, - **kwargs, + self, ax, 
name="time", factor=1, ref=None, edgecolor_cycle=None, **kwargs, ): """ This function scatters the path of each network, with the merging and splitting events @@ -1400,7 +1385,9 @@ def segment_coherence( .. code-block:: python def date2file(julian_day): - date = datetime.timedelta(days=julian_day) + datetime.datetime(1950, 1, 1) + date = datetime.timedelta(days=julian_day) + datetime.datetime( + 1950, 1, 1 + ) return f"/tmp/dt_global_allsat_phy_l4_{date.strftime('%Y%m%d')}.nc" diff --git a/src/py_eddy_tracker/observations/observation.py b/src/py_eddy_tracker/observations/observation.py index 557c0279..0be29fe7 100644 --- a/src/py_eddy_tracker/observations/observation.py +++ b/src/py_eddy_tracker/observations/observation.py @@ -701,11 +701,7 @@ def load_file(cls, filename, **kwargs): .. code-block:: python kwargs_latlon_300 = dict( - include_vars=[ - "longitude", - "latitude", - ], - indexs=dict(obs=slice(0, 300)), + include_vars=["longitude", "latitude",], indexs=dict(obs=slice(0, 300)), ) small_dataset = TrackEddiesObservations.load_file( filename, **kwargs_latlon_300 @@ -1676,7 +1672,8 @@ def write_file( handler = zarr.open(filename, "w") self.to_zarr(handler, **kwargs) else: - with Dataset(filename, "w", format="NETCDF4") as handler: + nc_format = kwargs.pop("format", "NETCDF4") + with Dataset(filename, "w", format=nc_format) as handler: self.to_netcdf(handler, **kwargs) @property @@ -1967,11 +1964,7 @@ def bins_stat(self, xname, bins=None, yname=None, method=None, mask=None): def format_label(self, label): t0, t1 = self.period - return label.format( - t0=t0, - t1=t1, - nb_obs=len(self), - ) + return label.format(t0=t0, t1=t1, nb_obs=len(self),) def display(self, ax, ref=None, extern_only=False, intern_only=False, **kwargs): """Plot the speed and effective (dashed) contour of the eddies @@ -2330,14 +2323,7 @@ def grid_count_pixel_in( x_, y_ = reduce_size(x_, y_) v = create_vertice(x_, y_) (x_start, x_stop), (y_start, y_stop) = bbox_indice_regular( - v, - x_bounds, - y_bounds, - xstep, - ystep, - N, - is_circular, - x_size, + v, x_bounds, y_bounds, xstep, ystep, N, is_circular, x_size, ) i, j = get_pixel_in_regular(v, x_c, y_c, x_start, x_stop, y_start, y_stop) grid_count_(grid, i, j) diff --git a/src/py_eddy_tracker/observations/tracking.py b/src/py_eddy_tracker/observations/tracking.py index b632270c..58514eb2 100644 --- a/src/py_eddy_tracker/observations/tracking.py +++ b/src/py_eddy_tracker/observations/tracking.py @@ -574,10 +574,7 @@ def close_tracks(self, other, nb_obs_min=10, **kwargs): def format_label(self, label): t0, t1 = self.period return label.format( - t0=t0, - t1=t1, - nb_obs=len(self), - nb_tracks=(self.nb_obs_by_track != 0).sum(), + t0=t0, t1=t1, nb_obs=len(self), nb_tracks=(self.nb_obs_by_track != 0).sum(), ) def plot(self, ax, ref=None, **kwargs): diff --git a/tests/test_grid.py b/tests/test_grid.py index 2c89550a..34187357 100644 --- a/tests/test_grid.py +++ b/tests/test_grid.py @@ -7,15 +7,7 @@ G = RegularGridDataset(get_demo_path("mask_1_60.nc"), "lon", "lat") X = 0.025 -contour = Path( - ( - (-X, 0), - (X, 0), - (X, X), - (-X, X), - (-X, 0), - ) -) +contour = Path(((-X, 0), (X, 0), (X, X), (-X, X), (-X, 0),)) # contour From 0834ed25301e0672957a0277f3e3f30e69fd6352 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment?= <49512274+ludwigVonKoopa@users.noreply.github.com> Date: Fri, 18 Jun 2021 09:45:36 +0200 Subject: [PATCH 032/115] - changed log format, too many tabs - adding more logs - change generator time step on GridCollection, to work it needed more files 
than necessary. - added coherence computation --- CHANGELOG.rst | 3 + src/py_eddy_tracker/__init__.py | 4 +- src/py_eddy_tracker/dataset/grid.py | 22 +- src/py_eddy_tracker/observations/groups.py | 66 ++++- src/py_eddy_tracker/observations/network.py | 263 ++++++++++-------- .../observations/observation.py | 1 + 6 files changed, 213 insertions(+), 146 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 80def41c..87b5d870 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -12,9 +12,12 @@ Changed ^^^^^^^ Fixed ^^^^^ +- GridCollection get_next_time_step & get_previous_time_step needed more files to work in the dataset list. + The loop needed explicitly self.dataset[i+-1] even when i==0, therefore indice went out of range Added ^^^^^ + [3.4.0] - 2021-03-29 -------------------- Changed diff --git a/src/py_eddy_tracker/__init__.py b/src/py_eddy_tracker/__init__.py index 46946e77..e17d0e2f 100644 --- a/src/py_eddy_tracker/__init__.py +++ b/src/py_eddy_tracker/__init__.py @@ -32,9 +32,7 @@ def start_logger(): - FORMAT_LOG = ( - "%(levelname)-8s %(asctime)s %(module)s.%(funcName)s :\n\t\t\t\t\t%(message)s" - ) + FORMAT_LOG = "%(levelname)-8s %(asctime)s %(module)s.%(funcName)s :\n\t%(message)s" logger = logging.getLogger("pet") if len(logger.handlers) == 0: # set up logging to CONSOLE diff --git a/src/py_eddy_tracker/dataset/grid.py b/src/py_eddy_tracker/dataset/grid.py index bd9e70d3..41522d14 100644 --- a/src/py_eddy_tracker/dataset/grid.py +++ b/src/py_eddy_tracker/dataset/grid.py @@ -2257,11 +2257,13 @@ def from_netcdf_cube(cls, filename, x_name, y_name, t_name, heigth=None): @classmethod def from_netcdf_list(cls, filenames, t, x_name, y_name, indexs=None, heigth=None): new = cls() - for i, t in enumerate(t): - d = RegularGridDataset(filenames[i], x_name, y_name, indexs=indexs) + for i, _t in enumerate(t): + filename = filenames[i] + logger.debug(f"load file {i:02d}/{len(t)} t={_t} : {filename}") + d = RegularGridDataset(filename, x_name, y_name, indexs=indexs) if heigth is not None: d.add_uv(heigth) - new.datasets.append((t, d)) + new.datasets.append((_t, d)) return new def shift_files(self, t, filename, heigth=None, **rgd_kwargs): @@ -2273,6 +2275,7 @@ def shift_files(self, t, filename, heigth=None, **rgd_kwargs): if heigth is not None: d.add_uv(heigth) self.datasets.append((t, d)) + logger.debug(f"shift and adding i={len(self.datasets)} t={t} : {filename}") def interp(self, grid_name, t, lons, lats, method="bilinear"): """ @@ -2433,6 +2436,7 @@ def advect( else: mask_particule += isnan(x) + isnan(y) while True: + logger.debug(f"advect : t={t}") if (backward and t <= t1) or (not backward and t >= t1): t0, u0, v0, m0 = t1, u1, v1, m1 t1, d1 = generator.__next__() @@ -2459,25 +2463,21 @@ def advect( yield t, x, y def get_next_time_step(self, t_init): - first = True for i, (t, dataset) in enumerate(self.datasets): if t < t_init: continue - if first: - first = False - yield self.datasets[i - 1] + + logger.debug(f"i={i}, t={t}, dataset={dataset}") yield t, dataset def get_previous_time_step(self, t_init): - first = True i = len(self.datasets) for t, dataset in reversed(self.datasets): i -= 1 if t > t_init: continue - if first: - first = False - yield self.datasets[i + 1] + + logger.debug(f"i={i}, t={t}, dataset={dataset}") yield t, dataset diff --git a/src/py_eddy_tracker/observations/groups.py b/src/py_eddy_tracker/observations/groups.py index c0924cb3..98e085c0 100644 --- a/src/py_eddy_tracker/observations/groups.py +++ b/src/py_eddy_tracker/observations/groups.py @@ -3,9 +3,10 @@ from 
numba import njit from numba import types as nb_types -from numpy import arange, int32, interp, median, where, zeros +from numpy import arange, int32, interp, median, where, zeros, meshgrid, concatenate from .observation import EddiesObservations +from ..poly import group_obs logger = logging.getLogger("pet") @@ -87,7 +88,53 @@ def advect(x, y, c, t0, n_days): return t, x, y -def particle_candidate(x, y, c, eddies, t_start, i_target, pct, **kwargs): +def create_particles(eddies, step): + """create particles only inside speed contour. Avoir creating too large numpy arrays, only to me masked + + :param eddies: network where eddies are + :type eddies: network + :param step: step for particles + :type step: float + :return: lon, lat and indices of particles in contour speed + :rtype: tuple(np.array) + """ + + lon = eddies.contour_lon_s + lat = eddies.contour_lat_s + + # compute bounding boxes of each eddies + lonMins = lon.min(axis=1) + lonMins = lonMins - (lonMins % step) + lonMaxs = lon.max(axis=1) + lonMaxs = lonMaxs - (lonMaxs % step) + step * 2 + + latMins = lat.min(axis=1) + latMins = latMins - (latMins % step) + latMaxs = lat.max(axis=1) + latMaxs = latMaxs - (latMaxs % step) + step * 2 + + lon = [] + lat = [] + # for each eddies, create mesh with particles then concatenate + for lonMin, lonMax, latMin, latMax in zip(lonMins, lonMaxs, latMins, latMaxs): + x0, y0 = meshgrid(arange(lonMin, lonMax, step), arange(latMin, latMax, step)) + + x0, y0 = x0.reshape(-1), y0.reshape(-1) + lon.append(x0) + lat.append(y0) + + x = concatenate(lon) + y = concatenate(lat) + + _, i = group_obs(x, y, 1, 360) + x, y = x[i], y[i] + + i_start = eddies.contains(x, y, intern=True) + m = i_start != -1 + return x[m], y[m], i_start[m] + + +def particle_candidate(c, eddies, step_mesh, t_start, i_target, pct, **kwargs): """Select particles within eddies, advect them, return target observation and associated percentages :param np.array(float) x: longitude of particles @@ -100,27 +147,28 @@ def particle_candidate(x, y, c, eddies, t_start, i_target, pct, **kwargs): :params dict kwargs: dict of params given to `advect` """ - # Obs from initial time m_start = eddies.time == t_start - e = eddies.extract_with_mask(m_start) + # to be able to get global index translate_start = where(m_start)[0] - # Identify particle in eddies (only in core) - i_start = e.contains(x, y, intern=True) - m = i_start != -1 - x, y, i_start = x[m], y[m], i_start[m] - # Advect + x, y, i_start = create_particles(e, step_mesh) + + # Advection t_end, x, y = advect(x, y, c, t_start, **kwargs) + # eddies at last date m_end = eddies.time == t_end / 86400 e_end = eddies.extract_with_mask(m_end) + # to be able to get global index translate_end = where(m_end)[0] + # Id eddies for each alive particle (in core and extern) i_end = e_end.contains(x, y) + # compute matrix and fill target array get_matrix(i_start, i_end, translate_start, translate_end, i_target, pct) diff --git a/src/py_eddy_tracker/observations/network.py b/src/py_eddy_tracker/observations/network.py index 0e5b9576..747bd976 100644 --- a/src/py_eddy_tracker/observations/network.py +++ b/src/py_eddy_tracker/observations/network.py @@ -3,11 +3,10 @@ Class to create network of observations """ import logging +import time from glob import glob -import zarr from numba import njit -from numba import types as nb_types from numpy import ( arange, array, @@ -16,7 +15,6 @@ concatenate, empty, in1d, - meshgrid, ones, uint16, uint32, @@ -27,7 +25,7 @@ from ..dataset.grid import GridCollection from ..generic 
import build_index, wrap_longitude -from ..poly import bbox_intersection, group_obs, vertice_overlap +from ..poly import bbox_intersection, vertice_overlap from .groups import GroupEddiesObservations, get_missing_indices, particle_candidate from .observation import EddiesObservations from .tracking import TrackEddiesObservations, track_loess_filter, track_median_filter @@ -1276,15 +1274,14 @@ def extract_segment(self, segments, absolute=False): mask[i] = False return self.extract_with_mask(mask) - def extract_with_period(self, period): + def get_mask_with_period(self, period): """ - Extract within a time period + Obtain mask within a time period :param (int,int) period: two dates to define the period, must be specified from 1/1/1950 - :return: Return all eddy trajectories in period - :rtype: NetworkObservations + :return: mask where period is defined + :rtype: np.array(bool) - .. minigallery:: py_eddy_tracker.NetworkObservations.extract_with_period """ dataset_period = self.period p_min, p_max = period @@ -1298,7 +1295,57 @@ mask *= self.time <= p_max elif p_max < 0: mask *= self.time <= (dataset_period[1] + p_max) - return self.extract_with_mask(mask) + return mask + + def extract_with_period(self, period): + """ + Extract within a time period + + :param (int,int) period: two dates to define the period, must be specified from 1/1/1950 + :return: Return all eddy trajectories in period + :rtype: NetworkObservations + + .. minigallery:: py_eddy_tracker.NetworkObservations.extract_with_period + """ + + return self.extract_with_mask(self.get_mask_with_period(period)) + + def extract_light_with_mask(self, mask): + """Extract data with mask, but only the variables used for coherence, i.e. self.array_variables + + :param mask: mask used to extract + :type mask: np.array(bool) + :return: new EddiesObservations with the wanted data + :rtype: self + """ + + if isinstance(mask, slice): + nb_obs = mask.stop - mask.start + else: + nb_obs = mask.sum() + + # only time & contour_lon/lat_e/s + variables = ["time"] + self.array_variables + new = self.__class__( + size=nb_obs, + track_extra_variables=[], + track_array_variables=self.track_array_variables, + array_variables=self.array_variables, + only_variables=variables, + raw_data=self.raw_data, + ) + new.sign_type = self.sign_type + if nb_obs == 0: + logger.warning("Empty dataset will be created") + else: + logger.info( + f"{nb_obs} observations will be extracted ({nb_obs / self.shape[0]:.3%})" + ) + + for field in variables: + logger.debug("Copy of field %s ...", field) + new.obs[field] = self.obs[field][mask] + return new def extract_with_mask(self, mask): """ @@ -1317,7 +1364,7 @@ def extract_with_mask(self, mask): if nb_obs == 0: logger.warning("Empty dataset will be created") else: - logger.info( + logger.debug( f"{nb_obs} observations will be extracted ({nb_obs / self.shape[0]:.3%})" ) for field in self.obs.dtype.descr: @@ -1372,156 +1419,126 @@ def analysis_coherence( return network_clean, res - def segment_coherence( - self, - date_function, - uv_params, - advection_mode="both", - dt_advect=14, - step_mesh=1.0 / 50, - output_name=None, + def segment_coherence_backward( - self, date_function, uv_params, n_days=14, step_mesh=1.0 / 50, output_name=None, + self, date_function, uv_params, n_days=14, step_mesh=1.0 / 50, output_name=None, ): """ - Percentage of particules and their targets after forward or/and backward advection from a specific eddy. + Percentage of particles and their targets after backward advection from a specific eddy.
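(Aside: a minimal driver sketch for these two new methods, assuming a NetworkObservations instance ``n`` and hypothetical file paths; ``uv_params`` is simply forwarded to ``GridCollection.from_netcdf_list``, whose ``x_name``/``y_name``/``heigth`` parameters appear in the grid.py hunks earlier in this series.)

.. code-block:: python

    import datetime

    def date2file(julian_day):
        # files indexed by julian day counted from 1950-01-01, as in the note below
        date = datetime.timedelta(days=julian_day) + datetime.datetime(1950, 1, 1)
        return f"/tmp/dt_global_{date.strftime('%Y%m%d')}.nc"

    # hypothetical variable names for the altimetry grids
    uv_params = dict(x_name="longitude", y_name="latitude", heigth="adt")
    i_b, pct_b = n.segment_coherence_backward(date2file, uv_params, n_days=14)
    i_f, pct_f = n.segment_coherence_forward(date2file, uv_params, n_days=14)
    # each row holds up to two target observations and the percentage of
    # particles that ended up in each of them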
:param callable date_function: python function, takes as param `int` (julian day) and return data filename associated to the date (see note) :param dict uv_params: dict of parameters used by :py:meth:`~py_eddy_tracker.dataset.grid.GridCollection.from_netcdf_list` - :param str advection_mode: "backward", "forward" or "both" - :param int dt_advect: days for advection + :param int n_days: days for advection :param float step_mesh: step for particle mesh in degrees - :param str output_name: if not None, name of file saved in zarr. Else, data will not be saved - :return: list of 2 or 4 array (depending if forward, backward or both) with segment matchs, and percents + :return: observations matches, and percents .. note:: the param `date_function` should be something like : .. code-block:: python def date2file(julian_day): - date = datetime.timedelta(days=julian_day) + datetime.datetime(1950, 1, 1) - - return f"/tmp/dt_global_allsat_phy_l4_{date.strftime('%Y%m%d')}.nc" + date = datetime.timedelta(days=julian_day) + datetime.datetime( + 1950, 1, 1 + ) + return f"/tmp/dt_global_{date.strftime('%Y%m%d')}.nc" """ - if advection_mode in ["both", "forward"]: - itf_final = -ones((self.obs.size, 2), dtype="i4") - ptf_final = zeros((self.obs.size, 2), dtype="i1") + itb_final = -ones((self.obs.size, 2), dtype="i4") + ptb_final = zeros((self.obs.size, 2), dtype="i1") - if advection_mode in ["both", "backward"]: - itb_final = -ones((self.obs.size, 2), dtype="i4") - ptb_final = zeros((self.obs.size, 2), dtype="i1") + t_start, t_end = self.period - for slice_track, b0, _ in self.iter_on(self.track): - if b0 == 0: - continue - - sub_networks = self.network(b0) + dates = arange(t_start, t_start + n_days + 1) + first_files = [date_function(x) for x in dates] - # find extremum to create a mesh of particles - lon = sub_networks.contour_lon_s - lonMin = lon.min() - 0.1 - lonMax = lon.max() + 0.1 + c = GridCollection.from_netcdf_list(first_files, dates, **uv_params) + first = True + range_start = t_start + n_days + range_end = t_end + 1 - lat = sub_networks.contour_lat_s - latMin = lat.min() - 0.1 - latMax = lat.max() + 0.1 + for _t in range(t_start + n_days, t_end + 1): + _timestamp = time.time() + t_shift = _t - x0, y0 = meshgrid( - arange(lonMin, lonMax, step_mesh), arange(latMin, latMax, step_mesh) + # skip first shift, because already included + if first: + first = False + else: + # add next date to GridCollection and delete last date + c.shift_files(t_shift, date_function(int(t_shift)), **uv_params) + particle_candidate( + c, self, step_mesh, _t, itb_final, ptb_final, n_days=-n_days + ) + logger.info( + f"coherence {_t} / {range_end-1} ({(_t - range_start) / (range_end - range_start-1):.1%}) : {time.time()-_timestamp:5.2f}s" ) - x0, y0 = x0.reshape(-1), y0.reshape(-1) - _, i = group_obs(x0, y0, 1, 360) - x0, y0 = x0[i], y0[i] - - t_start, t_end = sub_networks.period - shape = (sub_networks.obs.size, 2) - - if advection_mode in ["both", "forward"]: - - # first dates to load.
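(Aside: the removed block below preloaded ``dt_advect + 3`` grids at once and shifted with day offsets of ``dt_advect + 2``; the new methods above keep a sliding window of exactly ``n_days + 1`` grids and shift one file per iteration. A sketch of that pattern, using the names from this patch:)

.. code-block:: python

    # preload the grids for days [t_start, t_start + n_days]
    dates = arange(t_start, t_start + n_days + 1)
    c = GridCollection.from_netcdf_list(
        [date_function(x) for x in dates], dates, **uv_params
    )
    for _t in range(t_start + n_days, t_end + 1):
        if _t != t_start + n_days:
            # drop the oldest grid and load the one for the new day
            c.shift_files(_t, date_function(int(_t)), **uv_params)
        particle_candidate(c, self, step_mesh, _t, itb_final, ptb_final, n_days=-n_days)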
- dates = arange(t_start - 1, t_start + dt_advect + 2) - # files associated with dates - first_files = [date_function(x) for x in dates] - - c = GridCollection.from_netcdf_list(first_files, dates, **uv_params) - - i_target_f = -ones(shape, dtype="i4") - pct_target_f = zeros(shape, dtype="i1") - - for _t in range(t_start, t_end - dt_advect + 1): - t_shift = _t + dt_advect + 2 - - # add next date to GridCollection and delete last date - c.shift_files(t_shift, date_function(int(t_shift)), **uv_params) - particle_candidate( - x0, - y0, - c, - sub_networks, - _t, - i_target_f, - pct_target_f, - delta_t=dt_advect, - ) - itf_final[slice_track] = i_target_f - ptf_final[slice_track] = pct_target_f + return itb_final, ptb_final - if advection_mode in ["both", "backward"]: + def segment_coherence_forward( + self, date_function, uv_params, n_days=14, step_mesh=1.0 / 50, + ): - # first dates to load. - dates = arange(t_start - 1, t_start + dt_advect + 2) - # files associated with dates - first_files = [date_function(x) for x in dates] + """ + Percentage of particles and their targets after forward advection from a specific eddy. - c = GridCollection.from_netcdf_list(first_files, dates, **uv_params) + :param callable date_function: python function, takes as param `int` (julian day) and return + data filename associated to the date (see note) + :param dict uv_params: dict of parameters used by + :py:meth:`~py_eddy_tracker.dataset.grid.GridCollection.from_netcdf_list` + :param int n_days: days for advection + :param float step_mesh: step for particle mesh in degrees + :return: observations matches, and percents - i_target_b = -ones(shape, dtype="i4") - pct_target_b = zeros(shape, dtype="i1") + .. note:: the param `date_function` should be something like : - for _t in range(t_start + dt_advect + 1, t_end + 1): - t_shift = _t + 1 + ..
code-block:: python - # add next date to GridCollection and delete last date - c.shift_files(t_shift, date_function(int(t_shift)), **uv_params) - particle_candidate( - x0, - y0, - c, - sub_networks, - _t, - i_target_b, - pct_target_b, - delta_t=-dt_advect, + def date2file(julian_day): + date = datetime.timedelta(days=julian_day) + datetime.datetime( + 1950, 1, 1 ) - itb_final[slice_track] = i_target_b - ptb_final[slice_track] = pct_target_b + return f"/tmp/dt_global_{date.strftime('%Y%m%d')}.nc" + """ + + itf_final = -ones((self.obs.size, 2), dtype="i4") + ptf_final = zeros((self.obs.size, 2), dtype="i1") - if output_name is not None: - zg = zarr.open(output_name, "w") + t_start, t_end = self.period + # if begin is not None and begin > t_start: + # t_start = begin + # if end is not None and end < t_end: + # t_end = end - # zarr compression parameters - params_seg = dict() - params_pct = dict() + dates = arange(t_start, t_start + n_days + 1) + first_files = [date_function(x) for x in dates] - res = [] - if advection_mode in ["forward", "both"]: - res = res + [itf_final, ptf_final] - if output_name is not None: - zg.array("i_target_forward", itf_final, **params_seg) - zg.array("pct_target_forward", ptf_final, **params_pct) + c = GridCollection.from_netcdf_list(first_files, dates, **uv_params) + first = True + range_start = t_start + range_end = t_end - n_days + 1 - if advection_mode in ["backward", "both"]: - res = res + [itb_final, ptb_final] - if output_name is not None: - zg.array("i_target_backward", itb_final, **params_seg) - zg.array("pct_target_backward", ptb_final, **params_pct) + for _t in range(range_start, range_end): + _timestamp = time.time() + t_shift = _t + n_days - return res + # skip first shift, because already included + if first: + first = False + else: + # add next date to GridCollection and delete last date + c.shift_files(t_shift, date_function(int(t_shift)), **uv_params) + particle_candidate( + c, self, step_mesh, _t, itf_final, ptf_final, n_days=n_days + ) + logger.info( + f"coherence {_t} / {range_end-1} ({(_t - range_start) / (range_end - range_start-1):.1%}) : {time.time()-_timestamp:5.2f}s" + ) + return itf_final, ptf_final class Network: diff --git a/src/py_eddy_tracker/observations/observation.py b/src/py_eddy_tracker/observations/observation.py index 3d91ad42..e8998dd6 100644 --- a/src/py_eddy_tracker/observations/observation.py +++ b/src/py_eddy_tracker/observations/observation.py @@ -723,6 +723,7 @@ def load_file(cls, filename, **kwargs): zarr_file = filename_.endswith(end) else: zarr_file = False + logger.info(f"loading file '{filename}'") if zarr_file: return cls.load_from_zarr(filename, **kwargs) else: From fe79f1bad737e17aa4c5c5f5715c041dec91d4a2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment?= <49512274+ludwigVonKoopa@users.noreply.github.com> Date: Fri, 18 Jun 2021 14:32:03 +0200 Subject: [PATCH 033/115] correction of example --- examples/16_network/pet_follow_particle.py | 10 ++-------- src/py_eddy_tracker/observations/groups.py | 2 -- 2 files changed, 2 insertions(+), 10 deletions(-) diff --git a/examples/16_network/pet_follow_particle.py b/examples/16_network/pet_follow_particle.py index e5451daa..36639d3f 100644 --- a/examples/16_network/pet_follow_particle.py +++ b/examples/16_network/pet_follow_particle.py @@ -128,12 +128,6 @@ def update(frame): # ^^^^^^^^^^^^^^^^^^ step = 1 / 60.0 -x, y = meshgrid(arange(24, 36, step), arange(31, 36, step)) -x0, y0 = x.reshape(-1), y.reshape(-1) -# Pre-order to speed up -_, i = group_obs(x0, y0, 1, 360) 
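(Aside: this example change follows from the new seeding API — ``particle_candidate`` now receives the mesh ``step`` and builds the particles itself, per eddy, instead of one large ``meshgrid`` pre-ordered with ``group_obs``. Later patches in this series implement the seeding roughly as sketched here, keeping only points whose winding number with respect to the speed contour is non-zero:)

.. code-block:: python

    # mesh each eddy's bounding box at `step` degrees, keep in-contour points
    for x in arange(lon_min, lon_max, step):
        for y in arange(lat_min, lat_max, step):
            if winding_number_poly(x, y, create_vertice(*reduce_size(lon, lat))):
                x_out.append(x), y_out.append(y), i_out.append(i)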
-x0, y0 = x0[i], y0[i] - t_start, t_end = n.period dt = 14 @@ -141,12 +135,12 @@ def update(frame): # Forward run i_target_f, pct_target_f = -ones(shape, dtype="i4"), zeros(shape, dtype="i1") for t in range(t_start, t_end - dt): - particle_candidate(x0, y0, c, n, t, i_target_f, pct_target_f, n_days=dt) + particle_candidate(c, n, step, t, i_target_f, pct_target_f, n_days=dt) # Backward run i_target_b, pct_target_b = -ones(shape, dtype="i4"), zeros(shape, dtype="i1") for t in range(t_start + dt, t_end): - particle_candidate(x0, y0, c, n, t, i_target_b, pct_target_b, n_days=-dt) + particle_candidate(c, n, step, t, i_target_b, pct_target_b, n_days=-dt) # %% fig = plt.figure(figsize=(10, 10)) diff --git a/src/py_eddy_tracker/observations/groups.py b/src/py_eddy_tracker/observations/groups.py index 98e085c0..08053331 100644 --- a/src/py_eddy_tracker/observations/groups.py +++ b/src/py_eddy_tracker/observations/groups.py @@ -137,8 +137,6 @@ def create_particles(eddies, step): def particle_candidate(c, eddies, step_mesh, t_start, i_target, pct, **kwargs): """Select particles within eddies, advect them, return target observation and associated percentages - :param np.array(float) x: longitude of particles - :param np.array(float) y: latitude of particles :param `~py_eddy_tracker.dataset.grid.GridCollection` c: GridCollection with speed for particles :param GroupEddiesObservations eddies: GroupEddiesObservations considered :param int t_start: julian day of the advection From 57408b8f1cee5832e6a40c1ac77c7790de0220ea Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment?= <49512274+ludwigVonKoopa@users.noreply.github.com> Date: Mon, 21 Jun 2021 12:35:24 +0200 Subject: [PATCH 034/115] flake8 corrections --- src/py_eddy_tracker/observations/groups.py | 2 +- src/py_eddy_tracker/observations/network.py | 14 ++++++------ 2 files changed, 9 insertions(+), 7 deletions(-) diff --git a/src/py_eddy_tracker/observations/groups.py b/src/py_eddy_tracker/observations/groups.py index 08053331..0ecfd515 100644 --- a/src/py_eddy_tracker/observations/groups.py +++ b/src/py_eddy_tracker/observations/groups.py @@ -89,7 +89,7 @@ def advect(x, y, c, t0, n_days): def create_particles(eddies, step): - """create particles only inside speed contour. Avoir creating too large numpy arrays, only to me masked + """create particles only inside speed contour.
Avoid creating too large numpy arrays, only to me masked :param eddies: network where eddies are :type eddies: network diff --git a/src/py_eddy_tracker/observations/network.py b/src/py_eddy_tracker/observations/network.py index 747bd976..8d9b40b9 100644 --- a/src/py_eddy_tracker/observations/network.py +++ b/src/py_eddy_tracker/observations/network.py @@ -1472,9 +1472,10 @@ def date2file(julian_day): particle_candidate( c, self, step_mesh, _t, itb_final, ptb_final, n_days=-n_days ) - logger.info( - f"coherence {_t} / {range_end-1} ({(_t - range_start) / (range_end - range_start-1):.1%}) : {time.time()-_timestamp:5.2f}s" - ) + logger.info(( + f"coherence {_t} / {range_end-1} ({(_t - range_start) / (range_end - range_start-1):.1%})" + f" : {time.time()-_timestamp:5.2f}s" + )) return itb_final, ptb_final @@ -1535,9 +1536,10 @@ def date2file(julian_day): particle_candidate( c, self, step_mesh, _t, itf_final, ptf_final, n_days=n_days ) - logger.info( - f"coherence {_t} / {range_end-1} ({(_t - range_start) / (range_end - range_start-1):.1%}) : {time.time()-_timestamp:5.2f}s" - ) + logger.info(( + f"coherence {_t} / {range_end-1} ({(_t - range_start) / (range_end - range_start-1):.1%})" + f" : {time.time()-_timestamp:5.2f}s" + )) return itf_final, ptf_final From 7b4aba66a4477bad9788844fddc2b29e4e6faa46 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment?= <49512274+ludwigVonKoopa@users.noreply.github.com> Date: Mon, 21 Jun 2021 12:38:38 +0200 Subject: [PATCH 035/115] flake correction --- examples/16_network/pet_follow_particle.py | 1 - 1 file changed, 1 deletion(-) diff --git a/examples/16_network/pet_follow_particle.py b/examples/16_network/pet_follow_particle.py index 36639d3f..a2e72d5a 100644 --- a/examples/16_network/pet_follow_particle.py +++ b/examples/16_network/pet_follow_particle.py @@ -16,7 +16,6 @@ from py_eddy_tracker.dataset.grid import GridCollection from py_eddy_tracker.observations.groups import particle_candidate from py_eddy_tracker.observations.network import NetworkObservations -from py_eddy_tracker.poly import group_obs start_logger().setLevel("ERROR") From 3a75c0157bc895f16dd8bfdb1a3af17b1e9a2c4e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment?= <49512274+ludwigVonKoopa@users.noreply.github.com> Date: Mon, 21 Jun 2021 21:34:18 +0200 Subject: [PATCH 036/115] better create_particles --- requirements.txt | 2 +- src/py_eddy_tracker/observations/groups.py | 53 +++++++++------------- 2 files changed, 22 insertions(+), 33 deletions(-) diff --git a/requirements.txt b/requirements.txt index 9539c555..097e786a 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,6 +1,6 @@ matplotlib netCDF4 -numba +numba>=0.53 numpy opencv-python pint diff --git a/src/py_eddy_tracker/observations/groups.py b/src/py_eddy_tracker/observations/groups.py index 0ecfd515..3d028e12 100644 --- a/src/py_eddy_tracker/observations/groups.py +++ b/src/py_eddy_tracker/observations/groups.py @@ -3,10 +3,10 @@ from numba import njit from numba import types as nb_types -from numpy import arange, int32, interp, median, where, zeros, meshgrid, concatenate +from numpy import arange, array, int32, interp, median, where, zeros +from ..poly import create_vertice, reduce_size, winding_number_poly from .observation import EddiesObservations -from ..poly import group_obs logger = logging.getLogger("pet") @@ -88,6 +88,24 @@ def advect(x, y, c, t0, n_days): return t, x, y +@njit(cache=True) +def _create_meshed_particles(lons, lats, step): + x_out, y_out, i_out = list(), list(), list() + for i, (lon, lat) in 
enumerate(zip(lons, lats)): + lon_min, lon_max = lon.min(), lon.max() + lat_min, lat_max = lat.min(), lat.max() + lon_min -= lon_min % step + lon_max -= lon_max % step - step * 2 + lat_min -= lat_min % step + lat_max -= lat_max % step - step * 2 + + for x in arange(lon_min, lon_max, step): + for y in arange(lat_min, lat_max, step): + if winding_number_poly(x, y, create_vertice(*reduce_size(lon, lat))): + x_out.append(x), y_out.append(y), i_out.append(i) + return array(x_out), array(y_out), array(i_out) + + def create_particles(eddies, step): """create particles only inside speed contour. Avoid creating too large numpy arrays, only to me masked @@ -102,36 +120,7 @@ def create_particles(eddies, step): lon = eddies.contour_lon_s lat = eddies.contour_lat_s - # compute bounding boxes of each eddies - lonMins = lon.min(axis=1) - lonMins = lonMins - (lonMins % step) - lonMaxs = lon.max(axis=1) - lonMaxs = lonMaxs - (lonMaxs % step) + step * 2 - - latMins = lat.min(axis=1) - latMins = latMins - (latMins % step) - latMaxs = lat.max(axis=1) - latMaxs = latMaxs - (latMaxs % step) + step * 2 - - lon = [] - lat = [] - # for each eddies, create mesh with particles then concatenate - for lonMin, lonMax, latMin, latMax in zip(lonMins, lonMaxs, latMins, latMaxs): - x0, y0 = meshgrid(arange(lonMin, lonMax, step), arange(latMin, latMax, step)) - - x0, y0 = x0.reshape(-1), y0.reshape(-1) - lon.append(x0) - lat.append(y0) - - x = concatenate(lon) - y = concatenate(lat) - - _, i = group_obs(x, y, 1, 360) - x, y = x[i], y[i] - - i_start = eddies.contains(x, y, intern=True) - m = i_start != -1 - return x[m], y[m], i_start[m] + return _create_meshed_particles(lon, lat, step) def particle_candidate(c, eddies, step_mesh, t_start, i_target, pct, **kwargs): From 2f20ca6b6774a9983afbb9e542dee3f5c5c97b93 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment?= <49512274+ludwigVonKoopa@users.noreply.github.com> Date: Mon, 21 Jun 2021 22:04:59 +0200 Subject: [PATCH 037/115] create particles in class method --- src/py_eddy_tracker/observations/groups.py | 40 +------------------ .../observations/observation.py | 32 +++++++++++++++ 2 files changed, 34 insertions(+), 38 deletions(-) diff --git a/src/py_eddy_tracker/observations/groups.py b/src/py_eddy_tracker/observations/groups.py index 3d028e12..64a81a36 100644 --- a/src/py_eddy_tracker/observations/groups.py +++ b/src/py_eddy_tracker/observations/groups.py @@ -3,9 +3,8 @@ from numba import njit from numba import types as nb_types -from numpy import arange, array, int32, interp, median, where, zeros +from numpy import arange, int32, interp, median, where, zeros -from ..poly import create_vertice, reduce_size, winding_number_poly from .observation import EddiesObservations logger = logging.getLogger("pet") @@ -88,41 +87,6 @@ def advect(x, y, c, t0, n_days): return t, x, y -@njit(cache=True) -def _create_meshed_particles(lons, lats, step): - x_out, y_out, i_out = list(), list(), list() - for i, (lon, lat) in enumerate(zip(lons, lats)): - lon_min, lon_max = lon.min(), lon.max() - lat_min, lat_max = lat.min(), lat.max() - lon_min -= lon_min % step - lon_max -= lon_max % step - step * 2 - lat_min -= lat_min % step - lat_max -= lat_max % step - step * 2 - - for x in arange(lon_min, lon_max, step): - for y in arange(lat_min, lat_max, step): - if winding_number_poly(x, y, create_vertice(*reduce_size(lon, lat))): - x_out.append(x), y_out.append(y), i_out.append(i) - return array(x_out), array(y_out), array(i_out) - - -def create_particles(eddies, step): - """create particles only 
inside speed contour. Avoid creating too large numpy arrays, only to me masked - - :param eddies: network where eddies are - :type eddies: network - :param step: step for particles - :type step: float - :return: lon, lat and indices of particles in contour speed - :rtype: tuple(np.array) - """ - - lon = eddies.contour_lon_s - lat = eddies.contour_lat_s - - return _create_meshed_particles(lon, lat, step) - - def particle_candidate(c, eddies, step_mesh, t_start, i_target, pct, **kwargs): """Select particles within eddies, advect them, return target observation and associated percentages @@ -141,7 +105,7 @@ def particle_candidate(c, eddies, step_mesh, t_start, i_target, pct, **kwargs): # to be able to get global index translate_start = where(m_start)[0] - x, y, i_start = create_particles(e, step_mesh) + x, y, i_start = e.create_particles(step_mesh) # Advection t_end, x, y = advect(x, y, c, t_start, **kwargs) diff --git a/src/py_eddy_tracker/observations/observation.py b/src/py_eddy_tracker/observations/observation.py index aa73b28d..d969f800 100644 --- a/src/py_eddy_tracker/observations/observation.py +++ b/src/py_eddy_tracker/observations/observation.py @@ -69,6 +69,7 @@ poly_indexs, reduce_size, vertice_overlap, + winding_number_poly ) logger = logging.getLogger("pet") @@ -2274,6 +2275,19 @@ def nb_days(self): """ return self.period[1] - self.period[0] + 1 + def create_particles(self, step, intern=True): + """create particles only inside speed contour. Avoid creating too large numpy arrays, only to me masked + + :param step: step for particles + :type step: float + :param bool intern: If true use speed contour instead of effective contour + :return: lon, lat and indices of particles + :rtype: tuple(np.array) + """ + + xname, yname = self.intern(intern) + return _create_meshed_particles(self[xname], self[yname], step) + @njit(cache=True) def grid_count_(grid, i, j): @@ -2430,6 +2444,24 @@ def grid_stat(x_c, y_c, grid, x, y, result, circular=False, method="mean"): result[elt] = v_max +@njit(cache=True) +def _create_meshed_particles(lons, lats, step): + x_out, y_out, i_out = list(), list(), list() + for i, (lon, lat) in enumerate(zip(lons, lats)): + lon_min, lon_max = lon.min(), lon.max() + lat_min, lat_max = lat.min(), lat.max() + lon_min -= lon_min % step + lon_max -= lon_max % step - step * 2 + lat_min -= lat_min % step + lat_max -= lat_max % step - step * 2 + + for x in arange(lon_min, lon_max, step): + for y in arange(lat_min, lat_max, step): + if winding_number_poly(x, y, create_vertice(*reduce_size(lon, lat))): + x_out.append(x), y_out.append(y), i_out.append(i) + return array(x_out), array(y_out), array(i_out) + + class VirtualEddiesObservations(EddiesObservations): """Class to work with virtual obs""" From 1f21d33d49b5aea6f2c6a45852a406fe37fadf29 Mon Sep 17 00:00:00 2001 From: AntSimi <36040805+AntSimi@users.noreply.github.com> Date: Wed, 16 Jun 2021 23:51:33 +0200 Subject: [PATCH 038/115] management of floating time --- CHANGELOG.rst | 4 ++ doc/run_tracking.rst | 4 +- .../06_grid_manipulation/pet_okubo_weiss.py | 2 +- examples/16_network/pet_follow_particle.py | 5 +++ .../16_network/pet_follow_particle.ipynb | 12 +++--- src/py_eddy_tracker/__init__.py | 7 ++- src/py_eddy_tracker/appli/eddies.py | 40 +++++++++++------- src/py_eddy_tracker/appli/grid.py | 15 +++++-- .../data/Anticyclonic_20190223.nc | Bin 907023 -> 908188 bytes src/py_eddy_tracker/gui.py | 4 +- src/py_eddy_tracker/observations/groups.py | 4 +- .../observations/observation.py | 2 + 
src/py_eddy_tracker/observations/tracking.py | 15 ++++--- src/py_eddy_tracker/tracking.py | 9 ++-- src/scripts/EddyTranslate | 9 ++-- 15 files changed, 86 insertions(+), 46 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 87b5d870..d7da20d3 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -10,6 +10,10 @@ and this project adheres to `Semantic Versioning First target\nLatitude") +ax_2nd_b.set_ylabel("Color -> Secondary target\nLatitude") +ax_2nd_b.set_xlabel("Julian days"), ax_2nd_f.set_xlabel("Julian days") +ax_1st_f.set_yticks([]), ax_2nd_f.set_yticks([]) +ax_1st_f.set_xticks([]), ax_1st_b.set_xticks([]) def color_alpha(target, pct, vmin=5, vmax=80): diff --git a/notebooks/python_module/16_network/pet_follow_particle.ipynb b/notebooks/python_module/16_network/pet_follow_particle.ipynb index 6be13adf..b6723a97 100644 --- a/notebooks/python_module/16_network/pet_follow_particle.ipynb +++ b/notebooks/python_module/16_network/pet_follow_particle.ipynb @@ -15,7 +15,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\n# Follow particle\n" + "\nFollow particle\n===============\n" ] }, { @@ -55,7 +55,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## Schema\n\n" + "Schema\n------\n\n" ] }, { @@ -73,7 +73,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## Animation\nParticle settings\n\n" + "Animation\n---------\nParticle settings\n\n" ] }, { @@ -109,7 +109,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "### Particle advection\n\n" + "Particle advection\n^^^^^^^^^^^^^^^^^^\n\n" ] }, { @@ -131,7 +131,7 @@ }, "outputs": [], "source": [ - "fig = plt.figure(figsize=(10, 10))\nax_1st_b = fig.add_axes([0.05, 0.52, 0.45, 0.45])\nax_2nd_b = fig.add_axes([0.05, 0.05, 0.45, 0.45])\nax_1st_f = fig.add_axes([0.52, 0.52, 0.45, 0.45])\nax_2nd_f = fig.add_axes([0.52, 0.05, 0.45, 0.45])\nax_1st_b.set_title(\"Backward advection for each time step\")\nax_1st_f.set_title(\"Forward advection for each time step\")\n\n\ndef color_alpha(target, pct, vmin=5, vmax=80):\n color = cmap(n.segment[target])\n # We will hide under 5 % and from 80% to 100 % it will be 1\n alpha = (pct - vmin) / (vmax - vmin)\n alpha[alpha < 0] = 0\n alpha[alpha > 1] = 1\n color[:, 3] = alpha\n return color\n\n\nkw = dict(\n name=None, yfield=\"longitude\", event=False, zorder=-100, s=(n.speed_area / 20e6)\n)\nn.scatter_timeline(ax_1st_b, c=color_alpha(i_target_b.T[0], pct_target_b.T[0]), **kw)\nn.scatter_timeline(ax_2nd_b, c=color_alpha(i_target_b.T[1], pct_target_b.T[1]), **kw)\nn.scatter_timeline(ax_1st_f, c=color_alpha(i_target_f.T[0], pct_target_f.T[0]), **kw)\nn.scatter_timeline(ax_2nd_f, c=color_alpha(i_target_f.T[1], pct_target_f.T[1]), **kw)\nfor ax in (ax_1st_b, ax_2nd_b, ax_1st_f, ax_2nd_f):\n n.display_timeline(ax, field=\"longitude\", marker=\"+\", lw=2, markersize=5)\n ax.grid()" + "fig = plt.figure(figsize=(10, 10))\nax_1st_b = fig.add_axes([0.05, 0.52, 0.45, 0.45])\nax_2nd_b = fig.add_axes([0.05, 0.05, 0.45, 0.45])\nax_1st_f = fig.add_axes([0.52, 0.52, 0.45, 0.45])\nax_2nd_f = fig.add_axes([0.52, 0.05, 0.45, 0.45])\nax_1st_b.set_title(\"Backward advection for each time step\")\nax_1st_f.set_title(\"Forward advection for each time step\")\nax_1st_b.set_ylabel(\"Color -> First target\\nLatitude\")\nax_2nd_b.set_ylabel(\"Color -> Secondary target\\nLatitude\")\nax_2nd_b.set_xlabel(\"Julian days\"), ax_2nd_f.set_xlabel(\"Julian days\")\nax_1st_f.set_yticks([]), ax_2nd_f.set_yticks([])\nax_1st_f.set_xticks([]), ax_1st_b.set_xticks([])\n\n\ndef color_alpha(target, pct, 
vmin=5, vmax=80):\n color = cmap(n.segment[target])\n # We will hide under 5 % and from 80% to 100 % it will be 1\n alpha = (pct - vmin) / (vmax - vmin)\n alpha[alpha < 0] = 0\n alpha[alpha > 1] = 1\n color[:, 3] = alpha\n return color\n\n\nkw = dict(\n name=None, yfield=\"longitude\", event=False, zorder=-100, s=(n.speed_area / 20e6)\n)\nn.scatter_timeline(ax_1st_b, c=color_alpha(i_target_b.T[0], pct_target_b.T[0]), **kw)\nn.scatter_timeline(ax_2nd_b, c=color_alpha(i_target_b.T[1], pct_target_b.T[1]), **kw)\nn.scatter_timeline(ax_1st_f, c=color_alpha(i_target_f.T[0], pct_target_f.T[0]), **kw)\nn.scatter_timeline(ax_2nd_f, c=color_alpha(i_target_f.T[1], pct_target_f.T[1]), **kw)\nfor ax in (ax_1st_b, ax_2nd_b, ax_1st_f, ax_2nd_f):\n n.display_timeline(ax, field=\"longitude\", marker=\"+\", lw=2, markersize=5)\n ax.grid()" ] } ], @@ -151,7 +151,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.9" + "version": "3.7.7" } }, "nbformat": 4, diff --git a/src/py_eddy_tracker/__init__.py b/src/py_eddy_tracker/__init__.py index 5cf0d59a..971914f8 100644 --- a/src/py_eddy_tracker/__init__.py +++ b/src/py_eddy_tracker/__init__.py @@ -106,12 +106,17 @@ def parse_args(self, *args, **kwargs): return opts +TIME_MODELS = ["%Y%m%d", "%Y%m%d%H%M%S", "%Y%m%dT%H%M%S"] + + VAR_DESCR = dict( time=dict( attr_name="time", nc_name="time", old_nc_name=["j1"], - nc_type="int32", + nc_type="float64", + output_type="uint32", + scale_factor=1 / 86400.0, nc_dims=("obs",), nc_attr=dict( standard_name="time", diff --git a/src/py_eddy_tracker/appli/eddies.py b/src/py_eddy_tracker/appli/eddies.py index d30ef259..9c57d818 100644 --- a/src/py_eddy_tracker/appli/eddies.py +++ b/src/py_eddy_tracker/appli/eddies.py @@ -15,7 +15,7 @@ from numpy import bincount, bytes_, empty, in1d, unique from yaml import safe_load -from .. import EddyParser +from .. 
import TIME_MODELS, EddyParser from ..observations.observation import EddiesObservations, reverse_index from ..observations.tracking import TrackEddiesObservations from ..tracking import Correspondances @@ -223,7 +223,7 @@ def browse_dataset_in( data_dir, files_model, date_regexp, - date_model, + date_model=None, start_date=None, end_date=None, sub_sampling_step=1, @@ -238,11 +238,7 @@ def browse_dataset_in( filenames = bytes_(glob(full_path)) dataset_list = empty( - len(filenames), - dtype=[ - ("filename", "S500"), - ("date", "datetime64[D]"), - ], + len(filenames), dtype=[("filename", "S500"), ("date", "datetime64[s]")], ) dataset_list["filename"] = filenames @@ -268,10 +264,21 @@ def browse_dataset_in( str_date = result.groups()[0] if str_date is not None: - item["date"] = datetime.strptime(str_date, date_model).date() + if date_model is None: + model_found = False + for model in TIME_MODELS: + try: + item["date"] = datetime.strptime(str_date, model) + model_found = True + break + except ValueError: + pass + if not model_found: + raise Exception("No time model found") + else: + item["date"] = datetime.strptime(str_date, date_model) dataset_list.sort(order=["date", "filename"]) - steps = unique(dataset_list["date"][1:] - dataset_list["date"][:-1]) if len(steps) > 1: raise Exception("Several days steps in grid dataset %s" % steps) @@ -304,7 +311,7 @@ def track( correspondances_only=False, **kw_c, ): - kw = dict(date_regexp=".*_([0-9]*?).[nz].*", date_model="%Y%m%d") + kw = dict(date_regexp=".*_([0-9]*?).[nz].*") if isinstance(pattern, list): kw.update(dict(data_dir=None, files_model=None, files=pattern)) else: @@ -323,10 +330,9 @@ def track( c = Correspondances(datasets=datasets["filename"], **kw_c) c.track() logger.info("Track finish") - t0, t1 = c.period kw_save = dict( - date_start=t0, - date_stop=t1, + date_start=datasets["date"][0], + date_stop=datasets["date"][-1], date_prod=datetime.now(), path=output_dir, sign_type=c.current_obs.sign_legend, @@ -351,11 +357,13 @@ def track( short_c = c._copy() short_c.shorter_than(size_max=nb_obs_min) - c.longer_than(size_min=nb_obs_min) - - long_track = c.merge(raw_data=raw) short_track = short_c.merge(raw_data=raw) + if c.longer_than(size_min=nb_obs_min) is False: + long_track = short_track.empty_dataset() + else: + long_track = c.merge(raw_data=raw) + # We flag obs if c.virtual: long_track["virtual"][:] = long_track["time"] == 0 diff --git a/src/py_eddy_tracker/appli/grid.py b/src/py_eddy_tracker/appli/grid.py index 7f2b9610..7a746a8f 100644 --- a/src/py_eddy_tracker/appli/grid.py +++ b/src/py_eddy_tracker/appli/grid.py @@ -5,7 +5,7 @@ from argparse import Action from datetime import datetime -from .. import EddyParser +from .. 
import TIME_MODELS, EddyParser from ..dataset.grid import RegularGridDataset, UnRegularGridDataset @@ -121,7 +121,16 @@ def eddy_id(args=None): cut_wavelength = [0, *cut_wavelength] inf_bnds, upper_bnds = cut_wavelength - date = datetime.strptime(args.datetime, "%Y%m%d") + model_found = False + for model in TIME_MODELS: + try: + date = datetime.strptime(args.datetime, model) + model_found = True + break + except ValueError: + pass + if not model_found: + raise Exception("No time model found") kwargs = dict( step=args.isoline_step, shape_error=args.fit_errmax, @@ -150,7 +159,7 @@ def eddy_id(args=None): sampling_method=args.sampling_method, **kwargs, ) - out_name = date.strftime("%(path)s/%(sign_type)s_%Y%m%d.nc") + out_name = date.strftime("%(path)s/%(sign_type)s_%Y%m%dT%H%M%S.nc") a.write_file(path=args.path_out, filename=out_name, zarr_flag=args.zarr) c.write_file(path=args.path_out, filename=out_name, zarr_flag=args.zarr) diff --git a/src/py_eddy_tracker/data/Anticyclonic_20190223.nc b/src/py_eddy_tracker/data/Anticyclonic_20190223.nc index ce48c8d60a4779176855f1c3108911b2f2d50c91..dc5fe0d3693f42f6165c4138e2e6ee8f30f74bf5 100644 GIT binary patch delta 11315 zcmbVS34Bz={h#-C-`nGD4st+F*gzng5JCDggx% zL?kRS3J3{UMcW!|)J1MY{#pts1w|=WP`OluB2+=(Kfjqbo81Nau^m3_yf-s%=68PQ zH^0dXZw*@K7_`D7uKQ5ib#Ntz%ZmGmBt_a z)Qb*8OCwpmMXk|$_7)*P^!07dV87i=6WIv(9p9Yc@8jzWkK3Bzmo^27a$kFd&|e7| zdGpy(xmiV`7s3m6NoHKQLz3;A#0()6&xn>n?Dxb+f(|m$&}Q~MV$)44wHIk(^L}XC zdN6ZLZWhubGOP<3Qek;(85?OtM)i}b7cM-JQ(T;l0+LyakR_lUbl#P@;VDSi`pN+N z$C}4eO#={_M?l`lL}n~4ur-!fR1V0ho?cx+KJ?WiDh`CqbQ&rmHX*usi4@Z{Y;I_#4S=B7ESOw@^q5Lw~v8ui_? zU6s{+Bhh;0yAa6J+ohuTJDN9nbMhd&s#kpT|V!X}Z6|%VPol zf%ik&eIC+Iorv^#5(G96g|?3*4}1YX+I;^9)D-v!F5?648*k56W;r$5?Y?f-5cH7t z;D@wx=;3^DfrmbLd=m|DoTmLjZ@@&6Zd6sTYG8ns)0+pmpo4qv*XXq zm4##h9iWd-hF$fIr|y>BF<~8>kNx7>4sCbiEw$tgb@X7 z16cNB%|!GvG!!8Oi(UBSl~QyY#4`YmM&NmIU;be+N`jX8%!1O3x2!+d6K9 zx-sfTpWDiAbaBQAUxe2}--xq>phls&Ocpw~?I(@)p1=!{fPk$I+4AC5NgnXBiUN~Z zm%4Y)SJ9s6&7~|l^ko^mw&%x`D#U#A?CH_cz{&qeU;f$UHU6H)@YZtnX{X7&o^!U6 z8AF!6^@;vNRKrCaSa>brnU&<*AM@A0n3J28>5OdXd<)j6?8Xa^dPUlEEhfUA;h5L7 zaiBE#PYVPGdh)NdV0dADZJn*YvD(%^dH_~I0({^sTYW=yZB0sIYz>Vf?)O{A{$pMN zg3o}s_`XTWaaHjZLxv2FAB-=4=5-iPIOY`=3VRBlZl-lsRcT#)ZDsW|TZ47BeMZP( z+a0Kae71k0mQzP>Q#V#J)s^4+R*#$YN;vi7Dds@F0ou;|d!DBsqwg51cPeG5PJgi%doY6KNqJ*(sx@Th~r;MIx8*cKHQBs@dsNdSg?uEtnJ;~POzwfp-qcKI<*#a?@dr<6g zA&T8+j#&&_)t)ftt^uDpkkDd-hdG-&~!U+z;bG7NpAGZ1zP9 z;Zp)h0CbK%Q9;DR$rC|OK(XH@UX4ZIPbh*J>*Af6gHR*PtD;zIP3RC5t2Th~qW{~p0m$>r3TdyNUO(=T_$^bGd&%iQap zqxycm{Meh{{|vB&lh=r5O~*T;y?JV&qh^hG(V$~GN(LP@ebfuMx?e|g3%fd0=Q{xX zWM6bT-%?=ktbHU7ha)G?e`Z=Q}WI zZ_BT8K6M%7${KQ8&y^+Q$}n=J0;z0JE|4pe-*5L9wo~T|cT@MR8w`lSk$@dMW2`a? 
z-*F%>P3-*$Oh^hl zI%9Qf;YVj;-yUVt-&cH?Qb4bDpkC`d}Dpf z;_O#N@?Yb~epTByg}N*UE;`!U7_s+%a@ii2wNDGdT+)F2AT$1Mz*l{g=NviTX;TFn z$>8t4*9K`~?rN#`JmyEXeTVL*wucc96Qy#Y3b%+-hLDWUiBi50i2G}#zQ%kqVJNDo zx?#1?2>Aj+WN9#SP#_)799`+6@P^o6FnL<4$Zr^RH2NCoctR@`3YuMECbG-d$zVyQ z3P8jnh*MQ{qatfX^~?ryZKb(!s?F?ykLL2)`g&VKU2P4)vnq3AEgfX5thALkR?o7L z?wZEhnf0c@0HgLG4MkC-P=vd0ll$WSeUu-1bMb(_-e?;znTDu5O$UVL`DRq&GkPgF z$r26kgft#*AWiL*hB+#wf|ir4FPKtnonAN1)~LZd^-_p9UBb8YQnYBEj(Z!VzT&#0(E^VB;!UpJ807bnp}xprfA@~+isa*of$in(cPcE}M72foDdJ0#qBh)Z#{ufnO?-esN-r7e1+zurev`Y5r`z0;b86o=2;4Mp~9H02z>ofItkN=$!(%5-UFhuNQ`nrZ&&TC^<@06mnkBaH7o~R%xDvwhPNhdzZDW9g}v+VO}K} zEx|<93`#Q(vi09p?F`S@Db2Nuh7nHXsu{A|-z1NtbN;lO%$a$NDc3y`@cR%S(Me;p zkBr@%3XJC|i^4xY^I*j;y5lZ=iPBG2p}VE6l8Nm6 zB2GA=Pr&gyDZs+fK~4J2`t;5>?f^m1?IXYaKjQ?8+)X_B3C^>~VdB|zJeAaGsbd&6 z3R<2tG&a=PY!#)}dYko1Chz2lLocH?E_-uAn|UpV^5ka>K%oN@;rkZz8sE&*f1O)ye=gu)g9RZOfMpy zAE~U=x3X=c6}MXD7@-WmJWMtVgK_C@a-jZXm@G*R=E~Z7++k7t{ioQ-qs>e<0zh4| z5ts@9?+G}hrpO^e10J6uKOe$XsY=`8ZB$eC3tXHrriwBBd%7+=wa<#|q9J?F>M8Op zUm}6Z^7xlPB5qf#88~l!|#v97zG5D8MxgcQSzDrZw zdi`2tY&jzr9sdF$~2 zY&tD(>`biqUSLeQ9zi=3EA=~B??m_m>ub}`((MdSr zXFX-?*=ySzcES$Z26UCpyr1r?f|Z`NGvKV9a^EpKgZfu6&i_&$ z>~h8iMdFdm*1Ku3G!93dk>6BeC$Fn#>?Y7v`>?3cj3g5TucAhzaUx`hw0dZP{(JXu zS)pvkkDiq+0~Pe%qfB}4jblR0Rh`Rue{jBp4FK`b`KJ&puN-y{ZQVqrL4Mwy*Of5} z4ey~X2dNkE+h2hwjomSaZ~tLYz#qkaIHE%ytjYaZreQxlG}Ab~Nssoz6lpGp;Xi=J zb6pO@vCu%&13SlqGf~q3xXCAwPp6rvNVNfo>^uvTdl6`-XJH!a$Hu#4qCSLW%KpP) z_~}atpZ%)Cu)2;LYhxiXAgsX(r5AXg@jcTEvi(8DnHIqD_Fa%Uw= zAY>UbyXAj6%Nf4ZPwza-K~^7V=0FqzXSv~yf1J>-6BbYSR(4GIRyX(p3&nV@cB2*l z@t{`(UVB16sA)>#*!Y+?%F|}r`Y!cE^mRwsm8pU145%J+ZeT4P<#HLc?laF(dpF-W zN^Na?5i?SupMZ|ieY@sP{kuRf6zwh<`N9Klc)dV(lX~#yC-r8{Ti5lShNxz5GHLc^ zzv{sJ9)Z_M{r|Q}&E9+f^QOBP>^xY6*eAFX&ECYR4G%ZiSwOL?!N#!$&+N@3E*2}o ztlWSgb$S>Y%Kn4FwztcXod!FQkVs1<{6?sW-MCHE0@s6Z|#c7bS!<2HQk z28rfi1zTPeCy2i3^lup8BFTGCLHhjeQ)e)U{Ds9s3;QxlQ89TAU^3qMzHEy7?~eI+ zQ*Y%f2uCN6A7ohk{dqn4Z*Y8n-uQmUY@p8;J(%Yo|lr1A^i15UJw379~nD^&);@ zKO5Oo*S`UdHCk*QtBe&S{I)FR;kH$xgcDEbvx+!9J;^$ZNxCE=pQAZB+b2*Xs8Kg2 zAy+_KsS$YnRJDVQ;D-0FS68(ATDGuhhK zl`->}K?8Y$4*I{*%uueY@(5PL1ZUi^(b~B&@|!5M5?^>pPO`@f5u#}r zxnoNA8C_`SltImIkq8g85T@uYk^l#!D153z+x3z2DwdJ^t)!_(Jwb3!0b8{*_&xQ^ zYYSP*ITt{yv|k(>!NH2x`xgyZu7cK^us8t^4XNmT&kCT2AP^Wd1+78k0e}G7x4nAH zjMCu=93RITlwd<0Xn=fR#3`f6t3`b^;L_1bOu!fFV6Y^(#_=6$e2PH8BlJewguTAj zdSU-!r65sV61jeWoRg8Sk#iyPJmBE?Y%)!iU;uM9{Y2=+dyebh7{f2fU|A?6t3?Zc z7d9|o;P0>@K;iG?fa><{w;=T{Qxw@xXdxR6C~8vRy5npULg*ibhh{Lj_XD}tq^SnN z<<0Aphbm98Nf06@X8uTPPupGljeEat->D#a+UH>!{i*|pe}`8@OJ+xDiDD9Pudzyw zK6>=wmXEV9rs6s;B^R&3Vm!?sIFk`(*s!xe!F%5LA{}>J)MjbOlOy7v`VkCHgp%fr zZkn#8%BJS2djC%s_>#Y3(8ezuw(*zehAq;4;kz1dFH~ZBw1f{f1;v-`nJ;K$^6;tm zu&&1fPEm~z&4qwQUT)%f0A5dL2Yn~=Jdw#8%+vW3tGPybE7vH0C+SMJLb8^w?@fqj zFL^6ZS@L(DqvY>(Dee^hTg^%F{*#}$_n%dB6TF`%TG%`JqcEOa;R76N-BsJFp7VJ> zw^9DiZIr+NSEd6I7&mG@LEopAXH#6vn5dXFCw|9v4HA7{HZ1HV55Kz2xPvBgqAWEK|SyiSp2#D0KtLB7@W9Y0;^LaSK^V|8m>emSC-IME(;@9 z%8@HyKO9K@6lmq8{mGD-gc1n6`JO?$R(Gk8e*LCzFVJ=S^;;{xnyn18cSJ^r{Tpz^ oSfvmbVR4!B;-x1c0C?dWYcCqC>v;$c&YzC&9Q4u?DfScoAMoI8PXGV_ delta 10752 zcmb7K30zdw{y+E5+{+9zEXuwQh-|W_AfmD(7%qt7R>A;-pa`;wnSu0P`m9{=n#TzD zdREp8Lmjh3O)KjunOjyqlk{cU=9ZPu<$soYX68b@|K}Y)cM-8v^R|eQRrWj|1aMJ#lL~-%P9RTbUf@Z6M1HsD0f!she1RHODtqwtaR)M@ zwujbGuEE=Q;KM!zxJ#}2G+zGcx znz#D156`ZUj`4HU>tarRCN-$4IXgzwYZ;Zb4^i{y8Yy2rBscD-o|);Bb25PjAYBYV z%aw)Qov0%WXn~g00Bl+O)S>Kxf-JCKmY%BPJx81JsDgZ#*N+YQ`wD0BdJ->Dbp6})WJ3RTitGOE zj(;-V$+jJ>4;ZJi#`{TzF@+10fQ=tWoBOr9{7Jj(AGAvc>>FAbL61KefSWx3q}}UJ z+L@PN3^SGc{PFk};QPlNz^_j`Bjn$woe2=|C+&tmX%8grBm1jX-ctQkJE#w#1a;Ru 
z{Tw|s7K&@5#}svr=<5lFt)v^p>Iov|2mU-~;p!)0T#4W*4fgg_EoiM&A+?3pdrVsD_@F$};h#|1a({5)?stBbeG4yJq! zBs&+#j$x*3pAMO6R8=M2c?vrN>K8|fbo2Sh(_r8&O(;g;YfHnHbgy+JZX;LPNVJLljk8s^Q4*C!BvBK=+MEp-OfyE8|E<((xZttFG^Fv(a} z*;sF=DK{7mrImH1RVG7IMNPfQV5~D44VCoC23V zMBE^mGj0tBs)@K2S9g2^z(Nn*OuPmLYPOW%H9p|ob@$#PQ!cHbisjaFpo%;`v&iR% zuHg|yIttNeMtG{e8U-MNsGE890r8q$*QWH=2-@i|-&3`6zM?a9;Z}kFn1}jqaci0u z43X*oDx-0~{H`J1M#Zasl}(g}9Be$2l<;tk;RXKOE&B2L(KsMT=k*ydcA7Xw&_~jE zAPR>F(%UW3(rSIMpw9t7kajkYRTl#wqHw|J6Wp{>V^wbVcN8cV<&IB&>?GE1;`F-L z%XP2pCCJiZ7O+<@3*#>LMONXtOLwUamJ%B{g-{zf<$C3_lOU=69gGT*hF*>^2}Un4o!`00?!NkKM{0)zdaev)o67!akr!*k%Vbd9=Dlig21D`6SY4@#e zrr`Y81Jggzf~{OzvB-aJL1|4dHT2cNh%rfcWFu0ZK;SQ=bplxAR*j^Xv0 z7^OLXj8cTysTO>?c2*27#Q4;ZLtDl3r+-w#>(@2YVd~rb#8N{XzlR_|z?j65Nmz|Ps;(~|(%%c-)v;yO4`M)*DKeD8bw@cJW{CTHhlrdzfn<{Bj14b;Kno9bca zt_^%6H;}m{yZ{z$rq(=S6Mun2yK5pec63OTbm57>KC?I&c_)z#GDLq7yAxhzhF zTQ-R6gBS*Ad<>@aX2vQ2`jkDaiWDC6IbZYrDywImYDYmi7i$;JDim`c-e)QE}R=t9Z^eqDlCWFcmk5vS3Ydn`i^wJmtgBllYca;}9_S+NVnc7{7pFakFx3Tl_OuS)V1 zV}G0m^N$IWVe$xJ{Mf&qzjO{AF<<-Y2pCxM0ki1$_K!iF1vILfl6rO*;M5opxM;?g z6d32tuQDa5VY8#)J?Iz82jJ8yIs!Oac1jVBLw^Z^?T$h?KQsh>;3y2?BNxD%jzXjg z1)(s|Nr+M*pu&77!JrBO0&H*+f~V~dk|W=> z)-*A~ye&#_;JleJWSYa9?+=}jlR1fZ2$Nd55PX+t^4hSRu6#$<0!xgL)CZiMEQG3~ z(Af=ZlZ0qd{pQ-O*IprE4g4xv@Kkxn3pO#K`Iphc$Uz#qMh13b9oqM0r?8v2E<%vY zgsYMTPc9f99xeol$ayuG%HWT~g?qyo$8r?ad~DYt1thM@8;QY3pNY;2SiMn9gZ`rg z4JX3RLNP5M_Q1yUnp&~V>YJozshD%Im@w^AAvZ30%Bv{U084g1Hm|R5YCDOY2|!sC zmN|IlKON~uFAC0xzfDp=I6MX7~uopvS{nnqhqFDC*&MOq75l0Y4osTNG%sefmh07S_ z2%BCHWNDfa;3I+I%Q;!U+4R`v)6YDGV|`LN)wj>H?}P>&GDQHw?~7&{Z@fxlkd ztuT{s1mzrCl!q5i2^p{>RZMk>6%1aAfh$ws%2d%0{(B2wJG{c-jUa+lo6AYt>ApxU zFdt<}9<=(t`x#THE;igpx)ARxbYT`mkMIDzB25Mq-=g-~Liq-I1X7-W)M5}h5p|~h zj2AUXjnyDI6R21p5+}qhf^VdW25u5ObV6{0sZ&HD%TQiZXQ-cVGL;!hYpNS+8tWnr zmF0%n$lo@Y>hN{H+Q2+@B#vh4OgJB4G}M_;Ff+fZvU;{Y6s9591Dn%Dmr?H>3(S&v z9T7C{BjwS6j4{+K(m6HDk*OOh z=b6rYO_jl^FaX3=qkaN?ZfB2xOMv8^Y$PF;pH9(_g8ySxm}+=wEC?SzVUfus4MRZ3d(pvWzq%~e+-kT>bvm0qa@^7Q)EDMrz|38rfiO2){ zBNVo!i(@VLmlLb!&IO&N_+Mqk|J8MLQ6FjNnh5xGhnUOH)WYK(Vl&p;YVMVP$wI&* zyOv`Y>IGHPL=XP*bR+|(G5ZyZj&>q(prtKh;4aWGB8Jx%Y|Z&wipw=}g+y^HVHC%@ zzvBpsn+CwEI8J(b=`$=(OGUX|thCO)6HSiPIyIHZI`34iixJ&v#>l+9R*Z@8vY6;p z37xv1QX*00co_4AgN9r}EE*?C2mrbdFD;`QH$z>wxGhNw9NwW6K9Y}_t@7z-xv&TJ z(gE1dTGC4A;DASUk8t&9`^E9-HiWL}8P;pM-GRE60|99(gW=QDZrJdQI6MKIFKq$6%Y*j)YT~PjiK+}aeR5is1kZLx+&+I#65VYGQ zcEA&3b@ss(^4)H|9F>kwp-QqCru)1`YHss@Zjb7m;eWct0WQaPMnA1U<(P2z<7M%6 z^#iA@0{GIu#i$4*C5`e8K+t1#@hYCg{pfLtT7z3~KdPhwZ2>T#LNpL|GS`MuEvHjh zgOD)#ZZX4x9IF|~5+zt%MQht}FMALt6Dmd6iFYZ=-}8a27SOaP`;~jar>JVeu80QJ z^zTJEL6Y;kXwLcFZupfJIqgx4XQkMrJt7g$icH*jJOW3EsQTkfoG-qx<=AB6PRleD zX%8@h;)-NFI)gFQNSjPt1D;cCO?zzX<-X{$X5!?@z?NS?+%h9#%O|jv0@#uUw!8sb z&cAHDlZqB3nK<+bx0Qx=a@OOno>~RFQ*tFAxJlmdZ(%`D(I?ZeEU7@JhxYC|M>mqr zKs~dLELM}9@EeYJKc1+2ZEoZOGKcIl{KUQ{U>)G1UQa>F-t`xh@94xP<5+)f;+@w9 zk%)u&A=3Tf$GIsqpLXdNaUSQzTmX&&@KpBh4bwL98Rj>B6+3KF6)Sr+2L8^s=FVQt z1MRYZ+|r;wt@$6f?CS$bY7OP|NeK$K?Ee-wN6HugpN>0vQN~yu9+ql@v1mLKK??5y zlI^{K9{xMVLi%Ft0;YjLr@&Y|NVWml+n6eY2hR33<2l7vjCC$?3$$P?#3z-q2o&=u zYcTC6jn`m#DzIG}*m9QDzOJ5Nh~y^T3Pa1GVw;;NBnL`3)4V?&vEkb9Q;GSQw<66) z!^%FP=6lyo*JL#xKK8IAjEEnMyRClCeAYuZ)?&U`KFb@nMeE$nD97YCkVbNESR*;! 
z^J|#jA;xh_v3t%gmr3Gv|F=(K22*uu75ZRSW+-_AzI#)12{t+i>*E-2@+Z)5Zfq~Y zdw|CFcPCSn#9%3KLACT|P=rrReR$9HQ>=;9+mZh}^Zos&@sXor3GTcwo%I7ncp<~w z46hGE`^9sUXs%!0V1A|Q4(7`NcSnmOOEFnmT` za#OUQB9F8LrphKECcOdiZ;|US;9|u!!YC>@$(0QXw`Z>7|-j5QZ zVJG?s>hQs=w?+}}!+79@-OP*7u~_Ha!m>+@+Al%uiPl3yl<&+pu~z1r*n8ikC`vi8 zE>g41lib(>SiXskVhJYp9{!$^K*0^=`=V~;d*3L0oqYFZ^ptTR`bC4|MCv`+fUZ5t zcjj$bKl8Ti9bK>3BdOUfi#4+A6?RM=P6d5iD?%+tNJvzfYt) zm3+)}`SC;?+723(p=(C>S5ldFuf@n5Xj` zN4|8)u5aJVcMbi&mL0=eZKTZ^eHJrtieHhB#;U?! z#x`91Tx;=_vpo?hl6|(NyE3(NjK%wT7|ZA`s7C}y!30sl3ngf+q4fl=7;Z@thQhbL zaBxjvP6Hzpk7n}IJ|e^&B+-d{HzONR>jeHQX`GmTECOd@<-ciJui=vkMX;hAA|_( z(LPZ9GzdWaBp<=*GnYdt>Wc6%^wW+<{uKrGU@Ia&hC%lH?dlmI3s0FuQFZ0h4Mkuq z9#KW(!00a)F9ToW5lbjgQwx$hR|7R}Nu$vM2_DJU<7X&D_FVb=6ez;?1H=fcGz5-;&JPv(r(6`78(T}+DG+--?f(ZrBinSHYD}+}tt>7mA(`L>IvC_b~6Y#R1 zA0kH4N6=3aS4dHbz8QG?Vm;u3j+pw&n(9$;i>plyrDf$+l_jicXmmn!+~SI|@?kcO zanW(H(L;gsmxN&B6f~dFsW4T}u4ups{UUO)V#%gRxx=&&Wx1-2RejqltE)|QmPVA2 z!6YKl2LfD6yArA {t1}\n" txt += f" Tracks : {tr} {now['n']}/{nb} ({now['n'] / nb * 100:.2f} %)\n" diff --git a/src/py_eddy_tracker/observations/groups.py b/src/py_eddy_tracker/observations/groups.py index 64a81a36..544fd5f5 100644 --- a/src/py_eddy_tracker/observations/groups.py +++ b/src/py_eddy_tracker/observations/groups.py @@ -186,7 +186,8 @@ def filled_by_interpolation(self, mask): .. minigallery:: py_eddy_tracker.TrackEddiesObservations.filled_by_interpolation """ - + if self.track.size == 0: + return nb_filled = mask.sum() logger.info("%d obs will be filled (unobserved)", nb_filled) @@ -194,7 +195,6 @@ def filled_by_interpolation(self, mask): index = arange(nb_obs) for field in self.obs.dtype.descr: - # print(f"field : {field}") var = field[0] if ( var in ["n", "virtual", "track", "cost_association"] diff --git a/src/py_eddy_tracker/observations/observation.py b/src/py_eddy_tracker/observations/observation.py index d969f800..907c2417 100644 --- a/src/py_eddy_tracker/observations/observation.py +++ b/src/py_eddy_tracker/observations/observation.py @@ -1328,6 +1328,8 @@ def solve_conflict(cost): def solve_simultaneous(cost): """Write something (TODO)""" mask = ~cost.mask + if mask.size == 0: + return mask # Count number of links by self obs and other obs self_links, other_links = sum_row_column(mask) max_links = max(self_links.max(), other_links.max()) diff --git a/src/py_eddy_tracker/observations/tracking.py b/src/py_eddy_tracker/observations/tracking.py index 58514eb2..6d302c96 100644 --- a/src/py_eddy_tracker/observations/tracking.py +++ b/src/py_eddy_tracker/observations/tracking.py @@ -173,6 +173,8 @@ def normalize_longitude(self): - contour_lon_e (how to do if in raw) - contour_lon_s (how to do if in raw) """ + if self.lon.size == 0: + return lon0 = (self.lon[self.index_from_track] - 180).repeat(self.nb_obs_by_track) logger.debug("Normalize longitude") self.lon[:] = (self.lon - lon0) % 360 + lon0 @@ -228,12 +230,13 @@ def set_global_attr_netcdf(self, h_nc): ) h_nc.date_created = datetime.now().strftime("%Y-%m-%dT%H:%M:%SZ") t = h_nc.variables[VAR_DESCR_inv["j1"]] - delta = t.max - t.min + 1 - h_nc.time_coverage_duration = "P%dD" % delta - d_start = datetime(1950, 1, 1) + timedelta(int(t.min)) - d_end = datetime(1950, 1, 1) + timedelta(int(t.max)) - h_nc.time_coverage_start = d_start.strftime("%Y-%m-%dT00:00:00Z") - h_nc.time_coverage_end = d_end.strftime("%Y-%m-%dT00:00:00Z") + if t.size: + delta = t.max - t.min + 1 + h_nc.time_coverage_duration = "P%dD" % delta + d_start = datetime(1950, 1, 1) + timedelta(int(t.min)) + d_end = datetime(1950, 1, 1) + 
timedelta(int(t.max)) + h_nc.time_coverage_start = d_start.strftime("%Y-%m-%dT00:00:00Z") + h_nc.time_coverage_end = d_end.strftime("%Y-%m-%dT00:00:00Z") def extract_with_period(self, period, **kwargs): """ diff --git a/src/py_eddy_tracker/tracking.py b/src/py_eddy_tracker/tracking.py index cba9985e..577496ff 100644 --- a/src/py_eddy_tracker/tracking.py +++ b/src/py_eddy_tracker/tracking.py @@ -161,10 +161,10 @@ def period(self): """ date_start = datetime(1950, 1, 1) + timedelta( - int(self.class_method.load_file(self.datasets[0]).time[0]) + self.class_method.load_file(self.datasets[0]).time[0] ) date_stop = datetime(1950, 1, 1) + timedelta( - int(self.class_method.load_file(self.datasets[-1]).time[0]) + self.class_method.load_file(self.datasets[-1]).time[0] ) return date_start, date_stop @@ -584,7 +584,10 @@ def prepare_merging(self): def longer_than(self, size_min): """Remove from correspondance table all association for shorter eddies than size_min""" # Identify eddies longer than - i_keep_track = where(self.nb_obs_by_tracks >= size_min)[0] + mask = self.nb_obs_by_tracks >= size_min + if not mask.any(): + return False + i_keep_track = where(mask)[0] # Reduce array self.nb_obs_by_tracks = self.nb_obs_by_tracks[i_keep_track] self.i_current_by_tracks = ( diff --git a/src/scripts/EddyTranslate b/src/scripts/EddyTranslate index 94142132..a710db0d 100644 --- a/src/scripts/EddyTranslate +++ b/src/scripts/EddyTranslate @@ -16,6 +16,7 @@ def id_parser(): ) parser.add_argument("filename_in") parser.add_argument("filename_out") + parser.add_argument("--unraw", action="store_true", help="Load unraw data") return parser @@ -32,10 +33,10 @@ def get_variable_name(filename): return list(h.keys()) -def get_variable(filename, varname): +def get_variable(filename, varname, raw=True): if is_nc(filename): dataset = EddiesObservations.load_from_netcdf( - filename, raw_data=True, include_vars=(varname,) + filename, raw_data=raw, include_vars=(varname,) ) else: dataset = EddiesObservations.load_from_zarr(filename, include_vars=(varname,)) @@ -49,8 +50,8 @@ if __name__ == "__main__": if not is_nc(args.filename_out): h = zarr.open(args.filename_out, "w") for varname in variables: - get_variable(args.filename_in, varname).to_zarr(h) + get_variable(args.filename_in, varname, raw=not args.unraw).to_zarr(h) else: with Dataset(args.filename_out, "w") as h: for varname in variables: - get_variable(args.filename_in, varname).to_netcdf(h) + get_variable(args.filename_in, varname, raw=not args.unraw).to_netcdf(h) From 7e72eff64f9a801de882f177d074b85ed32ce937 Mon Sep 17 00:00:00 2001 From: AntSimi <36040805+AntSimi@users.noreply.github.com> Date: Fri, 18 Jun 2021 09:02:54 +0200 Subject: [PATCH 039/115] modify amplitude storage --- CHANGELOG.rst | 1 + share/tracking.yaml | 3 ++- src/py_eddy_tracker/__init__.py | 2 +- .../data/Anticyclonic_20190223.nc | Bin 908188 -> 909431 bytes 4 files changed, 4 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index d7da20d3..1555c223 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -13,6 +13,7 @@ Changed - Now time will be allow second precision in storage on uint32 from 01/01/1950 to 01/01/2086 new identification will be produce with this type, old file could be still loaded. If you use old identification to track use `--unraw` option to unpack old time and store in new format. +- Now amplitude is stored with .1 mm of precision, same advice than time. 
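(Aside: a worked sketch of the packing these two changelog entries describe, using the ``scale_factor``/``output_type`` values visible in the VAR_DESCR hunks of this series; the numbers are illustrative.)

.. code-block:: python

    # time: float64 days -> uint32 with scale_factor = 1 / 86400, i.e. 1 s steps
    days = 25400.5                             # julian days since 1950-01-01
    stored_time = round(days / (1 / 86400.0))  # 2_194_603_200 s, fits in uint32
    # uint32 overflows after ~4.29e9 s, about 49710 days: early 2086

    # amplitude: float32 metres -> uint16 with scale_factor = 0.0001, i.e. 0.1 mm
    amplitude = 0.0523                         # metres
    stored_amp = round(amplitude / 0.0001)     # 523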
Fixed
^^^^^

diff --git a/share/tracking.yaml b/share/tracking.yaml
index b9c98488..a77e9893 100644
--- a/share/tracking.yaml
+++ b/share/tracking.yaml
@@ -4,8 +4,9 @@ PATHS:
 # Path for saving of outputs
 SAVE_DIR: '/home/emason/toto/'
 
-# Minimum number of observations to store eddy
+# Minimal number of timesteps to consider as a long track
 TRACK_DURATION_MIN: 4
+# Number of timesteps for missing detection
 VIRTUAL_LENGTH_MAX: 0
 
 CLASS:
diff --git a/src/py_eddy_tracker/__init__.py b/src/py_eddy_tracker/__init__.py
index 971914f8..e8e3c590 100644
--- a/src/py_eddy_tracker/__init__.py
+++ b/src/py_eddy_tracker/__init__.py
@@ -256,7 +256,7 @@ def parse_args(self, *args, **kwargs):
         old_nc_name=["A"],
         nc_type="float32",
         output_type="uint16",
-        scale_factor=0.001,
+        scale_factor=0.0001,
         nc_dims=("obs",),
         nc_attr=dict(
             long_name="Amplitude",
diff --git a/src/py_eddy_tracker/data/Anticyclonic_20190223.nc b/src/py_eddy_tracker/data/Anticyclonic_20190223.nc
index dc5fe0d3693f42f6165c4138e2e6ee8f30f74bf5..4ab8f2261e9e0a80f1f9825babf234cb3097f44e 100644
GIT binary patch
delta 8452
delta 7210

From 700f91c57bef429f0af99a8eec448b3e8a7c48cf Mon Sep 17 00:00:00 2001
From: AntSimi <36040805+AntSimi@users.noreply.github.com>
Date: Mon, 21 Jun 2021 22:33:42 +0200
Subject: [PATCH 040/115] manage time in network

---
 examples/16_network/pet_follow_particle.py   |  4 ++--
 .../16_network/pet_follow_particle.ipynb     |  2 +-
 .../16_network/pet_segmentation_anim.ipynb   | 14 +++++++-------
 src/py_eddy_tracker/observations/tracking.py | 18 ++++++++++++------
 4 files changed, 22 insertions(+), 16 deletions(-)

diff --git a/examples/16_network/pet_follow_particle.py b/examples/16_network/pet_follow_particle.py
index 078d6fbf..a5a252e2 100644
--- a/examples/16_network/pet_follow_particle.py
+++ b/examples/16_network/pet_follow_particle.py
@@ -133,12 +133,12 @@ def update(frame):
 shape = (n.obs.size, 2)
 # Forward run
 i_target_f, pct_target_f = -ones(shape, dtype="i4"), zeros(shape, dtype="i1")
-for t in range(t_start, t_end - dt):
+for t in arange(t_start, t_end - dt):
     particle_candidate(c, n, step, t, i_target_f, pct_target_f, n_days=dt)
 
 # Backward run
 i_target_b, pct_target_b = -ones(shape, dtype="i4"), zeros(shape, dtype="i1")
-for t in range(t_start + dt, t_end):
+for t in arange(t_start + dt, t_end):
     particle_candidate(c, n, step, t, i_target_b, pct_target_b, n_days=-dt)
 
 # %%
diff --git a/notebooks/python_module/16_network/pet_follow_particle.ipynb b/notebooks/python_module/16_network/pet_follow_particle.ipynb
index b6723a97..15820ad3 100644
--- a/notebooks/python_module/16_network/pet_follow_particle.ipynb
+++ b/notebooks/python_module/16_network/pet_follow_particle.ipynb
@@ -120,7 +120,7 @@
    },
    "outputs": [],
    "source": [
-    "step = 1 / 60.0\n\nx, y = meshgrid(arange(24, 36, step), arange(31, 36, step))\nx0, y0 = x.reshape(-1), y.reshape(-1)\n# Pre-order to speed up\n_, i = group_obs(x0, y0, 1, 360)\nx0, y0 = x0[i], y0[i]\n\nt_start, t_end = n.period\ndt = 14\n\nshape = (n.obs.size, 2)\n# Forward 
run\ni_target_f, pct_target_f = -ones(shape, dtype=\"i4\"), zeros(shape, dtype=\"i1\")\nfor t in range(t_start, t_end - dt):\n particle_candidate(x0, y0, c, n, t, i_target_f, pct_target_f, n_days=dt)\n\n# Backward run\ni_target_b, pct_target_b = -ones(shape, dtype=\"i4\"), zeros(shape, dtype=\"i1\")\nfor t in range(t_start + dt, t_end):\n particle_candidate(x0, y0, c, n, t, i_target_b, pct_target_b, n_days=-dt)" + "step = 1 / 60.0\n\nx, y = meshgrid(arange(24, 36, step), arange(31, 36, step))\nx0, y0 = x.reshape(-1), y.reshape(-1)\n# Pre-order to speed up\n_, i = group_obs(x0, y0, 1, 360)\nx0, y0 = x0[i], y0[i]\n\nt_start, t_end = n.period\ndt = 14\n\nshape = (n.obs.size, 2)\n# Forward run\ni_target_f, pct_target_f = -ones(shape, dtype=\"i4\"), zeros(shape, dtype=\"i1\")\nfor t in arange(t_start, t_end - dt):\n particle_candidate(x0, y0, c, n, t, i_target_f, pct_target_f, n_days=dt)\n\n# Backward run\ni_target_b, pct_target_b = -ones(shape, dtype=\"i4\"), zeros(shape, dtype=\"i1\")\nfor t in arange(t_start + dt, t_end):\n particle_candidate(x0, y0, c, n, t, i_target_b, pct_target_b, n_days=-dt)" ] }, { diff --git a/notebooks/python_module/16_network/pet_segmentation_anim.ipynb b/notebooks/python_module/16_network/pet_segmentation_anim.ipynb index 05c68873..34047da4 100644 --- a/notebooks/python_module/16_network/pet_segmentation_anim.ipynb +++ b/notebooks/python_module/16_network/pet_segmentation_anim.ipynb @@ -15,7 +15,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\n# Network segmentation process\n" + "\nNetwork segmentation process\n============================\n" ] }, { @@ -62,7 +62,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## Load data\nLoad data where observations are put in same network but no segmentation\n\n" + "Load data\n---------\nLoad data where observations are put in same network but no segmentation\n\n" ] }, { @@ -80,7 +80,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## Do segmentation\nSegmentation based on maximum overlap, temporal window for candidates = 5 days\n\n" + "Do segmentation\n---------------\nSegmentation based on maximum overlap, temporal window for candidates = 5 days\n\n" ] }, { @@ -98,7 +98,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## Anim\n\n" + "Anim\n----\n\n" ] }, { @@ -109,14 +109,14 @@ }, "outputs": [], "source": [ - "def update(i_frame):\n tr = TRACKS[i_frame]\n mappable_tracks.set_array(tr)\n s = 40 * ones(tr.shape)\n s[tr == 0] = 4\n mappable_tracks.set_sizes(s)\n\n indices_frames = INDICES[i_frame]\n mappable_CONTOUR.set_data(\n e.contour_lon_e[indices_frames],\n e.contour_lat_e[indices_frames],\n )\n mappable_CONTOUR.set_color(cmap.colors[tr[indices_frames] % len(cmap.colors)])\n return (mappable_tracks,)\n\n\nfig = plt.figure(figsize=(16, 9), dpi=60)\nax = fig.add_axes([0.04, 0.06, 0.94, 0.88], projection=GUI_AXES)\nax.set_title(f\"{len(e)} observations to segment\")\nax.set_xlim(19, 29), ax.set_ylim(31, 35.5), ax.grid()\nvmax = TRACKS[-1].max()\ncmap = ListedColormap([\"gray\", *e.COLORS[:-1]], name=\"from_list\", N=vmax)\nmappable_tracks = ax.scatter(\n e.lon, e.lat, c=TRACKS[0], cmap=cmap, vmin=0, vmax=vmax, s=20\n)\nmappable_CONTOUR = ax.plot(\n e.contour_lon_e[INDICES[0]], e.contour_lat_e[INDICES[0]], color=cmap.colors[0]\n)[0]\nani = VideoAnimation(fig, update, frames=range(1, len(TRACKS), 4), interval=125)" + "def update(i_frame):\n tr = TRACKS[i_frame]\n mappable_tracks.set_array(tr)\n s = 40 * ones(tr.shape)\n s[tr == 0] = 4\n mappable_tracks.set_sizes(s)\n\n 
indices_frames = INDICES[i_frame]\n mappable_CONTOUR.set_data(\n e.contour_lon_e[indices_frames], e.contour_lat_e[indices_frames],\n )\n mappable_CONTOUR.set_color(cmap.colors[tr[indices_frames] % len(cmap.colors)])\n return (mappable_tracks,)\n\n\nfig = plt.figure(figsize=(16, 9), dpi=60)\nax = fig.add_axes([0.04, 0.06, 0.94, 0.88], projection=GUI_AXES)\nax.set_title(f\"{len(e)} observations to segment\")\nax.set_xlim(19, 29), ax.set_ylim(31, 35.5), ax.grid()\nvmax = TRACKS[-1].max()\ncmap = ListedColormap([\"gray\", *e.COLORS[:-1]], name=\"from_list\", N=vmax)\nmappable_tracks = ax.scatter(\n e.lon, e.lat, c=TRACKS[0], cmap=cmap, vmin=0, vmax=vmax, s=20\n)\nmappable_CONTOUR = ax.plot(\n e.contour_lon_e[INDICES[0]], e.contour_lat_e[INDICES[0]], color=cmap.colors[0]\n)[0]\nani = VideoAnimation(fig, update, frames=range(1, len(TRACKS), 4), interval=125)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "## Final Result\n\n" + "Final Result\n------------\n\n" ] }, { @@ -147,7 +147,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.9" + "version": "3.7.7" } }, "nbformat": 4, diff --git a/src/py_eddy_tracker/observations/tracking.py b/src/py_eddy_tracker/observations/tracking.py index 6d302c96..492842c7 100644 --- a/src/py_eddy_tracker/observations/tracking.py +++ b/src/py_eddy_tracker/observations/tracking.py @@ -16,6 +16,7 @@ degrees, empty, histogram, + int_, median, nan, ones, @@ -601,6 +602,12 @@ def plot(self, ax, ref=None, **kwargs): def split_network(self, intern=True, **kwargs): """Return each group (network) divided in segments""" + # Find timestep of dataset + # FIXME : how to know exact time sampling + t = unique(self.time) + dts = t[1:] - t[:-1] + timestep = median(dts) + track_s, track_e, track_ref = build_index(self.tracks) ids = empty( len(self), @@ -614,7 +621,7 @@ def split_network(self, intern=True, **kwargs): ("next_obs", "i4"), ], ) - ids["group"], ids["time"] = self.tracks, self.time + ids["group"], ids["time"] = self.tracks, int_(self.time / timestep) # Initialisation # To store the id of the segments, the backward and forward cost associations ids["track"], ids["previous_cost"], ids["next_cost"] = 0, 0, 0 @@ -641,6 +648,7 @@ def split_network(self, intern=True, **kwargs): local_ids["next_obs"][m] += i_s if display_iteration: print() + ids["time"] *= timestep return ids def set_tracks(self, x, y, ids, window, **kwargs): @@ -652,8 +660,7 @@ def set_tracks(self, x, y, ids, window, **kwargs): :param ndarray ids: several fields like time, group, ... 
:param int windows: number of days where observations could missed """ - - time_index = build_index(ids["time"]) + time_index = build_index((ids["time"]).astype("i4")) nb = x.shape[0] used = zeros(nb, dtype="bool") track_id = 1 @@ -698,8 +705,7 @@ def get_previous_obs( i_current, ids, x, y, time_s, time_e, time_ref, window, **kwargs ): """Backward association of observations to the segments""" - - time_cur = ids["time"][i_current] + time_cur = int_(ids["time"][i_current]) t0, t1 = time_cur - 1 - time_ref, max(time_cur - window - time_ref, 0) for t_step in range(t0, t1 - 1, -1): i0, i1 = time_s[t_step], time_e[t_step] @@ -729,7 +735,7 @@ def get_previous_obs( def get_next_obs(i_current, ids, x, y, time_s, time_e, time_ref, window, **kwargs): """Forward association of observations to the segments""" time_max = time_e.shape[0] - 1 - time_cur = ids["time"][i_current] + time_cur = int_(ids["time"][i_current]) t0, t1 = time_cur + 1 - time_ref, min(time_cur + window - time_ref, time_max) if t0 > time_max: return -1 From 2b1eb7fb447e8b8216f7115165540e7a631d7587 Mon Sep 17 00:00:00 2001 From: AntSimi <36040805+AntSimi@users.noreply.github.com> Date: Fri, 25 Jun 2021 21:42:55 +0200 Subject: [PATCH 041/115] English corrections & missing changelog version --- CHANGELOG.rst | 19 +++++++++++++------ doc/run_tracking.rst | 14 +++++++------- share/tracking.yaml | 10 +++++----- src/py_eddy_tracker/appli/eddies.py | 2 +- 4 files changed, 26 insertions(+), 19 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 1555c223..f33f15dd 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -10,18 +10,25 @@ and this project adheres to `Semantic Versioning 1: - raise Exception("Several days steps in grid dataset %s" % steps) + raise Exception("Several timesteps in grid dataset %s" % steps) if sub_sampling_step != 1: logger.info("Grid subsampling %d", sub_sampling_step) From 8e262304ba8be2d0b91136f8294be0aaafc2519a Mon Sep 17 00:00:00 2001 From: Antoine <36040805+AntSimi@users.noreply.github.com> Date: Mon, 28 Jun 2021 21:48:57 +0200 Subject: [PATCH 042/115] create specific environement.yml for binder (#97) --- environment.yml | 11 +++++++++++ 1 file changed, 11 insertions(+) create mode 100644 environment.yml diff --git a/environment.yml b/environment.yml new file mode 100644 index 00000000..941bb7aa --- /dev/null +++ b/environment.yml @@ -0,0 +1,11 @@ +name: binder-pyeddytracker +channels: + - conda-forge + - defaults +dependencies: + - python=3.7 + - ffmpeg + - pip: + - -r file:requirements.txt + - pyeddytrackersample + - . 
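Patch 040 above infers the sampling step of a dataset from differences of its time axis and quantizes times to integer step indices before building a time index, and patch 041 reworded the guard raised when a grid dataset mixes several timesteps. A standalone sketch of the shared pattern (array values are made up; plain numpy is used instead of the library's helpers):

import numpy as np

# Infer the sampling step as the median of unique time differences,
# then quantize times to integer step indices (robust to gaps).
time = np.array([0.0, 0.25, 0.5, 0.75, 1.25])  # days, sub-daily step, one gap
dts = np.diff(np.unique(time))                 # [0.25 0.25 0.25 0.5 ]
timestep = np.median(dts)                      # 0.25
t_index = (time / timestep).astype("int32")    # [0 1 2 3 5]
print(timestep, t_index)

Working in integer step indices is what lets `build_index` treat sub-daily sampling like daily data; the float days are restored afterwards by multiplying back by the step, as `split_network` does with `ids["time"] *= timestep`.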
From 43c2616bdc5f267bed21dc334de49631a5dda20b Mon Sep 17 00:00:00 2001 From: AntSimi <36040805+AntSimi@users.noreply.github.com> Date: Mon, 28 Jun 2021 22:39:06 +0200 Subject: [PATCH 043/115] Add documentation and create method to detect time format --- src/py_eddy_tracker/__init__.py | 10 ++++++++++ src/py_eddy_tracker/appli/eddies.py | 20 ++++++++----------- src/py_eddy_tracker/appli/grid.py | 14 ++----------- .../observations/observation.py | 9 +++++++-- src/scripts/EddyTranslate | 3 ++- 5 files changed, 29 insertions(+), 27 deletions(-) diff --git a/src/py_eddy_tracker/__init__.py b/src/py_eddy_tracker/__init__.py index e8e3c590..fbeb1450 100644 --- a/src/py_eddy_tracker/__init__.py +++ b/src/py_eddy_tracker/__init__.py @@ -22,6 +22,7 @@ import logging from argparse import ArgumentParser +from datetime import datetime import zarr @@ -109,6 +110,15 @@ def parse_args(self, *args, **kwargs): TIME_MODELS = ["%Y%m%d", "%Y%m%d%H%M%S", "%Y%m%dT%H%M%S"] +def identify_time(str_date): + for model in TIME_MODELS: + try: + return datetime.strptime(str_date, model) + except ValueError: + pass + raise Exception("No time model found") + + VAR_DESCR = dict( time=dict( attr_name="time", diff --git a/src/py_eddy_tracker/appli/eddies.py b/src/py_eddy_tracker/appli/eddies.py index 701162a4..4809fddf 100644 --- a/src/py_eddy_tracker/appli/eddies.py +++ b/src/py_eddy_tracker/appli/eddies.py @@ -15,7 +15,7 @@ from numpy import bincount, bytes_, empty, in1d, unique from yaml import safe_load -from .. import TIME_MODELS, EddyParser +from .. import EddyParser, identify_time from ..observations.observation import EddiesObservations, reverse_index from ..observations.tracking import TrackEddiesObservations from ..tracking import Correspondances @@ -163,7 +163,12 @@ def eddies_tracking(): parser.add_argument( "--zarr", action="store_true", help="Output will be wrote in zarr" ) - parser.add_argument("--unraw", action="store_true", help="Load unraw data") + parser.add_argument( + "--unraw", + action="store_true", + help="Load unraw data, use only for netcdf." + "If unraw is active, netcdf is loaded without apply scalefactor and add_offset.", + ) parser.add_argument( "--blank_period", type=int, @@ -265,16 +270,7 @@ def browse_dataset_in( if str_date is not None: if date_model is None: - model_found = False - for model in TIME_MODELS: - try: - item["date"] = datetime.strptime(str_date, model) - model_found = True - break - except ValueError: - pass - if not model_found: - raise Exception("No time model found") + item["date"] = identify_time(str_date) else: item["date"] = datetime.strptime(str_date, date_model) diff --git a/src/py_eddy_tracker/appli/grid.py b/src/py_eddy_tracker/appli/grid.py index 7a746a8f..099465ee 100644 --- a/src/py_eddy_tracker/appli/grid.py +++ b/src/py_eddy_tracker/appli/grid.py @@ -3,9 +3,8 @@ All entry point to manipulate grid """ from argparse import Action -from datetime import datetime -from .. import TIME_MODELS, EddyParser +from .. 
import EddyParser, identify_time from ..dataset.grid import RegularGridDataset, UnRegularGridDataset @@ -121,16 +120,7 @@ def eddy_id(args=None): cut_wavelength = [0, *cut_wavelength] inf_bnds, upper_bnds = cut_wavelength - model_found = False - for model in TIME_MODELS: - try: - date = datetime.strptime(args.datetime, model) - model_found = True - break - except ValueError: - pass - if not model_found: - raise Exception("No time model found") + date = identify_time(args.datetime) kwargs = dict( step=args.isoline_step, shape_error=args.fit_errmax, diff --git a/src/py_eddy_tracker/observations/observation.py b/src/py_eddy_tracker/observations/observation.py index 907c2417..db0c2a45 100644 --- a/src/py_eddy_tracker/observations/observation.py +++ b/src/py_eddy_tracker/observations/observation.py @@ -69,7 +69,7 @@ poly_indexs, reduce_size, vertice_overlap, - winding_number_poly + winding_number_poly, ) logger = logging.getLogger("pet") @@ -1326,7 +1326,12 @@ def solve_conflict(cost): @staticmethod def solve_simultaneous(cost): - """Write something (TODO)""" + """Deduce link from cost matrix. + + :param array(float) cost: Cost for each available link + :return: return a boolean mask array, True for each valid couple + :rtype: array(bool) + """ mask = ~cost.mask if mask.size == 0: return mask diff --git a/src/scripts/EddyTranslate b/src/scripts/EddyTranslate index a710db0d..26ab3a7b 100644 --- a/src/scripts/EddyTranslate +++ b/src/scripts/EddyTranslate @@ -16,7 +16,8 @@ def id_parser(): ) parser.add_argument("filename_in") parser.add_argument("filename_out") - parser.add_argument("--unraw", action="store_true", help="Load unraw data") + parser.add_argument("--unraw", action="store_true", help="Load unraw data, use only for netcdf." + "If unraw is active, netcdf is loaded without apply scalefactor and add_offset.") return parser From 08c2393a053ee9bffcf77957fd369ddc120a0d1a Mon Sep 17 00:00:00 2001 From: Cori Pegliasco Date: Tue, 17 Aug 2021 12:14:07 +0200 Subject: [PATCH 044/115] - Majuscules + orthographe - n.period in int for particle advection - mise en page --- .../pet_eddy_detection_ACC.py | 13 +- examples/06_grid_manipulation/pet_lavd.py | 6 +- examples/16_network/pet_atlas.py | 28 +-- examples/16_network/pet_follow_particle.py | 4 +- examples/16_network/pet_relative.py | 6 +- .../16_network/pet_replay_segmentation.py | 8 +- examples/16_network/pet_segmentation_anim.py | 3 +- src/py_eddy_tracker/__init__.py | 15 +- src/py_eddy_tracker/appli/eddies.py | 18 +- src/py_eddy_tracker/appli/network.py | 4 +- src/py_eddy_tracker/eddy_feature.py | 19 +- src/py_eddy_tracker/observations/groups.py | 37 +++- src/py_eddy_tracker/observations/network.py | 175 ++++++++++++++---- .../observations/observation.py | 23 ++- src/py_eddy_tracker/observations/tracking.py | 33 +++- tests/test_grid.py | 10 +- 16 files changed, 306 insertions(+), 96 deletions(-) diff --git a/examples/02_eddy_identification/pet_eddy_detection_ACC.py b/examples/02_eddy_identification/pet_eddy_detection_ACC.py index e6c5e381..c799a45e 100644 --- a/examples/02_eddy_identification/pet_eddy_detection_ACC.py +++ b/examples/02_eddy_identification/pet_eddy_detection_ACC.py @@ -65,7 +65,8 @@ def set_fancy_labels(fig, ticklabelsize=14, labelsize=14, labelweight="semibold" y_name="latitude", # Manual area subset indexs=dict( - latitude=slice(100 - margin, 220 + margin), longitude=slice(0, 230 + margin), + latitude=slice(100 - margin, 220 + margin), + longitude=slice(0, 230 + margin), ), ) g_raw = RegularGridDataset(**kw_data) @@ -187,10 
+188,16 @@ def set_fancy_labels(fig, ticklabelsize=14, labelsize=14, labelweight="semibold" ax.set_ylabel("With filter") ax.plot( - a_[field][i_a] * factor, a[field][j_a] * factor, "r.", label="Anticyclonic", + a_[field][i_a] * factor, + a[field][j_a] * factor, + "r.", + label="Anticyclonic", ) ax.plot( - c_[field][i_c] * factor, c[field][j_c] * factor, "b.", label="Cyclonic", + c_[field][i_c] * factor, + c[field][j_c] * factor, + "b.", + label="Cyclonic", ) ax.set_aspect("equal"), ax.grid() ax.plot((0, 1000), (0, 1000), "g") diff --git a/examples/06_grid_manipulation/pet_lavd.py b/examples/06_grid_manipulation/pet_lavd.py index d96c0b06..e597821c 100644 --- a/examples/06_grid_manipulation/pet_lavd.py +++ b/examples/06_grid_manipulation/pet_lavd.py @@ -159,7 +159,11 @@ def update(i_frame): # Format LAVD data lavd = RegularGridDataset.with_array( coordinates=("lon", "lat"), - datas=dict(lavd=lavd.T, lon=x_g, lat=y_g,), + datas=dict( + lavd=lavd.T, + lon=x_g, + lat=y_g, + ), centered=True, ) diff --git a/examples/16_network/pet_atlas.py b/examples/16_network/pet_atlas.py index 7f86790a..6927f169 100644 --- a/examples/16_network/pet_atlas.py +++ b/examples/16_network/pet_atlas.py @@ -153,33 +153,33 @@ def update_axes(ax, mappable=None): update_axes(ax, m).set_label("Pixel used in % all atlas") # %% -# All Spliting -# ------------ -# Display the occurence of spliting events +# All splitting +# ------------- +# Display the occurence of splitting events ax = start_axes("") -g_all_spliting = n.spliting_event().grid_count(bins) -m = g_all_spliting.display(ax, **kw_time, vmin=0, vmax=1) +g_all_splitting = n.splitting_event().grid_count(bins) +m = g_all_splitting.display(ax, **kw_time, vmin=0, vmax=1) update_axes(ax, m).set_label("Pixel used in % of time") # %% -# Ratio spliting events / eddy presence +# Ratio splitting events / eddy presence ax = start_axes("") -g_ = g_all_spliting.vars["count"] * 100.0 / g_all.vars["count"] -m = g_all_spliting.display(ax, **kw_ratio, vmin=0, vmax=5, name=g_) +g_ = g_all_splitting.vars["count"] * 100.0 / g_all.vars["count"] +m = g_all_splitting.display(ax, **kw_ratio, vmin=0, vmax=5, name=g_) update_axes(ax, m).set_label("Pixel used in % all atlas") # %% -# Spliting in networks longer than 10 days -# ---------------------------------------- +# splitting in networks longer than 10 days +# ----------------------------------------- ax = start_axes("") -g_10_spliting = n10.spliting_event().grid_count(bins) -m = g_10_spliting.display(ax, **kw_time, vmin=0, vmax=1) +g_10_splitting = n10.splitting_event().grid_count(bins) +m = g_10_splitting.display(ax, **kw_time, vmin=0, vmax=1) update_axes(ax, m).set_label("Pixel used in % of time") # %% ax = start_axes("") g_ = ma.array( - g_10_spliting.vars["count"] * 100.0 / g_10.vars["count"], + g_10_splitting.vars["count"] * 100.0 / g_10.vars["count"], mask=g_10.vars["count"] < 365, ) -m = g_10_spliting.display(ax, **kw_ratio, vmin=0, vmax=5, name=g_) +m = g_10_splitting.display(ax, **kw_ratio, vmin=0, vmax=5, name=g_) update_axes(ax, m).set_label("Pixel used in % all atlas") diff --git a/examples/16_network/pet_follow_particle.py b/examples/16_network/pet_follow_particle.py index a5a252e2..1c858879 100644 --- a/examples/16_network/pet_follow_particle.py +++ b/examples/16_network/pet_follow_particle.py @@ -125,9 +125,11 @@ def update(frame): # %% # Particle advection # ^^^^^^^^^^^^^^^^^^ +# Advection from speed contour to speed contour (default) + step = 1 / 60.0 -t_start, t_end = n.period +t_start, t_end = int(n.period[0]), 
int(n.period[1]) dt = 14 shape = (n.obs.size, 2) diff --git a/examples/16_network/pet_relative.py b/examples/16_network/pet_relative.py index c4989edb..f5e8bc92 100644 --- a/examples/16_network/pet_relative.py +++ b/examples/16_network/pet_relative.py @@ -292,13 +292,13 @@ m1 # %% -# Get spliting event -# ------------------ +# Get splitting event +# ------------------- # Display the position of the eddies before a splitting fig = plt.figure(figsize=(15, 8)) ax = fig.add_axes([0.04, 0.06, 0.90, 0.88], projection=GUI_AXES) n.plot(ax, color_cycle=n.COLORS) -s0, s1, s1_start = n.spliting_event(triplet=True) +s0, s1, s1_start = n.splitting_event(triplet=True) s0.display(ax, color="violet", lw=2, label="Eddies before splitting") s1.display(ax, color="blueviolet", lw=2, label="Eddies after splitting") s1_start.display(ax, color="black", lw=2, label="Eddies starting by splitting") diff --git a/examples/16_network/pet_replay_segmentation.py b/examples/16_network/pet_replay_segmentation.py index d6b4568b..757854d5 100644 --- a/examples/16_network/pet_replay_segmentation.py +++ b/examples/16_network/pet_replay_segmentation.py @@ -149,7 +149,13 @@ def get_obs(dataset): n_.median_filter(15, "time", "latitude") kw["s"] = (n_.radius_e * 1e-3) ** 2 / 30 ** 2 * 20 m = n_.scatter_timeline( - ax, "shape_error_e", vmin=14, vmax=70, **kw, yfield="lon", method="all", + ax, + "shape_error_e", + vmin=14, + vmax=70, + **kw, + yfield="lon", + method="all", ) ax.set_ylabel("Longitude") cb = update_axes(ax, m["scatter"]) diff --git a/examples/16_network/pet_segmentation_anim.py b/examples/16_network/pet_segmentation_anim.py index 503229e7..340163a1 100644 --- a/examples/16_network/pet_segmentation_anim.py +++ b/examples/16_network/pet_segmentation_anim.py @@ -96,7 +96,8 @@ def update(i_frame): indices_frames = INDICES[i_frame] mappable_CONTOUR.set_data( - e.contour_lon_e[indices_frames], e.contour_lat_e[indices_frames], + e.contour_lon_e[indices_frames], + e.contour_lat_e[indices_frames], ) mappable_CONTOUR.set_color(cmap.colors[tr[indices_frames] % len(cmap.colors)]) return (mappable_tracks,) diff --git a/src/py_eddy_tracker/__init__.py b/src/py_eddy_tracker/__init__.py index fbeb1450..f3ecec84 100644 --- a/src/py_eddy_tracker/__init__.py +++ b/src/py_eddy_tracker/__init__.py @@ -404,7 +404,7 @@ def identify_time(str_date): nc_dims=("obs",), nc_attr=dict( long_name="Previous observation index", - comment="Index of previous observation in a spliting case", + comment="Index of previous observation in a splitting case", ), ), next_obs=dict( @@ -422,14 +422,20 @@ def identify_time(str_date): nc_name="previous_cost", nc_type="float32", nc_dims=("obs",), - nc_attr=dict(long_name="Previous cost for previous observation", comment="",), + nc_attr=dict( + long_name="Previous cost for previous observation", + comment="", + ), ), next_cost=dict( attr_name=None, nc_name="next_cost", nc_type="float32", nc_dims=("obs",), - nc_attr=dict(long_name="Next cost for next observation", comment="",), + nc_attr=dict( + long_name="Next cost for next observation", + comment="", + ), ), n=dict( attr_name=None, @@ -640,7 +646,8 @@ def identify_time(str_date): nc_type="f4", nc_dims=("obs",), nc_attr=dict( - long_name="Log base 10 background chlorophyll", units="Log(Chl/[mg/m^3])", + long_name="Log base 10 background chlorophyll", + units="Log(Chl/[mg/m^3])", ), ), year=dict( diff --git a/src/py_eddy_tracker/appli/eddies.py b/src/py_eddy_tracker/appli/eddies.py index 4809fddf..df4e7d43 100644 --- a/src/py_eddy_tracker/appli/eddies.py +++ 
b/src/py_eddy_tracker/appli/eddies.py @@ -243,7 +243,8 @@ def browse_dataset_in( filenames = bytes_(glob(full_path)) dataset_list = empty( - len(filenames), dtype=[("filename", "S500"), ("date", "datetime64[s]")], + len(filenames), + dtype=[("filename", "S500"), ("date", "datetime64[s]")], ) dataset_list["filename"] = filenames @@ -371,7 +372,8 @@ def track( logger.info("Longer track saved have %d obs", c.nb_obs_by_tracks.max()) logger.info( - "The mean length is %d observations for long track", c.nb_obs_by_tracks.mean(), + "The mean length is %d observations for long track", + c.nb_obs_by_tracks.mean(), ) long_track.write_file(**kw_write) @@ -381,7 +383,14 @@ def track( def get_group( - dataset1, dataset2, index1, index2, score, invalid=2, low=10, high=60, + dataset1, + dataset2, + index1, + index2, + score, + invalid=2, + low=10, + high=60, ): group1, group2 = dict(), dict() m_valid = (score * 100) >= invalid @@ -490,7 +499,8 @@ def get_values(v, dataset): ] labels = dict( - high=f"{high:0.0f} <= high", low=f"{invalid:0.0f} <= low < {low:0.0f}", + high=f"{high:0.0f} <= high", + low=f"{invalid:0.0f} <= low < {low:0.0f}", ) keys = [labels.get(key, key) for key in list(gr_ref.values())[0].keys()] diff --git a/src/py_eddy_tracker/appli/network.py b/src/py_eddy_tracker/appli/network.py index c1a752ee..5c4cdcaf 100644 --- a/src/py_eddy_tracker/appli/network.py +++ b/src/py_eddy_tracker/appli/network.py @@ -76,7 +76,9 @@ def subset_network(): help="Remove short dead end, first is for minimal obs number and second for minimal segment time to keep", ) parser.add_argument( - "--remove_trash", action="store_true", help="Remove trash (network id == 0)", + "--remove_trash", + action="store_true", + help="Remove trash (network id == 0)", ) parser.add_argument( "-p", diff --git a/src/py_eddy_tracker/eddy_feature.py b/src/py_eddy_tracker/eddy_feature.py index f6db848b..59a042fe 100644 --- a/src/py_eddy_tracker/eddy_feature.py +++ b/src/py_eddy_tracker/eddy_feature.py @@ -61,13 +61,13 @@ def __init__( """ Create amplitude object - :param Contours contour: - :param float contour_height: - :param array data: - :param float interval: + :param Contours contour: usefull class defined below + :param float contour_height: field value of the contour + :param array data: grid + :param float interval: step between two contours :param int mle: maximum number of local extrema in contour - :param int nb_step_min: number of intervals to consider an eddy - :param int nb_step_to_be_mle: number of intervals to be considered as an another maxima + :param int nb_step_min: minimum number of intervals to consider the contour as an eddy + :param int nb_step_to_be_mle: number of intervals to be considered as another extrema """ # Height of the contour @@ -116,8 +116,7 @@ def within_amplitude_limits(self): def all_pixels_below_h0(self, level): """ Check CSS11 criterion 1: The SSH values of all of the pixels - are below (above) a given SSH threshold for cyclonic (anticyclonic) - eddies. + are below a given SSH threshold for cyclonic eddies. """ # In some cases pixel value may be very close to the contour bounds if self.sla.mask.any() or ((self.sla.data - self.h_0) > self.EPSILON).any(): @@ -602,8 +601,8 @@ def display( 4. - Amplitude criterion (yellow) :param str field: Must be 'shape_error', 'x', 'y' or 'radius'. - If define display_criterion is not use. - bins argument must be define + If defined display_criterion is not use. 
+ bins argument must be defined :param array bins: bins used to colorize contour :param str cmap: Name of cmap for field display :param dict kwargs: look at :py:meth:`matplotlib.collections.LineCollection` diff --git a/src/py_eddy_tracker/observations/groups.py b/src/py_eddy_tracker/observations/groups.py index 544fd5f5..6fea0ace 100644 --- a/src/py_eddy_tracker/observations/groups.py +++ b/src/py_eddy_tracker/observations/groups.py @@ -68,7 +68,7 @@ def get_missing_indices( def advect(x, y, c, t0, n_days): """ - Advect particle from t0 to t0 + n_days, with data cube. + Advect particles from t0 to t0 + n_days, with data cube. :param np.array(float) x: longitude of particles :param np.array(float) y: latitude of particles @@ -87,7 +87,17 @@ def advect(x, y, c, t0, n_days): return t, x, y -def particle_candidate(c, eddies, step_mesh, t_start, i_target, pct, **kwargs): +def particle_candidate( + c, + eddies, + step_mesh, + t_start, + i_target, + pct, + contour_start="speed", + contour_end="effective", + **kwargs +): """Select particles within eddies, advect them, return target observation and associated percentages :param `~py_eddy_tracker.dataset.grid.GridCollection` c: GridCollection with speed for particles @@ -95,6 +105,8 @@ def particle_candidate(c, eddies, step_mesh, t_start, i_target, pct, **kwargs): :param int t_start: julian day of the advection :param np.array(int) i_target: corresponding obs where particles are advected :param np.array(int) pct: corresponding percentage of avected particles + :param str contour_start: contour where particles are injected + :param str contour_end: contour where particles are counted after advection :params dict kwargs: dict of params given to `advect` """ @@ -105,7 +117,14 @@ def particle_candidate(c, eddies, step_mesh, t_start, i_target, pct, **kwargs): # to be able to get global index translate_start = where(m_start)[0] - x, y, i_start = e.create_particles(step_mesh) + # Create particles in specified contour + if contour_start == "speed": + x, y, i_start = e.create_particles(step_mesh, intern=True) + elif contour_start == "effective": + x, y, i_start = e.create_particles(step_mesh, intern=False) + else: + x, y, i_start = e.create_particles(step_mesh, intern=True) + print("The contour_start was not correct, speed contour is used") # Advection t_end, x, y = advect(x, y, c, t_start, **kwargs) @@ -117,8 +136,14 @@ def particle_candidate(c, eddies, step_mesh, t_start, i_target, pct, **kwargs): # to be able to get global index translate_end = where(m_end)[0] - # Id eddies for each alive particle (in core and extern) - i_end = e_end.contains(x, y) + # Id eddies for each alive particle in specified contour + if contour_end == "speed": + i_end = e_end.contains(x, y, intern=True) + elif contour_end == "effective": + i_end = e_end.contains(x, y, intern=False) + else: + i_end = e_end.contains(x, y, intern=True) + print("The contour_end was not correct, speed contour is used") # compute matrix and fill target array get_matrix(i_start, i_end, translate_start, translate_end, i_target, pct) @@ -206,7 +231,7 @@ def filled_by_interpolation(self, mask): ) def insert_virtual(self): - """insert virtual observations on segments where observations are missing""" + """Insert virtual observations on segments where observations are missing""" dt_theorical = median(self.time[1:] - self.time[:-1]) indices = self.get_missing_indices(dt_theorical) diff --git a/src/py_eddy_tracker/observations/network.py b/src/py_eddy_tracker/observations/network.py index cb6d3986..0ae80634 
100644 --- a/src/py_eddy_tracker/observations/network.py +++ b/src/py_eddy_tracker/observations/network.py @@ -23,6 +23,9 @@ zeros, ) +import netCDF4 +import zarr + from ..dataset.grid import GridCollection from ..generic import build_index, wrap_longitude from ..poly import bbox_intersection, vertice_overlap @@ -147,7 +150,7 @@ def get_missing_indices(self, dt): ) def fix_next_previous_obs(self): - """function used after 'insert_virtual', to correct next_obs and + """Function used after 'insert_virtual', to correct next_obs and previous obs. """ @@ -577,7 +580,7 @@ def close_network(self, other, nb_obs_min=10, **kwargs): return other.extract_with_mask(m) def normalize_longitude(self): - """Normalize all longitude + """Normalize all longitudes Normalize longitude field and in the same range : - longitude_max @@ -677,7 +680,13 @@ def display_timeline( """ self.only_one_network() j = 0 - line_kw = dict(ls="-", marker="+", markersize=6, zorder=1, lw=3,) + line_kw = dict( + ls="-", + marker="+", + markersize=6, + zorder=1, + lw=3, + ) line_kw.update(kwargs) mappables = dict(lines=list()) @@ -719,7 +728,7 @@ def display_timeline( def event_timeline(self, ax, field=None, method=None, factor=1, colors_mode="roll"): """Mark events in plot""" j = 0 - events = dict(spliting=[], merging=[]) + events = dict(splitting=[], merging=[]) # TODO : fill mappables dict y_seg = dict() @@ -784,15 +793,15 @@ def event_timeline(self, ax, field=None, method=None, factor=1, colors_mode="rol ) ) ax.plot((x[0], _time[i_p]), (y0, y1), **event_kw)[0] - events["spliting"].append((x[0], y0)) + events["splitting"].append((x[0], y0)) j += 1 kwargs = dict(color="k", zorder=-1, linestyle=" ") - if len(events["spliting"]) > 0: - X, Y = list(zip(*events["spliting"])) + if len(events["splitting"]) > 0: + X, Y = list(zip(*events["splitting"])) ref = ax.plot( - X, Y, marker="*", markersize=12, label="spliting events", **kwargs + X, Y, marker="*", markersize=12, label="splitting events", **kwargs )[0] mappables.setdefault("events", []).append(ref) @@ -910,7 +919,10 @@ def event_map(self, ax, **kwargs): """Add the merging and splitting events to a map""" j = 0 mappables = dict() - symbol_kw = dict(markersize=10, color="k",) + symbol_kw = dict( + markersize=10, + color="k", + ) symbol_kw.update(kwargs) symbol_kw_split = symbol_kw.copy() symbol_kw_split["markersize"] += 4 @@ -939,7 +951,13 @@ def event_map(self, ax, **kwargs): return mappables def scatter( - self, ax, name="time", factor=1, ref=None, edgecolor_cycle=None, **kwargs, + self, + ax, + name="time", + factor=1, + ref=None, + edgecolor_cycle=None, + **kwargs, ): """ This function scatters the path of each network, with the merging and splitting events @@ -1001,6 +1019,8 @@ def segment_track_array(self): return build_unique_array(self.segment, self.track) def birth_event(self): + """Extract birth events. + Advice : individual eddies (self.track == 0) should be removed before -> apply remove_trash.""" # FIXME how to manage group 0 indices = list() previous_obs = self.previous_obs @@ -1014,6 +1034,8 @@ def birth_event(self): return self.extract_event(list(set(indices))) def death_event(self): + """Extract death events. 
+        Advice : individual eddies (self.track == 0) should be removed before -> apply remove_trash."""
         # FIXME how to manage group 0
         indices = list()
         next_obs = self.next_obs
@@ -1064,7 +1086,7 @@ def merging_event(self, triplet=False, only_index=False):
         else:
             return self.extract_event(idx_m1)
 
-    def spliting_event(self, triplet=False, only_index=False):
+    def splitting_event(self, triplet=False, only_index=False):
         """Return observation before a splitting event.
 
         If `triplet=True` return the eddy before a splitting event, the eddy after the splitting event,
@@ -1105,7 +1127,7 @@ def spliting_event(self, triplet=False, only_index=False):
 
     def dissociate_network(self):
         """
-        Dissociate networks with no known interaction (spliting/merging)
+        Dissociate networks with no known interaction (splitting/merging)
         """
 
         tags = self.tag_segment(multi_network=True)
@@ -1183,7 +1205,7 @@ def fully_connected(self):
 
     def remove_trash(self):
         """
-        Remove the lonely eddies (only 1 obs in segment, associated segment number is 0)
+        Remove the lonely eddies (only 1 obs in segment, associated network number is 0)
        """
         return self.extract_with_mask(self.track != 0)
@@ -1372,7 +1394,7 @@ def analysis_coherence(
         date_function,
         uv_params,
         advection_mode="both",
-        dt_advect=14,
+        n_days=14,
         step_mesh=1.0 / 50,
         output_name=None,
         dissociate_network=False,
@@ -1380,7 +1402,26 @@
         remove_dead_end=0,
     ):
 
-        """Global function to analyse segments coherence, with network preprocessing"""
+        """Global function to analyse segments coherence, with network preprocessing.
+        :param callable date_function: python function which takes an `int` (julian day) as parameter
+            and returns the data filename associated to the date
+        :param dict uv_params: dict of parameters used by
+            :py:meth:`~py_eddy_tracker.dataset.grid.GridCollection.from_netcdf_list`
+        :param int n_days: number of days for advection
+        :param float step_mesh: step for particle mesh in degrees
+        :param str output_name: path/name for the output (without extension) to store the clean
+            network in .nc and the coherence results in .zarr.
+            Works only for advection_mode = "both"
+        :param bool dissociate_network: If True apply
+            :py:meth:`~py_eddy_tracker.observations.network.NetworkObservations.dissociate_network`
+        :param int correct_close_events: Number of days in
+            :py:meth:`~py_eddy_tracker.observations.network.NetworkObservations.correct_close_events`
+        :param int remove_dead_end: Number of days in
+            :py:meth:`~py_eddy_tracker.observations.network.NetworkObservations.remove_dead_end`
+        :return target_forward, target_backward: 2D numpy.array with the eddy observation the
+            particles ended in after advection
+        :return pct_forward, pct_backward: percentage of ending particles within the
+            eddy observation with regard to the starting number
+        """
 
         if dissociate_network:
             self.dissociate_network()
@@ -1393,19 +1434,53 @@ def analysis_coherence(
         else:
             network_clean = self
 
-        res = network_clean.segment_coherence(
-            date_function=date_function,
-            uv_params=uv_params,
-            advection_mode=advection_mode,
-            output_name=output_name,
-            dt_advect=dt_advect,
-            step_mesh=step_mesh,
-        )
+        network_clean.numbering_segment()
+
+        res = []
+        if (advection_mode == "both") | (advection_mode == "forward"):
+            target_forward, pct_forward = network_clean.segment_coherence_forward(
+                date_function=date_function,
+                uv_params=uv_params,
+                n_days=n_days,
+                step_mesh=step_mesh,
+            )
+            res = res + [target_forward, pct_forward]
+
+        if (advection_mode == "both") | (advection_mode == "backward"):
+            target_backward, pct_backward = network_clean.segment_coherence_backward(
+                date_function=date_function,
+                uv_params=uv_params,
+                n_days=n_days,
+                step_mesh=step_mesh,
+            )
+            res = res + [target_backward, pct_backward]
+
+        if (output_name is not None) & (advection_mode == "both"):
+            # TODO : put some path verification?
+            # Save the clean network in netcdf
+            with netCDF4.Dataset(output_name + ".nc", "w") as fh:
+                network_clean.to_netcdf(fh)
+            # Save the results of particles advection in zarr
+            # zarr compression parameters
+            # TODO : check size? compression?
+ params_seg = dict() + params_pct = dict() + zg = zarr.open(output_name + ".zarr", mode="w") + zg.array("target_forward", target_forward, **params_seg) + zg.array("pct_forward", pct_forward, **params_pct) + zg.array("target_backward", target_backward, **params_seg) + zg.array("pct_backward", pct_backward, **params_pct) return network_clean, res def segment_coherence_backward( - self, date_function, uv_params, n_days=14, step_mesh=1.0 / 50, output_name=None, + self, + date_function, + uv_params, + n_days=14, + step_mesh=1.0 / 50, + contour_start="speed", + contour_end="speed", ): """ @@ -1434,7 +1509,7 @@ def date2file(julian_day): itb_final = -ones((self.obs.size, 2), dtype="i4") ptb_final = zeros((self.obs.size, 2), dtype="i1") - t_start, t_end = self.period + t_start, t_end = int(self.period[0]), int(self.period[1]) dates = arange(t_start, t_start + n_days + 1) first_files = [date_function(x) for x in dates] @@ -1455,17 +1530,33 @@ def date2file(julian_day): # add next date to GridCollection and delete last date c.shift_files(t_shift, date_function(int(t_shift)), **uv_params) particle_candidate( - c, self, step_mesh, _t, itb_final, ptb_final, n_days=-n_days + c, + self, + step_mesh, + _t, + itb_final, + ptb_final, + n_days=-n_days, + contour_start=contour_start, + contour_end=contour_end, + ) + logger.info( + ( + f"coherence {_t} / {range_end-1} ({(_t - range_start) / (range_end - range_start-1):.1%})" + f" : {time.time()-_timestamp:5.2f}s" + ) ) - logger.info(( - f"coherence {_t} / {range_end-1} ({(_t - range_start) / (range_end - range_start-1):.1%})" - f" : {time.time()-_timestamp:5.2f}s" - )) return itb_final, ptb_final def segment_coherence_forward( - self, date_function, uv_params, n_days=14, step_mesh=1.0 / 50, + self, + date_function, + uv_params, + n_days=14, + step_mesh=1.0 / 50, + contour_start="speed", + contour_end="speed", ): """ @@ -1494,7 +1585,7 @@ def date2file(julian_day): itf_final = -ones((self.obs.size, 2), dtype="i4") ptf_final = zeros((self.obs.size, 2), dtype="i1") - t_start, t_end = self.period + t_start, t_end = int(self.period[0]), int(self.period[1]) # if begin is not None and begin > t_start: # t_start = begin # if end is not None and end < t_end: @@ -1519,12 +1610,22 @@ def date2file(julian_day): # add next date to GridCollection and delete last date c.shift_files(t_shift, date_function(int(t_shift)), **uv_params) particle_candidate( - c, self, step_mesh, _t, itf_final, ptf_final, n_days=n_days + c, + self, + step_mesh, + _t, + itf_final, + ptf_final, + n_days=n_days, + contour_start=contour_start, + contour_end=contour_end, + ) + logger.info( + ( + f"coherence {_t} / {range_end-1} ({(_t - range_start) / (range_end - range_start-1):.1%})" + f" : {time.time()-_timestamp:5.2f}s" + ) ) - logger.info(( - f"coherence {_t} / {range_end-1} ({(_t - range_start) / (range_end - range_start-1):.1%})" - f" : {time.time()-_timestamp:5.2f}s" - )) return itf_final, ptf_final diff --git a/src/py_eddy_tracker/observations/observation.py b/src/py_eddy_tracker/observations/observation.py index db0c2a45..dec9a6b0 100644 --- a/src/py_eddy_tracker/observations/observation.py +++ b/src/py_eddy_tracker/observations/observation.py @@ -702,7 +702,11 @@ def load_file(cls, filename, **kwargs): .. 
code-block:: python kwargs_latlon_300 = dict( - include_vars=["longitude", "latitude",], indexs=dict(obs=slice(0, 300)), + include_vars=[ + "longitude", + "latitude", + ], + indexs=dict(obs=slice(0, 300)), ) small_dataset = TrackEddiesObservations.load_file( filename, **kwargs_latlon_300 @@ -1973,7 +1977,11 @@ def bins_stat(self, xname, bins=None, yname=None, method=None, mask=None): def format_label(self, label): t0, t1 = self.period - return label.format(t0=t0, t1=t1, nb_obs=len(self),) + return label.format( + t0=t0, + t1=t1, + nb_obs=len(self), + ) def display(self, ax, ref=None, extern_only=False, intern_only=False, **kwargs): """Plot the speed and effective (dashed) contour of the eddies @@ -2283,7 +2291,7 @@ def nb_days(self): return self.period[1] - self.period[0] + 1 def create_particles(self, step, intern=True): - """create particles only inside speed contour. Avoid creating too large numpy arrays, only to me masked + """Create particles inside contour (Default : speed contour). Avoid creating too large numpy arrays, only to be masked :param step: step for particles :type step: float @@ -2345,7 +2353,14 @@ def grid_count_pixel_in( x_, y_ = reduce_size(x_, y_) v = create_vertice(x_, y_) (x_start, x_stop), (y_start, y_stop) = bbox_indice_regular( - v, x_bounds, y_bounds, xstep, ystep, N, is_circular, x_size, + v, + x_bounds, + y_bounds, + xstep, + ystep, + N, + is_circular, + x_size, ) i, j = get_pixel_in_regular(v, x_c, y_c, x_start, x_stop, y_start, y_stop) grid_count_(grid, i, j) diff --git a/src/py_eddy_tracker/observations/tracking.py b/src/py_eddy_tracker/observations/tracking.py index 492842c7..3aa43387 100644 --- a/src/py_eddy_tracker/observations/tracking.py +++ b/src/py_eddy_tracker/observations/tracking.py @@ -578,7 +578,10 @@ def close_tracks(self, other, nb_obs_min=10, **kwargs): def format_label(self, label): t0, t1 = self.period return label.format( - t0=t0, t1=t1, nb_obs=len(self), nb_tracks=(self.nb_obs_by_track != 0).sum(), + t0=t0, + t1=t1, + nb_obs=len(self), + nb_tracks=(self.nb_obs_by_track != 0).sum(), ) def plot(self, ax, ref=None, **kwargs): @@ -702,7 +705,16 @@ def follow_obs(cls, i_next, track_id, used, ids, *args, **kwargs): @staticmethod def get_previous_obs( - i_current, ids, x, y, time_s, time_e, time_ref, window, **kwargs + i_current, + ids, + x, + y, + time_s, + time_e, + time_ref, + window, + min_overlap=0.01, + **kwargs, ): """Backward association of observations to the segments""" time_cur = int_(ids["time"][i_current]) @@ -720,7 +732,7 @@ def get_previous_obs( c = zeros(len(xj)) c[ij] = vertice_overlap(xi[ii], yi[ii], xj[ij], yj[ij], **kwargs) # We remove low overlap - c[c < 0.01] = 0 + c[c < min_overlap] = 0 # We get index of maximal overlap i = c.argmax() c_i = c[i] @@ -732,7 +744,18 @@ def get_previous_obs( break @staticmethod - def get_next_obs(i_current, ids, x, y, time_s, time_e, time_ref, window, **kwargs): + def get_next_obs( + i_current, + ids, + x, + y, + time_s, + time_e, + time_ref, + window, + min_overlap=0.01, + **kwargs, + ): """Forward association of observations to the segments""" time_max = time_e.shape[0] - 1 time_cur = int_(ids["time"][i_current]) @@ -752,7 +775,7 @@ def get_next_obs(i_current, ids, x, y, time_s, time_e, time_ref, window, **kwarg c = zeros(len(xj)) c[ij] = vertice_overlap(xi[ii], yi[ii], xj[ij], yj[ij], **kwargs) # We remove low overlap - c[c < 0.01] = 0 + c[c < min_overlap] = 0 # We get index of maximal overlap i = c.argmax() c_i = c[i] diff --git a/tests/test_grid.py b/tests/test_grid.py index 
34187357..2c89550a 100644 --- a/tests/test_grid.py +++ b/tests/test_grid.py @@ -7,7 +7,15 @@ G = RegularGridDataset(get_demo_path("mask_1_60.nc"), "lon", "lat") X = 0.025 -contour = Path(((-X, 0), (X, 0), (X, X), (-X, X), (-X, 0),)) +contour = Path( + ( + (-X, 0), + (X, 0), + (X, X), + (-X, X), + (-X, 0), + ) +) # contour From 80c529a0981d3e56bc5efd4eddf0f165aa7c6a61 Mon Sep 17 00:00:00 2001 From: Cori Pegliasco Date: Tue, 17 Aug 2021 14:00:51 +0200 Subject: [PATCH 045/115] numba requires specific numpy version because doc compil finds error: numpy 1.21.2 is installed but numpy<1.21,>=1.17 is required by {'numba'} --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 097e786a..477cf32d 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,7 +1,7 @@ matplotlib netCDF4 numba>=0.53 -numpy +numpy<1.21 opencv-python pint polygon3 From c63fea2adb614410dd0db4f969eac7ce0731fdec Mon Sep 17 00:00:00 2001 From: Antoine <36040805+AntSimi@users.noreply.github.com> Date: Thu, 16 Sep 2021 13:18:39 +0200 Subject: [PATCH 046/115] remove argument for conda setup --- environment.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/environment.yml b/environment.yml index 941bb7aa..cf1de6f6 100644 --- a/environment.yml +++ b/environment.yml @@ -6,6 +6,6 @@ dependencies: - python=3.7 - ffmpeg - pip: - - -r file:requirements.txt + - -r requirements.txt - pyeddytrackersample - . From 3afb70969397e9f8b25e4fe95a81c10bd1e73915 Mon Sep 17 00:00:00 2001 From: Antoine <36040805+AntSimi@users.noreply.github.com> Date: Sun, 26 Sep 2021 18:27:08 +0200 Subject: [PATCH 047/115] add changelog infos --- CHANGELOG.rst | 3 +++ 1 file changed, 3 insertions(+) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index f33f15dd..6c37a82f 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -19,6 +19,9 @@ Changed Fixed ^^^^^ +- Fix bug in convolution(filter), lowest rows was replace by zeros in convolution computation. + Important impact for tiny kernel + Added ^^^^^ From a7ef56e6ec60012e6bf6d036e1d5b3c01a35bede Mon Sep 17 00:00:00 2001 From: Antoine <36040805+AntSimi@users.noreply.github.com> Date: Fri, 17 Sep 2021 20:52:04 +0200 Subject: [PATCH 048/115] Github : Apply test on Ubuntu & Windows for python 37,38,39 --- .github/workflows/python-app.yml | 20 ++++++++++++++------ README.md | 2 ++ 2 files changed, 16 insertions(+), 6 deletions(-) diff --git a/.github/workflows/python-app.yml b/.github/workflows/python-app.yml index 7fc9f385..286d9d6c 100644 --- a/.github/workflows/python-app.yml +++ b/.github/workflows/python-app.yml @@ -7,19 +7,27 @@ on: [push, pull_request] jobs: build: - - runs-on: ubuntu-latest + strategy: + matrix: + # os: [ubuntu-latest, macos-latest, windows-latest] + os: [ubuntu-latest, windows-latest] + python_version: [3.7, 3.8, 3.9] + name: Run py eddy tracker build tests + runs-on: ${{ matrix.os }} + defaults: + run: + shell: bash -l {0} steps: - uses: actions/checkout@v2 - - name: Set up Python 3.7 + - name: Set up Python ${{ matrix.python_version }} uses: actions/setup-python@v2 with: - python-version: 3.7 + python-version: ${{ matrix.python_version }} - name: Install dependencies run: | python -m pip install --upgrade pip - pip install flake8 pytest + pip install flake8 pytest pytest-cov if [ -f requirements.txt ]; then pip install -r requirements.txt; fi - name: Install package run: | @@ -32,4 +40,4 @@ jobs: flake8 . 
--count --exit-zero --max-complexity=10 --max-line-length=127 --statistics - name: Test with pytest run: | - pytest + pytest --cov src/py_eddy_tracker diff --git a/README.md b/README.md index 736bf9b7..e26e15ac 100644 --- a/README.md +++ b/README.md @@ -10,6 +10,8 @@ Method was described in : +[Pegliasco, C., Delepoulle, A., Morrow, R., Faugère, Y., and Dibarboure, G.: META3.1exp : A new Global Mesoscale Eddy Trajectories Atlas derived from altimetry, Earth Syst. Sci. Data Discuss.](https://doi.org/10.5194/essd-2021-300) + [Mason, E., A. Pascual, and J. C. McWilliams, 2014: A new sea surface height–based code for oceanic mesoscale eddy tracking.](https://doi.org/10.1175/JTECH-D-14-00019.1) ### Use case ### From 98161f3f531c23879b6394e07bbbe015c9732de0 Mon Sep 17 00:00:00 2001 From: AntSimi <36040805+AntSimi@users.noreply.github.com> Date: Sat, 25 Sep 2021 16:14:49 +0200 Subject: [PATCH 049/115] Add dummy test on convolution, which detects an index error in the original code (corrected) --- requirements.txt | 2 +- src/py_eddy_tracker/dataset/grid.py | 4 +- src/py_eddy_tracker/observations/network.py | 26 +++--------- tests/test_grid.py | 45 ++++++++++++++++----- 4 files changed, 44 insertions(+), 33 deletions(-) diff --git a/requirements.txt b/requirements.txt index 477cf32d..c4ff9c41 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,4 @@ -matplotlib +matplotlib<3.5 netCDF4 numba>=0.53 numpy<1.21 diff --git a/src/py_eddy_tracker/dataset/grid.py b/src/py_eddy_tracker/dataset/grid.py index 6c8e332f..59cda040 100644 --- a/src/py_eddy_tracker/dataset/grid.py +++ b/src/py_eddy_tracker/dataset/grid.py @@ -680,6 +680,7 @@ def eddy_identification( ) z_min, z_max = z_min_p, z_max_p + logger.debug("Levels from %f to %f", z_min, z_max) levels = arange(z_min - z_min % step, z_max - z_max % step + 2 * step, step) # Get x and y values @@ -1404,7 +1405,8 @@ def convolve_filter_with_dynamic_kernel( tmp_matrix = ma.zeros((2 * d_lon + data.shape[0], k_shape[1])) tmp_matrix.mask = ones(tmp_matrix.shape, dtype=bool) # Slice to apply on input data - sl_lat_data = slice(max(0, i - d_lat), min(i + d_lat, data.shape[1])) + # +1 for upper bound, to take this column into account + sl_lat_data = slice(max(0, i - d_lat), min(i + d_lat + 1, data.shape[1])) # slice to apply on temporary matrix to store input data sl_lat_in = slice( d_lat - (i - sl_lat_data.start), d_lat + (sl_lat_data.stop - i) diff --git a/src/py_eddy_tracker/observations/network.py b/src/py_eddy_tracker/observations/network.py index 0ae80634..90bf6b70 100644 --- a/src/py_eddy_tracker/observations/network.py +++ b/src/py_eddy_tracker/observations/network.py @@ -6,6 +6,8 @@ import time from glob import glob +import netCDF4 +import zarr from numba import njit from numpy import ( arange, @@ -23,9 +25,6 @@ zeros, ) -import netCDF4 -import zarr - from ..dataset.grid import GridCollection from ..generic import build_index, wrap_longitude from ..poly import bbox_intersection, vertice_overlap @@ -680,13 +679,7 @@ def display_timeline( """ self.only_one_network() j = 0 - line_kw = dict( - ls="-", - marker="+", - markersize=6, - zorder=1, - lw=3, - ) + line_kw = dict(ls="-", marker="+", markersize=6, zorder=1, lw=3,) line_kw.update(kwargs) mappables = dict(lines=list()) @@ -919,10 +912,7 @@ def event_map(self, ax, **kwargs): """Add the merging and splitting events to a map""" j = 0 mappables = dict() - symbol_kw = dict( - markersize=10, - color="k", - ) + symbol_kw = dict(markersize=10, color="k",) symbol_kw.update(kwargs) symbol_kw_split =
symbol_kw.copy() symbol_kw_split["markersize"] += 4 @@ -951,13 +941,7 @@ def event_map(self, ax, **kwargs): return mappables def scatter( - self, - ax, - name="time", - factor=1, - ref=None, - edgecolor_cycle=None, - **kwargs, + self, ax, name="time", factor=1, ref=None, edgecolor_cycle=None, **kwargs, ): """ This function scatters the path of each network, with the merging and splitting events diff --git a/tests/test_grid.py b/tests/test_grid.py index 2c89550a..759a40e1 100644 --- a/tests/test_grid.py +++ b/tests/test_grid.py @@ -1,5 +1,5 @@ from matplotlib.path import Path -from numpy import array, isnan, ma +from numpy import arange, array, isnan, ma, nan, ones, zeros from pytest import approx from py_eddy_tracker.data import get_demo_path @@ -7,15 +7,7 @@ G = RegularGridDataset(get_demo_path("mask_1_60.nc"), "lon", "lat") X = 0.025 -contour = Path( - ( - (-X, 0), - (X, 0), - (X, X), - (-X, X), - (-X, 0), - ) -) +contour = Path(((-X, 0), (X, 0), (X, X), (-X, X), (-X, 0),)) # contour @@ -85,3 +77,36 @@ def test_interp(): assert g.interp("z", x0, y0) == 1.5 assert g.interp("z", x1, y1) == 2 assert isnan(g.interp("z", x2, y2)) + + +def test_convolution(): + """ + Add some dummy check on convolution filter + """ + # Fake grid + z = ma.array( + arange(12).reshape((-1, 1)) * arange(10).reshape((1, -1)), + mask=zeros((12, 10), dtype="bool"), + dtype="f4", + ) + g = RegularGridDataset.with_array( + coordinates=("x", "y"), + datas=dict(z=z, x=arange(0, 6, 0.5), y=arange(0, 5, 0.5),), + centered=True, + ) + + def kernel_func(lat): + return ones((3, 3)) + + # After transpose we must get same result + d = g.convolve_filter_with_dynamic_kernel("z", kernel_func) + assert (d.T[:9, :9] == d[:9, :9]).all() + # We mask one value and check convolution result + z.mask[2, 2] = True + d = g.convolve_filter_with_dynamic_kernel("z", kernel_func) + assert d[1, 1] == z[:3, :3].sum() / 8 + # Add nan and check only nearest value is contaminate + z[2, 2] = nan + d = g.convolve_filter_with_dynamic_kernel("z", kernel_func) + assert not isnan(d[0, 0]) + assert isnan(d[1:4, 1:4]).all() From e49e1232bf5979791b341386d6dddc9d0b771043 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment?= <49512274+ludwigVonKoopa@users.noreply.github.com> Date: Mon, 11 Oct 2021 15:53:44 +0200 Subject: [PATCH 050/115] change get_color() to get_edgecolor --- requirements.txt | 2 +- src/py_eddy_tracker/eddy_feature.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements.txt b/requirements.txt index c4ff9c41..477cf32d 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,4 @@ -matplotlib<3.5 +matplotlib netCDF4 numba>=0.53 numpy<1.21 diff --git a/src/py_eddy_tracker/eddy_feature.py b/src/py_eddy_tracker/eddy_feature.py index 59a042fe..d2616957 100644 --- a/src/py_eddy_tracker/eddy_feature.py +++ b/src/py_eddy_tracker/eddy_feature.py @@ -646,7 +646,7 @@ def display( paths.append(i.vertices) local_kwargs = kwargs.copy() if "color" not in kwargs: - local_kwargs["color"] = collection.get_color() + local_kwargs["color"] = collection.get_edgecolor() local_kwargs.pop("label", None) elif j != 0: local_kwargs.pop("label", None) From 25bf0ee2f6287ddc68aa704e1d1f6343e03d3c1d Mon Sep 17 00:00:00 2001 From: AntSimi <36040805+AntSimi@users.noreply.github.com> Date: Tue, 19 Oct 2021 22:13:12 +0200 Subject: [PATCH 051/115] Add option to manage issue #111 --- CHANGELOG.rst | 2 ++ src/py_eddy_tracker/dataset/grid.py | 16 +++++++++++++++- 2 files changed, 17 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.rst 
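Editor's note: two details of the PATCH 049 convolution fix are easy to miss. Python slices exclude their stop index, so a window of half-width d centred on column i needs i + d + 1 as upper bound, and masked-array reductions skip masked cells, which is why test_convolution divides by the 8 valid pixels instead of 9. A quick standalone check of both points (plain numpy, independent of the library code):

.. code-block:: python

    import numpy.ma as ma
    from numpy import arange, zeros

    data, i, d = arange(5.0), 2, 1
    assert data[max(0, i - d):min(i + d, data.size)].size == 2      # old bound drops a column
    assert data[max(0, i - d):min(i + d + 1, data.size)].size == 3  # fixed bound keeps it

    # Same fake grid as test_convolution, with one masked value
    z = ma.array(arange(12.0).reshape(-1, 1) * arange(10.0).reshape(1, -1),
                 mask=zeros((12, 10), dtype="bool"))
    z.mask[2, 2] = True
    assert z[:3, :3].count() == 8  # the masked cell is not a valid pixel
    # z[:3, :3].sum() / 8 is thus the mean over valid pixels, as asserted for d[1, 1]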
b/CHANGELOG.rst index 6c37a82f..75cc2dd0 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -25,6 +25,8 @@ Fixed Added ^^^^^ +- Allow to replace mask by isnan method to manage nan data instead of masked data + [3.5.0] - 2021-06-22 -------------------- diff --git a/src/py_eddy_tracker/dataset/grid.py b/src/py_eddy_tracker/dataset/grid.py index 59cda040..2bb5b70d 100644 --- a/src/py_eddy_tracker/dataset/grid.py +++ b/src/py_eddy_tracker/dataset/grid.py @@ -258,6 +258,7 @@ class GridDataset(object): "global_attrs", "vars", "contours", + "nan_mask", ) GRAVITY = 9.807 @@ -267,7 +268,14 @@ class GridDataset(object): N = 1 def __init__( - self, filename, x_name, y_name, centered=None, indexs=None, unset=False + self, + filename, + x_name, + y_name, + centered=None, + indexs=None, + unset=False, + nan_masking=False, ): """ :param str filename: Filename to load @@ -276,6 +284,7 @@ def __init__( :param bool,None centered: Allow to know how coordinates could be used with pixel :param dict indexs: A dictionary that sets indexes to use for non-coordinate dimensions :param bool unset: Set to True to create an empty grid object without file + :param bool nan_masking: Set to True to replace data.mask with isnan method result """ self.dimensions = None self.variables_description = None @@ -286,6 +295,7 @@ def __init__( self.y_bounds = None self.x_dim = None self.y_dim = None + self.nan_mask = nan_masking self.centered = centered self.contours = None self.filename = filename @@ -519,6 +529,10 @@ def grid(self, varname, indexs=None): if i_x > i_y: self.variables_description[varname]["infos"]["transpose"] = True self.vars[varname] = self.vars[varname].T + if self.nan_mask: + self.vars[varname] = ma.array( + self.vars[varname], mask=isnan(self.vars[varname]), + ) if not hasattr(self.vars[varname], "mask"): self.vars[varname] = ma.array( self.vars[varname], From c7255e40ec96861c62635f7cc72f72ca7ff98554 Mon Sep 17 00:00:00 2001 From: Antoine <36040805+AntSimi@users.noreply.github.com> Date: Tue, 26 Oct 2021 12:08:04 +0200 Subject: [PATCH 052/115] remove coverage --- .github/workflows/python-app.yml | 3 --- 1 file changed, 3 deletions(-) diff --git a/.github/workflows/python-app.yml b/.github/workflows/python-app.yml index 286d9d6c..a6fcceed 100644 --- a/.github/workflows/python-app.yml +++ b/.github/workflows/python-app.yml @@ -38,6 +38,3 @@ jobs: flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide flake8 . 
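Editor's note: the nan_masking option added by PATCH 051 targets grids that encode missing data as NaN instead of carrying a mask (issue #111). A minimal sketch of the substitution grid() now performs when nan_masking=True:

.. code-block:: python

    import numpy.ma as ma
    from numpy import array, isnan, nan

    raw = array([[1.0, nan], [3.0, 4.0]])
    # Rebuild the mask from isnan so NaN pixels behave like masked pixels
    data = ma.array(raw, mask=isnan(raw))
    assert data.count() == 3  # the NaN cell is masked...
    assert data.sum() == 8.0  # ...and excluded from reductions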
--count --exit-zero --max-complexity=10 --max-line-length=127 --statistics - - name: Test with pytest - run: | - pytest --cov src/py_eddy_tracker From c434372c3f8a05155eb4fdbe4b9996969f3b661e Mon Sep 17 00:00:00 2001 From: Cori Pegliasco Date: Tue, 23 Nov 2021 14:18:27 +0100 Subject: [PATCH 053/115] Resample contours in output form after fitting circles - add parameter presampling_multiplier to evenly over-resample before fitting circles - fit circles to get eddy parameters (radius, area, etc) - resample the contours with the output sampling --- src/py_eddy_tracker/dataset/grid.py | 66 +++++++++++++++++++---------- src/py_eddy_tracker/poly.py | 2 +- 2 files changed, 44 insertions(+), 24 deletions(-) diff --git a/src/py_eddy_tracker/dataset/grid.py b/src/py_eddy_tracker/dataset/grid.py index 2bb5b70d..5b884b68 100644 --- a/src/py_eddy_tracker/dataset/grid.py +++ b/src/py_eddy_tracker/dataset/grid.py @@ -607,6 +607,7 @@ def eddy_identification( date, step=0.005, shape_error=55, + presampling_multiplier=10, sampling=50, sampling_method="visvalingam", pixel_limit=None, @@ -624,8 +625,10 @@ def eddy_identification( :param datetime.datetime date: Date to be stored in object to date data :param float,int step: Height between two layers in m :param float,int shape_error: Maximal error allowed for outermost contour in % + :param int presampling_multiplier: + Evenly oversample the initial number of points in the contour by nb_pts x presampling_multiplier to fit circles :param int sampling: Number of points to store contours and speed profile - :param str sampling_method: Method to resample, 'uniform' or 'visvalingam' + :param str sampling_method: Method to resample the stored contours, 'uniform' or 'visvalingam' :param (int,int),None pixel_limit: Min and max number of pixels inside the inner and the outermost contour to be considered as an eddy :param float,None precision: Truncate values at the defined precision in m @@ -849,42 +852,59 @@ def eddy_identification( obs.amplitude[:] = amp.amplitude obs.speed_average[:] = max_average_speed obs.num_point_e[:] = contour.lon.shape[0] - xy_e = resample(contour.lon, contour.lat, **out_sampling) - obs.contour_lon_e[:], obs.contour_lat_e[:] = xy_e obs.num_point_s[:] = speed_contour.lon.shape[0] - xy_s = resample( - speed_contour.lon, speed_contour.lat, **out_sampling - ) - obs.contour_lon_s[:], obs.contour_lat_s[:] = xy_s - # FIXME : we use a contour without resampling - # First, get position based on innermost contour - centlon_i, centlat_i, _, _ = _fit_circle_path( - create_vertice(inner_contour.lon, inner_contour.lat) + # Evenly resample contours with nb_pts = nb_pts_original x presampling_multiplier + xy_i = uniform_resample( + inner_contour.lon, + inner_contour.lat, + num_fac=presampling_multiplier + ) + xy_e = uniform_resample( + contour.lon, + contour.lat, + num_fac=presampling_multiplier, ) - # Second, get speed-based radius based on contour of max uavg + xy_s = uniform_resample( + speed_contour.lon, + speed_contour.lat, + num_fac=presampling_multiplier, + ) + + # First, get position of max SSH based on best fit circle with resampled innermost contour + centlon_i, centlat_i, _, _ = _fit_circle_path(create_vertice(*xy_i)) + obs.lon_max[:] = centlon_i + obs.lat_max[:] = centlat_i + + # Second, get speed-based radius, shape error, eddy center, area based on resampled contour of max uavg centlon_s, centlat_s, eddy_radius_s, aerr_s = _fit_circle_path( create_vertice(*xy_s) ) - # Compute again to use resampled contour - _, _, eddy_radius_e, aerr_e = 
_fit_circle_path( - create_vertice(*xy_e) - ) - obs.radius_s[:] = eddy_radius_s - obs.radius_e[:] = eddy_radius_e - obs.shape_error_e[:] = aerr_e obs.shape_error_s[:] = aerr_s obs.speed_area[:] = poly_area( *coordinates_to_local(*xy_s, lon0=centlon_s, lat0=centlat_s) ) + obs.lon[:] = centlon_s + obs.lat[:] = centlat_s + + # Third, compute effective radius, shape error, area from resampled effective contour + _, _, eddy_radius_e, aerr_e = _fit_circle_path( + create_vertice(*xy_e) + ) + obs.radius_e[:] = eddy_radius_e + obs.shape_error_e[:] = aerr_e obs.effective_area[:] = poly_area( *coordinates_to_local(*xy_e, lon0=centlon_s, lat0=centlat_s) ) - obs.lon[:] = centlon_s - obs.lat[:] = centlat_s - obs.lon_max[:] = centlon_i - obs.lat_max[:] = centlat_i + + # Finally, resample contours with output parameters + xy_e_f = resample(*xy_e, **out_sampling) + xy_s_f = resample(*xy_s, **out_sampling) + + obs.contour_lon_s[:], obs.contour_lat_s[:] = xy_s_f + obs.contour_lon_e[:], obs.contour_lat_e[:] = xy_e_f + if aerr > 99.9 or aerr_s > 99.9: logger.warning( "Strange shape at this step! shape_error : %f, %f", diff --git a/src/py_eddy_tracker/poly.py b/src/py_eddy_tracker/poly.py index 0f0271ee..56fb55e7 100644 --- a/src/py_eddy_tracker/poly.py +++ b/src/py_eddy_tracker/poly.py @@ -86,7 +86,7 @@ def poly_area_vertice(v): @njit(cache=True) def poly_area(x, y): """ - Must be call with local coordinates (in m, to get an area in m²). + Must be called with local coordinates (in m, to get an area in m²). :param array x: :param array y: From 573c4f5e6991a41004fd9bcbaa72d04fe5c0cbfa Mon Sep 17 00:00:00 2001 From: AntSimi <36040805+AntSimi@users.noreply.github.com> Date: Thu, 2 Sep 2021 13:30:02 +0200 Subject: [PATCH 054/115] Add information about animation #102 --- examples/08_tracking_manipulation/pet_track_anim.py | 4 +++- .../pet_track_anim_matplotlib_animation.py | 4 +++- .../08_tracking_manipulation/pet_track_anim.ipynb | 4 ++-- .../pet_track_anim_matplotlib_animation.ipynb | 4 ++-- 4 files changed, 10 insertions(+), 6 deletions(-) diff --git a/examples/08_tracking_manipulation/pet_track_anim.py b/examples/08_tracking_manipulation/pet_track_anim.py index 0c18a0ba..94e09ad3 100644 --- a/examples/08_tracking_manipulation/pet_track_anim.py +++ b/examples/08_tracking_manipulation/pet_track_anim.py @@ -2,7 +2,9 @@ Track animation =============== -Run in a terminal this script, which allow to watch eddy evolution +Run in a terminal this script, which allow to watch eddy evolution. + +You could use also *EddyAnim* script to display/save animation. """ import py_eddy_tracker_sample diff --git a/examples/08_tracking_manipulation/pet_track_anim_matplotlib_animation.py b/examples/08_tracking_manipulation/pet_track_anim_matplotlib_animation.py index 6776b47e..59b21527 100644 --- a/examples/08_tracking_manipulation/pet_track_anim_matplotlib_animation.py +++ b/examples/08_tracking_manipulation/pet_track_anim_matplotlib_animation.py @@ -2,7 +2,9 @@ Track animation with standard matplotlib ======================================== -Run in a terminal this script, which allow to watch eddy evolution +Run in a terminal this script, which allow to watch eddy evolution. + +You could use also *EddyAnim* script to display/save animation. 
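Editor's note: the ordering introduced by PATCH 053 is the point of the change. Contours are first oversampled evenly (nb_pts x presampling_multiplier), the circle fits and areas are computed on those dense vertices, and only then is the stored contour reduced to the output sampling. A rough standalone sketch of that flow, with a simplified stand-in for the library's uniform_resample:

.. code-block:: python

    import numpy as np

    def uniform_resample_sketch(x, y, num_fac=10):
        # Even resampling along cumulated arc length (simplified stand-in)
        d = np.r_[0, np.cumsum(np.hypot(np.diff(x), np.diff(y)))]
        d_new = np.linspace(0, d[-1], x.size * num_fac)
        return np.interp(d_new, d, x), np.interp(d_new, d, y)

    theta = np.linspace(0, 2 * np.pi, 20)
    x, y = np.cos(theta), np.sin(theta)
    x_d, y_d = uniform_resample_sketch(x, y, num_fac=10)  # 1) oversample evenly
    # 2) the circle fit and poly_area run here, on the dense vertices
    x_out, y_out = x_d[::4], y_d[::4]  # 3) coarser contour kept in the output file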
""" import re diff --git a/notebooks/python_module/08_tracking_manipulation/pet_track_anim.ipynb b/notebooks/python_module/08_tracking_manipulation/pet_track_anim.ipynb index 65768145..08364d16 100644 --- a/notebooks/python_module/08_tracking_manipulation/pet_track_anim.ipynb +++ b/notebooks/python_module/08_tracking_manipulation/pet_track_anim.ipynb @@ -15,7 +15,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\n# Track animation\n\nRun in a terminal this script, which allow to watch eddy evolution\n" + "\nTrack animation\n===============\n\nRun in a terminal this script, which allow to watch eddy evolution.\n\nYou could use also *EddyAnim* script to display/save animation.\n" ] }, { @@ -82,7 +82,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.9" + "version": "3.7.7" } }, "nbformat": 4, diff --git a/notebooks/python_module/08_tracking_manipulation/pet_track_anim_matplotlib_animation.ipynb b/notebooks/python_module/08_tracking_manipulation/pet_track_anim_matplotlib_animation.ipynb index 6d7fcc2e..bcd4ba74 100644 --- a/notebooks/python_module/08_tracking_manipulation/pet_track_anim_matplotlib_animation.ipynb +++ b/notebooks/python_module/08_tracking_manipulation/pet_track_anim_matplotlib_animation.ipynb @@ -15,7 +15,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\n# Track animation with standard matplotlib\n\nRun in a terminal this script, which allow to watch eddy evolution\n" + "\nTrack animation with standard matplotlib\n========================================\n\nRun in a terminal this script, which allow to watch eddy evolution.\n\nYou could use also *EddyAnim* script to display/save animation.\n" ] }, { @@ -93,7 +93,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.9" + "version": "3.7.7" } }, "nbformat": 4, From b357421817442f3e8637dc2736c27365e344c4bf Mon Sep 17 00:00:00 2001 From: Antoine <36040805+AntSimi@users.noreply.github.com> Date: Mon, 13 Dec 2021 11:11:30 +0100 Subject: [PATCH 055/115] Loopers (#118) * Add short example about loopers and eddies --- CHANGELOG.rst | 2 + examples/06_grid_manipulation/pet_advect.py | 2 +- examples/06_grid_manipulation/pet_lavd.py | 8 +- examples/07_cube_manipulation/pet_cube.py | 2 +- .../pet_track_anim_matplotlib_animation.py | 2 +- .../pet_normalised_lifetime.py | 18 +- .../12_external_data/pet_drifter_loopers.py | 153 ++++++++++++++ examples/16_network/pet_follow_particle.py | 2 +- examples/16_network/pet_group_anim.py | 2 +- examples/16_network/pet_ioannou_2017_case.py | 2 +- examples/16_network/pet_segmentation_anim.py | 5 +- .../06_grid_manipulation/pet_advect.ipynb | 18 +- .../06_grid_manipulation/pet_lavd.ipynb | 20 +- .../07_cube_manipulation/pet_cube.ipynb | 14 +- .../pet_track_anim_matplotlib_animation.ipynb | 4 +- .../pet_normalised_lifetime.ipynb | 12 +- .../pet_drifter_loopers.ipynb | 191 ++++++++++++++++++ .../16_network/pet_follow_particle.ipynb | 8 +- .../16_network/pet_group_anim.ipynb | 10 +- .../16_network/pet_ioannou_2017_case.ipynb | 22 +- .../16_network/pet_segmentation_anim.ipynb | 2 +- .../data/loopers_lumpkin_med.nc | Bin 0 -> 244130 bytes src/py_eddy_tracker/observations/network.py | 4 +- .../observations/observation.py | 3 +- src/py_eddy_tracker/observations/tracking.py | 13 +- src/py_eddy_tracker/poly.py | 20 +- 26 files changed, 432 insertions(+), 107 deletions(-) create mode 100644 examples/12_external_data/pet_drifter_loopers.py create mode 100644 
notebooks/python_module/12_external_data/pet_drifter_loopers.ipynb create mode 100644 src/py_eddy_tracker/data/loopers_lumpkin_med.nc diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 75cc2dd0..c6ab4cac 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -21,11 +21,13 @@ Fixed - Fix bug in convolution(filter), lowest rows was replace by zeros in convolution computation. Important impact for tiny kernel +- Fix method of sampling before contour fitting Added ^^^^^ - Allow to replace mask by isnan method to manage nan data instead of masked data +- Add drifter colocation example [3.5.0] - 2021-06-22 -------------------- diff --git a/examples/06_grid_manipulation/pet_advect.py b/examples/06_grid_manipulation/pet_advect.py index 0e00697f..1a98536a 100644 --- a/examples/06_grid_manipulation/pet_advect.py +++ b/examples/06_grid_manipulation/pet_advect.py @@ -50,7 +50,7 @@ def _repr_html_(self, *args, **kwargs): def save(self, *args, **kwargs): if args[0].endswith("gif"): - # In this case gif is use to create thumbnail which are not use but consume same time than video + # In this case gif is used to create thumbnail which is not used but consume same time than video # So we create an empty file, to save time with open(args[0], "w") as _: pass diff --git a/examples/06_grid_manipulation/pet_lavd.py b/examples/06_grid_manipulation/pet_lavd.py index e597821c..89d64108 100644 --- a/examples/06_grid_manipulation/pet_lavd.py +++ b/examples/06_grid_manipulation/pet_lavd.py @@ -65,7 +65,7 @@ def _repr_html_(self, *args, **kwargs): def save(self, *args, **kwargs): if args[0].endswith("gif"): - # In this case gif is use to create thumbnail which are not use but consume same time than video + # In this case gif is used to create thumbnail which is not used but consume same time than video # So we create an empty file, to save time with open(args[0], "w") as _: pass @@ -159,11 +159,7 @@ def update(i_frame): # Format LAVD data lavd = RegularGridDataset.with_array( coordinates=("lon", "lat"), - datas=dict( - lavd=lavd.T, - lon=x_g, - lat=y_g, - ), + datas=dict(lavd=lavd.T, lon=x_g, lat=y_g,), centered=True, ) diff --git a/examples/07_cube_manipulation/pet_cube.py b/examples/07_cube_manipulation/pet_cube.py index a674359d..7f30c4e1 100644 --- a/examples/07_cube_manipulation/pet_cube.py +++ b/examples/07_cube_manipulation/pet_cube.py @@ -31,7 +31,7 @@ def _repr_html_(self, *args, **kwargs): def save(self, *args, **kwargs): if args[0].endswith("gif"): - # In this case gif is use to create thumbnail which are not use but consume same time than video + # In this case gif is used to create thumbnail which is not used but consume same time than video # So we create an empty file, to save time with open(args[0], "w") as _: pass diff --git a/examples/08_tracking_manipulation/pet_track_anim_matplotlib_animation.py b/examples/08_tracking_manipulation/pet_track_anim_matplotlib_animation.py index 59b21527..81e57e59 100644 --- a/examples/08_tracking_manipulation/pet_track_anim_matplotlib_animation.py +++ b/examples/08_tracking_manipulation/pet_track_anim_matplotlib_animation.py @@ -30,7 +30,7 @@ def _repr_html_(self, *args, **kwargs): def save(self, *args, **kwargs): if args[0].endswith("gif"): - # In this case gif is use to create thumbnail which are not use but consume same time than video + # In this case gif is used to create thumbnail which is not used but consume same time than video # So we create an empty file, to save time with open(args[0], "w") as _: pass diff --git 
a/examples/10_tracking_diagnostics/pet_normalised_lifetime.py b/examples/10_tracking_diagnostics/pet_normalised_lifetime.py index 73e5274e..1c84a8cc 100644 --- a/examples/10_tracking_diagnostics/pet_normalised_lifetime.py +++ b/examples/10_tracking_diagnostics/pet_normalised_lifetime.py @@ -65,14 +65,14 @@ def eddy_norm_lifetime(self, name, nb, factor=1): # %% # Figure # ------ -fig, axs = plt.subplots(nrows=2, figsize=(8, 6)) +fig, (ax0, ax1) = plt.subplots(nrows=2, figsize=(8, 6)) -axs[0].set_title("Normalised Mean Radius") -axs[0].plot(*AC_radius), axs[0].plot(*CC_radius) -axs[0].set_ylabel("Radius (km)"), axs[0].grid() -axs[0].set_xlim(0, 1), axs[0].set_ylim(0, None) +ax0.set_title("Normalised Mean Radius") +ax0.plot(*AC_radius), ax0.plot(*CC_radius) +ax0.set_ylabel("Radius (km)"), ax0.grid() +ax0.set_xlim(0, 1), ax0.set_ylim(0, None) -axs[1].set_title("Normalised Mean Amplitude") -axs[1].plot(*AC_amplitude, label="AC"), axs[1].plot(*CC_amplitude, label="CC") -axs[1].set_ylabel("Amplitude (cm)"), axs[1].grid(), axs[1].legend() -_ = axs[1].set_xlim(0, 1), axs[1].set_ylim(0, None) +ax1.set_title("Normalised Mean Amplitude") +ax1.plot(*AC_amplitude, label="AC"), ax1.plot(*CC_amplitude, label="CC") +ax1.set_ylabel("Amplitude (cm)"), ax1.grid(), ax1.legend() +_ = ax1.set_xlim(0, 1), ax1.set_ylim(0, None) diff --git a/examples/12_external_data/pet_drifter_loopers.py b/examples/12_external_data/pet_drifter_loopers.py new file mode 100644 index 00000000..92707906 --- /dev/null +++ b/examples/12_external_data/pet_drifter_loopers.py @@ -0,0 +1,153 @@ +""" +Colocate looper with eddy from altimetry +======================================== + +All loopers data used in this example are a subset from the dataset described in this article +[Lumpkin, R. : Global characteristics of coherent vortices from surface drifter trajectories](https://doi.org/10.1002/2015JC011435) +""" + +import re + +import numpy as np +import py_eddy_tracker_sample +from matplotlib import pyplot as plt +from matplotlib.animation import FuncAnimation + +from py_eddy_tracker import data +from py_eddy_tracker.appli.gui import Anim +from py_eddy_tracker.observations.tracking import TrackEddiesObservations + + +# %% +class VideoAnimation(FuncAnimation): + def _repr_html_(self, *args, **kwargs): + """To get video in html and have a player""" + content = self.to_html5_video() + return re.sub( + r'width="[0-9]*"\sheight="[0-9]*"', 'width="100%" height="100%"', content + ) + + def save(self, *args, **kwargs): + if args[0].endswith("gif"): + # In this case gif is used to create thumbnail which is not used but consume same time than video + # So we create an empty file, to save time + with open(args[0], "w") as _: + pass + return + return super().save(*args, **kwargs) + + +def start_axes(title): + fig = plt.figure(figsize=(13, 5)) + ax = fig.add_axes([0.03, 0.03, 0.90, 0.94], aspect="equal") + ax.set_xlim(-6, 36.5), ax.set_ylim(30, 46) + ax.set_title(title, weight="bold") + return ax + + +def update_axes(ax, mappable=None): + ax.grid() + if mappable: + plt.colorbar(mappable, cax=ax.figure.add_axes([0.94, 0.05, 0.01, 0.9])) + + +# %% +# Load eddies dataset +cyclonic_eddies = TrackEddiesObservations.load_file( + py_eddy_tracker_sample.get_demo_path("eddies_med_adt_allsat_dt2018/Cyclonic.zarr") +) +anticyclonic_eddies = TrackEddiesObservations.load_file( + py_eddy_tracker_sample.get_demo_path( + "eddies_med_adt_allsat_dt2018/Anticyclonic.zarr" + ) +) + +# %% +# Load loopers dataset +loopers_med = TrackEddiesObservations.load_file( + 
data.get_demo_path("loopers_lumpkin_med.nc") +) + +# %% +# Global view +# =========== +ax = start_axes("All drifters available in Med from Lumpkin dataset") +loopers_med.plot(ax, lw=0.5, color="r", ref=-10) +update_axes(ax) + +# %% +# One segment of drifter +# ====================== +# +# Get a drifter segment (the indexes used have no correspondance with the original dataset). +looper = loopers_med.extract_ids((3588,)) +fig = plt.figure(figsize=(16, 6)) +ax = fig.add_subplot(111, aspect="equal") +looper.plot(ax, lw=0.5, label="Original position of drifter") +looper_filtered = looper.copy() +looper_filtered.position_filter(1, 13) +s = looper_filtered.scatter( + ax, + "time", + cmap=plt.get_cmap("Spectral_r", 20), + label="Filtered position of drifter", +) +plt.colorbar(s).set_label("time (days from 1/1/1950)") +ax.legend() +ax.grid() + +# %% +# Try to find a detected eddies with adt at same place. We used filtered track to simulate an eddy center +match = looper_filtered.close_tracks( + anticyclonic_eddies, method="close_center", delta=0.1, nb_obs_min=50 +) +fig = plt.figure(figsize=(16, 6)) +ax = fig.add_subplot(111, aspect="equal") +looper.plot(ax, lw=0.5, label="Original position of drifter") +looper_filtered.plot(ax, lw=1.5, label="Filtered position of drifter") +match.plot(ax, lw=1.5, label="Matched eddy") +ax.legend() +ax.grid() + +# %% +# Display radius of this 2 datasets. +fig = plt.figure(figsize=(20, 8)) +ax = fig.add_subplot(111) +ax.plot(looper.time, looper.radius_s / 1e3, label="loopers") +looper_radius = looper.copy() +looper_radius.median_filter(1, "time", "radius_s", inplace=True) +looper_radius.loess_filter(13, "time", "radius_s", inplace=True) +ax.plot( + looper_radius.time, + looper_radius.radius_s / 1e3, + label="loopers (filtered half window 13 days)", +) +ax.plot(match.time, match.radius_s / 1e3, label="altimetry") +match_radius = match.copy() +match_radius.median_filter(1, "time", "radius_s", inplace=True) +match_radius.loess_filter(13, "time", "radius_s", inplace=True) +ax.plot( + match_radius.time, + match_radius.radius_s / 1e3, + label="altimetry (filtered half window 13 days)", +) +ax.set_ylabel("radius(km)"), ax.set_ylim(0, 100) +ax.legend() +ax.set_title("Radius from loopers and altimeter") +ax.grid() + + +# %% +# Animation of a drifter and its colocated eddy +def update(frame): + # We display last 5 days of loopers trajectory + m = (looper.time < frame) * (looper.time > (frame - 5)) + anim.func_animation(frame) + line.set_data(looper.lon[m], looper.lat[m]) + + +anim = Anim(match, intern=True, figsize=(8, 8), cmap="magma_r", nb_step=10, dpi=75) +# mappable to show drifter in red +line = anim.ax.plot([], [], "r", lw=4, zorder=100)[0] +anim.fig.suptitle("") +_ = VideoAnimation(anim.fig, update, frames=np.arange(*anim.period, 1), interval=125) diff --git a/examples/16_network/pet_follow_particle.py b/examples/16_network/pet_follow_particle.py index 1c858879..dbe0753e 100644 --- a/examples/16_network/pet_follow_particle.py +++ b/examples/16_network/pet_follow_particle.py @@ -31,7 +31,7 @@ def _repr_html_(self, *args, **kwargs): def save(self, *args, **kwargs): if args[0].endswith("gif"): - # In this case gif is used to create thumbnail which are not used but consumes same time than video + # In this case gif is used to create thumbnail which is not used but consume same time than video # So we create an empty file, to save time with open(args[0], "w") as _: pass diff --git a/examples/16_network/pet_group_anim.py b/examples/16_network/pet_group_anim.py index 
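Editor's note: the radius comparison in pet_drifter_loopers.py smooths each series in two passes, a short median filter to knock out spikes, then a wide loess to keep only the trend. A toy equivalent with plain numpy (daily sampling assumed, and a centred moving average standing in for the loess pass):

.. code-block:: python

    import numpy as np

    def median3(v):
        # 3-point running median, i.e. a half-window of 1 day
        p = np.r_[v[0], v, v[-1]]
        return np.median(np.c_[p[:-2], p[1:-1], p[2:]], axis=1)

    def boxcar(v, half_window=13):
        # Crude stand-in for the loess pass: centred moving average
        k = np.ones(2 * half_window + 1)
        return np.convolve(v, k / k.size, mode="same")

    radius = 50 + 10 * np.sin(np.linspace(0, 6, 200))
    radius[100] = 150  # a single bad radius estimate
    smooth = boxcar(median3(radius))
    assert abs(smooth[100] - radius[100]) > 50  # spike is gone before the trend fit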
8ecee534..047f5820 100644 --- a/examples/16_network/pet_group_anim.py +++ b/examples/16_network/pet_group_anim.py @@ -29,7 +29,7 @@ def _repr_html_(self, *args, **kwargs): def save(self, *args, **kwargs): if args[0].endswith("gif"): - # In this case gif is use to create thumbnail which are not use but consume same time than video + # In this case gif is used to create thumbnail which is not used but consume same time than video # So we create an empty file, to save time with open(args[0], "w") as _: pass diff --git a/examples/16_network/pet_ioannou_2017_case.py b/examples/16_network/pet_ioannou_2017_case.py index bbe26e3f..b02b846a 100644 --- a/examples/16_network/pet_ioannou_2017_case.py +++ b/examples/16_network/pet_ioannou_2017_case.py @@ -36,7 +36,7 @@ def _repr_html_(self, *args, **kwargs): def save(self, *args, **kwargs): if args[0].endswith("gif"): - # In this case gif is use to create thumbnail which are not use but consume same time than video + # In this case gif is used to create thumbnail which is not used but consume same time than video # So we create an empty file, to save time with open(args[0], "w") as _: pass diff --git a/examples/16_network/pet_segmentation_anim.py b/examples/16_network/pet_segmentation_anim.py index 340163a1..58f71188 100644 --- a/examples/16_network/pet_segmentation_anim.py +++ b/examples/16_network/pet_segmentation_anim.py @@ -27,7 +27,7 @@ def _repr_html_(self, *args, **kwargs): def save(self, *args, **kwargs): if args[0].endswith("gif"): - # In this case gif is use to create thumbnail which are not use but consume same time than video + # In this case gif is used to create thumbnail which is not used but consume same time than video # So we create an empty file, to save time with open(args[0], "w") as _: pass @@ -96,8 +96,7 @@ def update(i_frame): indices_frames = INDICES[i_frame] mappable_CONTOUR.set_data( - e.contour_lon_e[indices_frames], - e.contour_lat_e[indices_frames], + e.contour_lon_e[indices_frames], e.contour_lat_e[indices_frames], ) mappable_CONTOUR.set_color(cmap.colors[tr[indices_frames] % len(cmap.colors)]) return (mappable_tracks,) diff --git a/notebooks/python_module/06_grid_manipulation/pet_advect.ipynb b/notebooks/python_module/06_grid_manipulation/pet_advect.ipynb index bceed074..79d69b0d 100644 --- a/notebooks/python_module/06_grid_manipulation/pet_advect.ipynb +++ b/notebooks/python_module/06_grid_manipulation/pet_advect.ipynb @@ -15,7 +15,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\n# Grid advection\n\nDummy advection which use only static geostrophic current, which didn't solve the complex circulation of the ocean.\n" + "\nGrid advection\n==============\n\nDummy advection which use only static geostrophic current, which didn't solve the complex circulation of the ocean.\n" ] }, { @@ -91,14 +91,14 @@ }, "outputs": [], "source": [ - "class VideoAnimation(FuncAnimation):\n def _repr_html_(self, *args, **kwargs):\n \"\"\"To get video in html and have a player\"\"\"\n content = self.to_html5_video()\n return re.sub(\n r'width=\"[0-9]*\"\\sheight=\"[0-9]*\"', 'width=\"100%\" height=\"100%\"', content\n )\n\n def save(self, *args, **kwargs):\n if args[0].endswith(\"gif\"):\n # In this case gif is use to create thumbnail which are not use but consume same time than video\n # So we create an empty file, to save time\n with open(args[0], \"w\") as _:\n pass\n return\n return super().save(*args, **kwargs)" + "class VideoAnimation(FuncAnimation):\n def _repr_html_(self, *args, **kwargs):\n \"\"\"To get video in html and 
have a player\"\"\"\n content = self.to_html5_video()\n return re.sub(\n r'width=\"[0-9]*\"\\sheight=\"[0-9]*\"', 'width=\"100%\" height=\"100%\"', content\n )\n\n def save(self, *args, **kwargs):\n if args[0].endswith(\"gif\"):\n # In this case gif is used to create thumbnail which is not used but consume same time than video\n # So we create an empty file, to save time\n with open(args[0], \"w\") as _:\n pass\n return\n return super().save(*args, **kwargs)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "## Anim\nParticles setup\n\n" + "Anim\n----\nParticles setup\n\n" ] }, { @@ -152,7 +152,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "### Filament forward\nDraw 3 last position in one path for each particles.,\nit could be run backward with `backward=True` option in filament method\n\n" + "Filament forward\n^^^^^^^^^^^^^^^^\nDraw 3 last position in one path for each particles.,\nit could be run backward with `backward=True` option in filament method\n\n" ] }, { @@ -170,7 +170,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "### Particle forward\nForward advection of particles\n\n" + "Particle forward\n^^^^^^^^^^^^^^^^^\nForward advection of particles\n\n" ] }, { @@ -206,14 +206,14 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## Particles stat\n\n" + "Particles stat\n--------------\n\n" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "### Time_step settings\nDummy experiment to test advection precision, we run particles 50 days forward and backward with different time step\nand we measure distance between new positions and original positions.\n\n" + "Time_step settings\n^^^^^^^^^^^^^^^^^^\nDummy experiment to test advection precision, we run particles 50 days forward and backward with different time step\nand we measure distance between new positions and original positions.\n\n" ] }, { @@ -231,7 +231,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "### Time duration\nWe keep same time_step but change time duration\n\n" + "Time duration\n^^^^^^^^^^^^^\nWe keep same time_step but change time duration\n\n" ] }, { @@ -262,7 +262,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.9" + "version": "3.7.7" } }, "nbformat": 4, diff --git a/notebooks/python_module/06_grid_manipulation/pet_lavd.ipynb b/notebooks/python_module/06_grid_manipulation/pet_lavd.ipynb index a5ca088c..c4a4da84 100644 --- a/notebooks/python_module/06_grid_manipulation/pet_lavd.ipynb +++ b/notebooks/python_module/06_grid_manipulation/pet_lavd.ipynb @@ -15,7 +15,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\n# LAVD experiment\n\nNaive method to reproduce LAVD(Lagrangian-Averaged Vorticity deviation) method with a static velocity field.\nIn the current example we didn't remove a mean vorticity.\n\nMethod are described here:\n\n - Abernathey, Ryan, and George Haller. \"Transport by Lagrangian Vortices in the Eastern Pacific\",\n Journal of Physical Oceanography 48, 3 (2018): 667-685, accessed Feb 16, 2021,\n https://doi.org/10.1175/JPO-D-17-0102.1\n - `Transport by Coherent Lagrangian Vortices`_,\n R. 
Abernathey, Sinha A., Tarshish N., Liu T., Zhang C., Haller G., 2019,\n Talk a t the Sources and Sinks of Ocean Mesoscale Eddy Energy CLIVAR Workshop\n\n https://usclivar.org/sites/default/files/meetings/2019/presentations/Aberernathey_CLIVAR.pdf\n" + "\nLAVD experiment\n===============\n\nNaive method to reproduce LAVD(Lagrangian-Averaged Vorticity deviation) method with a static velocity field.\nIn the current example we didn't remove a mean vorticity.\n\nMethod are described here:\n\n - Abernathey, Ryan, and George Haller. \"Transport by Lagrangian Vortices in the Eastern Pacific\",\n Journal of Physical Oceanography 48, 3 (2018): 667-685, accessed Feb 16, 2021,\n https://doi.org/10.1175/JPO-D-17-0102.1\n - `Transport by Coherent Lagrangian Vortices`_,\n R. Abernathey, Sinha A., Tarshish N., Liu T., Zhang C., Haller G., 2019,\n Talk a t the Sources and Sinks of Ocean Mesoscale Eddy Energy CLIVAR Workshop\n\n https://usclivar.org/sites/default/files/meetings/2019/presentations/Aberernathey_CLIVAR.pdf\n" ] }, { @@ -48,14 +48,14 @@ }, "outputs": [], "source": [ - "class VideoAnimation(FuncAnimation):\n def _repr_html_(self, *args, **kwargs):\n \"\"\"To get video in html and have a player\"\"\"\n content = self.to_html5_video()\n return re.sub(\n r'width=\"[0-9]*\"\\sheight=\"[0-9]*\"', 'width=\"100%\" height=\"100%\"', content\n )\n\n def save(self, *args, **kwargs):\n if args[0].endswith(\"gif\"):\n # In this case gif is use to create thumbnail which are not use but consume same time than video\n # So we create an empty file, to save time\n with open(args[0], \"w\") as _:\n pass\n return\n return super().save(*args, **kwargs)" + "class VideoAnimation(FuncAnimation):\n def _repr_html_(self, *args, **kwargs):\n \"\"\"To get video in html and have a player\"\"\"\n content = self.to_html5_video()\n return re.sub(\n r'width=\"[0-9]*\"\\sheight=\"[0-9]*\"', 'width=\"100%\" height=\"100%\"', content\n )\n\n def save(self, *args, **kwargs):\n if args[0].endswith(\"gif\"):\n # In this case gif is used to create thumbnail which is not used but consume same time than video\n # So we create an empty file, to save time\n with open(args[0], \"w\") as _:\n pass\n return\n return super().save(*args, **kwargs)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "## Data\nTo compute vorticity ($\\omega$) we compute u/v field with a stencil and apply the following equation with stencil\nmethod :\n\n\\begin{align}\\omega = \\frac{\\partial v}{\\partial x} - \\frac{\\partial u}{\\partial y}\\end{align}\n\n" + "Data\n----\nTo compute vorticity ($\\omega$) we compute u/v field with a stencil and apply the following equation with stencil\nmethod :\n\n\\begin{align}\\omega = \\frac{\\partial v}{\\partial x} - \\frac{\\partial u}{\\partial y}\\end{align}\n\n" ] }, { @@ -91,7 +91,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## Particles\nParticles specification\n\n" + "Particles\n---------\nParticles specification\n\n" ] }, { @@ -109,7 +109,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## LAVD\n\n" + "LAVD\n----\n\n" ] }, { @@ -127,7 +127,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "### Anim\nMovie of LAVD integration at each integration time step.\n\n" + "Anim\n^^^^\nMovie of LAVD integration at each integration time step.\n\n" ] }, { @@ -138,14 +138,14 @@ }, "outputs": [], "source": [ - "def update(i_frame):\n global lavd, i\n i += 1\n x, y = particule.__next__()\n # Interp vorticity on new_position\n lavd += abs(g.interp(\"vort\", x, 
y).reshape(original_shape) * 1 / nb_time)\n txt.set_text(f\"T0 + {i / step_by_day:.2f} days of advection\")\n pcolormesh.set_array(lavd / i * nb_time)\n return pcolormesh, txt\n\n\nkw_video = dict(frames=arange(nb_time), interval=1000.0 / step_by_day / 2, blit=True)\nfig, ax, txt = start_ax(dpi=60)\nx_g_, y_g_ = arange(0 - step / 2, 36 + step / 2, step), arange(\n 28 - step / 2, 46 + step / 2, step\n)\n# pcolorfast will be faster than pcolormesh, we could use pcolorfast due to x and y are regular\npcolormesh = ax.pcolorfast(x_g_, y_g_, lavd, **kw_vorticity)\nupdate_axes(ax, pcolormesh)\n_ = VideoAnimation(ax.figure, update, **kw_video)" + "def update(i_frame):\n global lavd, i\n i += 1\n x, y = particule.__next__()\n # Interp vorticity on new_position\n lavd += abs(g.interp(\"vort\", x, y).reshape(original_shape) * 1 / nb_time)\n txt.set_text(f\"T0 + {i / step_by_day:.2f} days of advection\")\n pcolormesh.set_array(lavd / i * nb_time)\n return pcolormesh, txt\n\n\nkw_video = dict(frames=arange(nb_time), interval=1000.0 / step_by_day / 2, blit=True)\nfig, ax, txt = start_ax(dpi=60)\nx_g_, y_g_ = (\n arange(0 - step / 2, 36 + step / 2, step),\n arange(28 - step / 2, 46 + step / 2, step),\n)\n# pcolorfast will be faster than pcolormesh, we could use pcolorfast due to x and y are regular\npcolormesh = ax.pcolorfast(x_g_, y_g_, lavd, **kw_vorticity)\nupdate_axes(ax, pcolormesh)\n_ = VideoAnimation(ax.figure, update, **kw_video)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "### Final LAVD\n\n" + "Final LAVD\n^^^^^^^^^^\n\n" ] }, { @@ -163,7 +163,7 @@ }, "outputs": [], "source": [ - "lavd = RegularGridDataset.with_array(\n coordinates=(\"lon\", \"lat\"),\n datas=dict(\n lavd=lavd.T,\n lon=x_g,\n lat=y_g,\n ),\n centered=True,\n)" + "lavd = RegularGridDataset.with_array(\n coordinates=(\"lon\", \"lat\"),\n datas=dict(lavd=lavd.T, lon=x_g, lat=y_g,),\n centered=True,\n)" ] }, { @@ -201,7 +201,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.9" + "version": "3.7.7" } }, "nbformat": 4, diff --git a/notebooks/python_module/07_cube_manipulation/pet_cube.ipynb b/notebooks/python_module/07_cube_manipulation/pet_cube.ipynb index 22cf3158..d4cdb187 100644 --- a/notebooks/python_module/07_cube_manipulation/pet_cube.ipynb +++ b/notebooks/python_module/07_cube_manipulation/pet_cube.ipynb @@ -15,7 +15,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\n# Time advection\n\nExample which use CMEMS surface current with a Runge-Kutta 4 algorithm to advect particles.\n" + "\nTime advection\n==============\n\nExample which use CMEMS surface current with a Runge-Kutta 4 algorithm to advect particles.\n" ] }, { @@ -37,14 +37,14 @@ }, "outputs": [], "source": [ - "class VideoAnimation(FuncAnimation):\n def _repr_html_(self, *args, **kwargs):\n \"\"\"To get video in html and have a player\"\"\"\n content = self.to_html5_video()\n return re.sub(\n r'width=\"[0-9]*\"\\sheight=\"[0-9]*\"', 'width=\"100%\" height=\"100%\"', content\n )\n\n def save(self, *args, **kwargs):\n if args[0].endswith(\"gif\"):\n # In this case gif is use to create thumbnail which are not use but consume same time than video\n # So we create an empty file, to save time\n with open(args[0], \"w\") as _:\n pass\n return\n return super().save(*args, **kwargs)" + "class VideoAnimation(FuncAnimation):\n def _repr_html_(self, *args, **kwargs):\n \"\"\"To get video in html and have a player\"\"\"\n content = self.to_html5_video()\n return re.sub(\n 
r'width=\"[0-9]*\"\\sheight=\"[0-9]*\"', 'width=\"100%\" height=\"100%\"', content\n )\n\n def save(self, *args, **kwargs):\n if args[0].endswith(\"gif\"):\n # In this case gif is used to create thumbnail which is not used but consume same time than video\n # So we create an empty file, to save time\n with open(args[0], \"w\") as _:\n pass\n return\n return super().save(*args, **kwargs)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "## Data\nLoad Input time grid ADT\n\n" + "Data\n----\nLoad Input time grid ADT\n\n" ] }, { @@ -62,7 +62,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## Anim\nParticles setup\n\n" + "Anim\n----\nParticles setup\n\n" ] }, { @@ -109,7 +109,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## Particules stat\nTime_step settings\n^^^^^^^^^^^^^^^^^^\nDummy experiment to test advection precision, we run particles 50 days forward and backward with different time step\nand we measure distance between new positions and original positions.\n\n" + "Particules stat\n---------------\nTime_step settings\n^^^^^^^^^^^^^^^^^^\nDummy experiment to test advection precision, we run particles 50 days forward and backward with different time step\nand we measure distance between new positions and original positions.\n\n" ] }, { @@ -127,7 +127,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "### Time duration\nWe keep same time_step but change time duration\n\n" + "Time duration\n^^^^^^^^^^^^^\nWe keep same time_step but change time duration\n\n" ] }, { @@ -158,7 +158,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.9" + "version": "3.7.7" } }, "nbformat": 4, diff --git a/notebooks/python_module/08_tracking_manipulation/pet_track_anim_matplotlib_animation.ipynb b/notebooks/python_module/08_tracking_manipulation/pet_track_anim_matplotlib_animation.ipynb index bcd4ba74..1fc4d082 100644 --- a/notebooks/python_module/08_tracking_manipulation/pet_track_anim_matplotlib_animation.ipynb +++ b/notebooks/python_module/08_tracking_manipulation/pet_track_anim_matplotlib_animation.ipynb @@ -37,7 +37,7 @@ }, "outputs": [], "source": [ - "class VideoAnimation(FuncAnimation):\n def _repr_html_(self, *args, **kwargs):\n \"\"\"To get video in html and have a player\"\"\"\n content = self.to_html5_video()\n return re.sub(\n r'width=\"[0-9]*\"\\sheight=\"[0-9]*\"', 'width=\"100%\" height=\"100%\"', content\n )\n\n def save(self, *args, **kwargs):\n if args[0].endswith(\"gif\"):\n # In this case gif is use to create thumbnail which are not use but consume same time than video\n # So we create an empty file, to save time\n with open(args[0], \"w\") as _:\n pass\n return\n return super().save(*args, **kwargs)" + "class VideoAnimation(FuncAnimation):\n def _repr_html_(self, *args, **kwargs):\n \"\"\"To get video in html and have a player\"\"\"\n content = self.to_html5_video()\n return re.sub(\n r'width=\"[0-9]*\"\\sheight=\"[0-9]*\"', 'width=\"100%\" height=\"100%\"', content\n )\n\n def save(self, *args, **kwargs):\n if args[0].endswith(\"gif\"):\n # In this case gif is used to create thumbnail which is not used but consume same time than video\n # So we create an empty file, to save time\n with open(args[0], \"w\") as _:\n pass\n return\n return super().save(*args, **kwargs)" ] }, { @@ -98,4 +98,4 @@ }, "nbformat": 4, "nbformat_minor": 0 -} \ No newline at end of file +} diff --git a/notebooks/python_module/10_tracking_diagnostics/pet_normalised_lifetime.ipynb 
b/notebooks/python_module/10_tracking_diagnostics/pet_normalised_lifetime.ipynb index a53f2d3a..f9fb474f 100644 --- a/notebooks/python_module/10_tracking_diagnostics/pet_normalised_lifetime.ipynb +++ b/notebooks/python_module/10_tracking_diagnostics/pet_normalised_lifetime.ipynb @@ -15,7 +15,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\n# Normalised Eddy Lifetimes\n\nExample from Evan Mason\n" + "\nNormalised Eddy Lifetimes\n=========================\n\nExample from Evan Mason\n" ] }, { @@ -44,7 +44,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## Load atlas\n\n" + "Load atlas\n----------\n\n" ] }, { @@ -62,7 +62,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## Compute normalised lifetime\n\n" + "Compute normalised lifetime\n---------------------------\n\n" ] }, { @@ -80,7 +80,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## Figure\n\n" + "Figure\n------\n\n" ] }, { @@ -91,7 +91,7 @@ }, "outputs": [], "source": [ - "fig, axs = plt.subplots(nrows=2, figsize=(8, 6))\n\naxs[0].set_title(\"Normalised Mean Radius\")\naxs[0].plot(*AC_radius), axs[0].plot(*CC_radius)\naxs[0].set_ylabel(\"Radius (km)\"), axs[0].grid()\naxs[0].set_xlim(0, 1), axs[0].set_ylim(0, None)\n\naxs[1].set_title(\"Normalised Mean Amplitude\")\naxs[1].plot(*AC_amplitude, label=\"AC\"), axs[1].plot(*CC_amplitude, label=\"CC\")\naxs[1].set_ylabel(\"Amplitude (cm)\"), axs[1].grid(), axs[1].legend()\n_ = axs[1].set_xlim(0, 1), axs[1].set_ylim(0, None)" + "fig, (ax0, ax1) = plt.subplots(nrows=2, figsize=(8, 6))\n\nax0.set_title(\"Normalised Mean Radius\")\nax0.plot(*AC_radius), ax0.plot(*CC_radius)\nax0.set_ylabel(\"Radius (km)\"), ax0.grid()\nax0.set_xlim(0, 1), ax0.set_ylim(0, None)\n\nax1.set_title(\"Normalised Mean Amplitude\")\nax1.plot(*AC_amplitude, label=\"AC\"), ax1.plot(*CC_amplitude, label=\"CC\")\nax1.set_ylabel(\"Amplitude (cm)\"), ax1.grid(), ax1.legend()\n_ = ax1.set_xlim(0, 1), ax1.set_ylim(0, None)" ] } ], @@ -111,7 +111,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.9" + "version": "3.7.7" } }, "nbformat": 4, diff --git a/notebooks/python_module/12_external_data/pet_drifter_loopers.ipynb b/notebooks/python_module/12_external_data/pet_drifter_loopers.ipynb new file mode 100644 index 00000000..7ba30914 --- /dev/null +++ b/notebooks/python_module/12_external_data/pet_drifter_loopers.ipynb @@ -0,0 +1,191 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "%matplotlib inline" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "\nColocate looper with eddy from altimetry\n========================================\n\nAll loopers data used in this example are a subset from the dataset described in this article\n[Lumpkin, R. 
: Global characteristics of coherent vortices from surface drifter trajectories](https://doi.org/10.1002/2015JC011435)\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "import re\n\nimport numpy as np\nimport py_eddy_tracker_sample\nfrom matplotlib import pyplot as plt\nfrom matplotlib.animation import FuncAnimation\n\nfrom py_eddy_tracker import data\nfrom py_eddy_tracker.appli.gui import Anim\nfrom py_eddy_tracker.observations.tracking import TrackEddiesObservations" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "class VideoAnimation(FuncAnimation):\n def _repr_html_(self, *args, **kwargs):\n \"\"\"To get video in html and have a player\"\"\"\n content = self.to_html5_video()\n return re.sub(\n r'width=\"[0-9]*\"\\sheight=\"[0-9]*\"', 'width=\"100%\" height=\"100%\"', content\n )\n\n def save(self, *args, **kwargs):\n if args[0].endswith(\"gif\"):\n # In this case gif is used to create thumbnail which is not used but consume same time than video\n # So we create an empty file, to save time\n with open(args[0], \"w\") as _:\n pass\n return\n return super().save(*args, **kwargs)\n\n\ndef start_axes(title):\n fig = plt.figure(figsize=(13, 5))\n ax = fig.add_axes([0.03, 0.03, 0.90, 0.94], aspect=\"equal\")\n ax.set_xlim(-6, 36.5), ax.set_ylim(30, 46)\n ax.set_title(title, weight=\"bold\")\n return ax\n\n\ndef update_axes(ax, mappable=None):\n ax.grid()\n if mappable:\n plt.colorbar(mappable, cax=ax.figure.add_axes([0.94, 0.05, 0.01, 0.9]))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Load eddies dataset\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "cyclonic_eddies = TrackEddiesObservations.load_file(\n py_eddy_tracker_sample.get_demo_path(\"eddies_med_adt_allsat_dt2018/Cyclonic.zarr\")\n)\nanticyclonic_eddies = TrackEddiesObservations.load_file(\n py_eddy_tracker_sample.get_demo_path(\n \"eddies_med_adt_allsat_dt2018/Anticyclonic.zarr\"\n )\n)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Load loopers dataset\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "loopers_med = TrackEddiesObservations.load_file(\n data.get_demo_path(\"loopers_lumpkin_med.nc\")\n)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Global view\n===========\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "ax = start_axes(\"All drifters available in Med from Lumpkin dataset\")\nloopers_med.plot(ax, lw=0.5, color=\"r\", ref=-10)\nupdate_axes(ax)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "One segment of drifter\n======================\n\nGet a drifter segment (the indexes used have no correspondance with the original dataset).\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "looper = loopers_med.extract_ids((3588,))\nfig = plt.figure(figsize=(16, 6))\nax = fig.add_subplot(111, aspect=\"equal\")\nlooper.plot(ax, lw=0.5, label=\"Original position of drifter\")\nlooper_filtered = looper.copy()\nlooper_filtered.position_filter(1, 13)\ns = 
looper_filtered.scatter(\n ax,\n \"time\",\n cmap=plt.get_cmap(\"Spectral_r\", 20),\n label=\"Filtered position of drifter\",\n)\nplt.colorbar(s).set_label(\"time (days from 1/1/1950)\")\nax.legend()\nax.grid()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Try to find a detected eddies with adt at same place. We used filtered track to simulate an eddy center\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "match = looper_filtered.close_tracks(\n anticyclonic_eddies, method=\"close_center\", delta=0.1, nb_obs_min=50\n)\nfig = plt.figure(figsize=(16, 6))\nax = fig.add_subplot(111, aspect=\"equal\")\nlooper.plot(ax, lw=0.5, label=\"Original position of drifter\")\nlooper_filtered.plot(ax, lw=1.5, label=\"Filtered position of drifter\")\nmatch.plot(ax, lw=1.5, label=\"Matched eddy\")\nax.legend()\nax.grid()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Display radius of this 2 datasets.\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "fig = plt.figure(figsize=(20, 8))\nax = fig.add_subplot(111)\nax.plot(looper.time, looper.radius_s / 1e3, label=\"loopers\")\nlooper_radius = looper.copy()\nlooper_radius.median_filter(1, \"time\", \"radius_s\", inplace=True)\nlooper_radius.loess_filter(13, \"time\", \"radius_s\", inplace=True)\nax.plot(\n looper_radius.time,\n looper_radius.radius_s / 1e3,\n label=\"loopers (filtered half window 13 days)\",\n)\nax.plot(match.time, match.radius_s / 1e3, label=\"altimetry\")\nmatch_radius = match.copy()\nmatch_radius.median_filter(1, \"time\", \"radius_s\", inplace=True)\nmatch_radius.loess_filter(13, \"time\", \"radius_s\", inplace=True)\nax.plot(\n match_radius.time,\n match_radius.radius_s / 1e3,\n label=\"altimetry (filtered half window 13 days)\",\n)\nax.set_ylabel(\"radius(km)\"), ax.set_ylim(0, 100)\nax.legend()\nax.set_title(\"Radius from loopers and altimeter\")\nax.grid()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Animation of a drifter and its colocated eddy\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "def update(frame):\n # We display last 5 days of loopers trajectory\n m = (looper.time < frame) * (looper.time > (frame - 5))\n anim.func_animation(frame)\n line.set_data(looper.lon[m], looper.lat[m])\n\n\nanim = Anim(match, intern=True, figsize=(8, 8), cmap=\"magma_r\", nb_step=10, dpi=75)\n# mappable to show drifter in red\nline = anim.ax.plot([], [], \"r\", lw=4, zorder=100)[0]\nanim.fig.suptitle(\"\")\n_ = VideoAnimation(anim.fig, update, frames=np.arange(*anim.period, 1), interval=125)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.7" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} \ No newline at end of file diff --git a/notebooks/python_module/16_network/pet_follow_particle.ipynb b/notebooks/python_module/16_network/pet_follow_particle.ipynb index 15820ad3..a2a97944 100644 --- a/notebooks/python_module/16_network/pet_follow_particle.ipynb +++ 
b/notebooks/python_module/16_network/pet_follow_particle.ipynb @@ -26,7 +26,7 @@ }, "outputs": [], "source": [ - "import re\n\nfrom matplotlib import colors\nfrom matplotlib import pyplot as plt\nfrom matplotlib.animation import FuncAnimation\nfrom numpy import arange, meshgrid, ones, unique, zeros\n\nfrom py_eddy_tracker import start_logger\nfrom py_eddy_tracker.appli.gui import Anim\nfrom py_eddy_tracker.data import get_demo_path\nfrom py_eddy_tracker.dataset.grid import GridCollection\nfrom py_eddy_tracker.observations.groups import particle_candidate\nfrom py_eddy_tracker.observations.network import NetworkObservations\nfrom py_eddy_tracker.poly import group_obs\n\nstart_logger().setLevel(\"ERROR\")" + "import re\n\nfrom matplotlib import colors\nfrom matplotlib import pyplot as plt\nfrom matplotlib.animation import FuncAnimation\nfrom numpy import arange, meshgrid, ones, unique, zeros\n\nfrom py_eddy_tracker import start_logger\nfrom py_eddy_tracker.appli.gui import Anim\nfrom py_eddy_tracker.data import get_demo_path\nfrom py_eddy_tracker.dataset.grid import GridCollection\nfrom py_eddy_tracker.observations.groups import particle_candidate\nfrom py_eddy_tracker.observations.network import NetworkObservations\n\nstart_logger().setLevel(\"ERROR\")" ] }, { @@ -37,7 +37,7 @@ }, "outputs": [], "source": [ - "class VideoAnimation(FuncAnimation):\n def _repr_html_(self, *args, **kwargs):\n \"\"\"To get video in html and have a player\"\"\"\n content = self.to_html5_video()\n return re.sub(\n r'width=\"[0-9]*\"\\sheight=\"[0-9]*\"', 'width=\"100%\" height=\"100%\"', content\n )\n\n def save(self, *args, **kwargs):\n if args[0].endswith(\"gif\"):\n # In this case gif is used to create thumbnail which are not used but consumes same time than video\n # So we create an empty file, to save time\n with open(args[0], \"w\") as _:\n pass\n return\n return super().save(*args, **kwargs)" + "class VideoAnimation(FuncAnimation):\n def _repr_html_(self, *args, **kwargs):\n \"\"\"To get video in html and have a player\"\"\"\n content = self.to_html5_video()\n return re.sub(\n r'width=\"[0-9]*\"\\sheight=\"[0-9]*\"', 'width=\"100%\" height=\"100%\"', content\n )\n\n def save(self, *args, **kwargs):\n if args[0].endswith(\"gif\"):\n # In this case gif is used to create thumbnail which is not used but consume same time than video\n # So we create an empty file, to save time\n with open(args[0], \"w\") as _:\n pass\n return\n return super().save(*args, **kwargs)" ] }, { @@ -109,7 +109,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "Particle advection\n^^^^^^^^^^^^^^^^^^\n\n" + "Particle advection\n^^^^^^^^^^^^^^^^^^\nAdvection from speed contour to speed contour (default)\n\n" ] }, { @@ -120,7 +120,7 @@ }, "outputs": [], "source": [ - "step = 1 / 60.0\n\nx, y = meshgrid(arange(24, 36, step), arange(31, 36, step))\nx0, y0 = x.reshape(-1), y.reshape(-1)\n# Pre-order to speed up\n_, i = group_obs(x0, y0, 1, 360)\nx0, y0 = x0[i], y0[i]\n\nt_start, t_end = n.period\ndt = 14\n\nshape = (n.obs.size, 2)\n# Forward run\ni_target_f, pct_target_f = -ones(shape, dtype=\"i4\"), zeros(shape, dtype=\"i1\")\nfor t in arange(t_start, t_end - dt):\n particle_candidate(x0, y0, c, n, t, i_target_f, pct_target_f, n_days=dt)\n\n# Backward run\ni_target_b, pct_target_b = -ones(shape, dtype=\"i4\"), zeros(shape, dtype=\"i1\")\nfor t in arange(t_start + dt, t_end):\n particle_candidate(x0, y0, c, n, t, i_target_b, pct_target_b, n_days=-dt)" + "step = 1 / 60.0\n\nt_start, t_end = int(n.period[0]), int(n.period[1])\ndt = 
14\n\nshape = (n.obs.size, 2)\n# Forward run\ni_target_f, pct_target_f = -ones(shape, dtype=\"i4\"), zeros(shape, dtype=\"i1\")\nfor t in arange(t_start, t_end - dt):\n particle_candidate(c, n, step, t, i_target_f, pct_target_f, n_days=dt)\n\n# Backward run\ni_target_b, pct_target_b = -ones(shape, dtype=\"i4\"), zeros(shape, dtype=\"i1\")\nfor t in arange(t_start + dt, t_end):\n particle_candidate(c, n, step, t, i_target_b, pct_target_b, n_days=-dt)" ] }, { diff --git a/notebooks/python_module/16_network/pet_group_anim.ipynb b/notebooks/python_module/16_network/pet_group_anim.ipynb index 7129259c..090170ff 100644 --- a/notebooks/python_module/16_network/pet_group_anim.ipynb +++ b/notebooks/python_module/16_network/pet_group_anim.ipynb @@ -15,7 +15,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\n# Network group process\n" + "\nNetwork group process\n=====================\n" ] }, { @@ -37,7 +37,7 @@ }, "outputs": [], "source": [ - "class VideoAnimation(FuncAnimation):\n def _repr_html_(self, *args, **kwargs):\n \"\"\"To get video in html and have a player\"\"\"\n content = self.to_html5_video()\n return re.sub(\n r'width=\"[0-9]*\"\\sheight=\"[0-9]*\"', 'width=\"100%\" height=\"100%\"', content\n )\n\n def save(self, *args, **kwargs):\n if args[0].endswith(\"gif\"):\n # In this case gif is use to create thumbnail which are not use but consume same time than video\n # So we create an empty file, to save time\n with open(args[0], \"w\") as _:\n pass\n return\n return super().save(*args, **kwargs)" + "class VideoAnimation(FuncAnimation):\n def _repr_html_(self, *args, **kwargs):\n \"\"\"To get video in html and have a player\"\"\"\n content = self.to_html5_video()\n return re.sub(\n r'width=\"[0-9]*\"\\sheight=\"[0-9]*\"', 'width=\"100%\" height=\"100%\"', content\n )\n\n def save(self, *args, **kwargs):\n if args[0].endswith(\"gif\"):\n # In this case gif is used to create thumbnail which is not used but consume same time than video\n # So we create an empty file, to save time\n with open(args[0], \"w\") as _:\n pass\n return\n return super().save(*args, **kwargs)" ] }, { @@ -156,7 +156,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## Anim\n\n" + "Anim\n----\n\n" ] }, { @@ -174,7 +174,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## Final Result\n\n" + "Final Result\n------------\n\n" ] }, { @@ -205,7 +205,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.9" + "version": "3.7.7" } }, "nbformat": 4, diff --git a/notebooks/python_module/16_network/pet_ioannou_2017_case.ipynb b/notebooks/python_module/16_network/pet_ioannou_2017_case.ipynb index 788e94ca..9d659597 100644 --- a/notebooks/python_module/16_network/pet_ioannou_2017_case.ipynb +++ b/notebooks/python_module/16_network/pet_ioannou_2017_case.ipynb @@ -15,7 +15,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\n# Ioannou case\nFigure 10 from https://doi.org/10.1002/2017JC013158\n\nWe want to find the Ierapetra Eddy described above in a network demonstration run.\n" + "\nIoannou case\n============\nFigure 10 from https://doi.org/10.1002/2017JC013158\n\nWe want to find the Ierapetra Eddy described above in a network demonstration run.\n" ] }, { @@ -37,7 +37,7 @@ }, "outputs": [], "source": [ - "class VideoAnimation(FuncAnimation):\n def _repr_html_(self, *args, **kwargs):\n \"\"\"To get video in html and have a player\"\"\"\n content = self.to_html5_video()\n return re.sub(\n r'width=\"[0-9]*\"\\sheight=\"[0-9]*\"', 'width=\"100%\" 
height=\"100%\"', content\n )\n\n def save(self, *args, **kwargs):\n if args[0].endswith(\"gif\"):\n # In this case gif is use to create thumbnail which are not use but consume same time than video\n # So we create an empty file, to save time\n with open(args[0], \"w\") as _:\n pass\n return\n return super().save(*args, **kwargs)\n\n\n@FuncFormatter\ndef formatter(x, pos):\n return (timedelta(x) + datetime(1950, 1, 1)).strftime(\"%d/%m/%Y\")\n\n\ndef start_axes(title=\"\"):\n fig = plt.figure(figsize=(13, 6))\n ax = fig.add_axes([0.03, 0.03, 0.90, 0.94], projection=GUI_AXES)\n ax.set_xlim(19, 29), ax.set_ylim(31, 35.5)\n ax.set_aspect(\"equal\")\n ax.set_title(title, weight=\"bold\")\n return ax\n\n\ndef timeline_axes(title=\"\"):\n fig = plt.figure(figsize=(15, 5))\n ax = fig.add_axes([0.03, 0.06, 0.90, 0.88])\n ax.set_title(title, weight=\"bold\")\n ax.xaxis.set_major_formatter(formatter), ax.grid()\n return ax\n\n\ndef update_axes(ax, mappable=None):\n ax.grid(True)\n if mappable:\n return plt.colorbar(mappable, cax=ax.figure.add_axes([0.94, 0.05, 0.01, 0.9]))" + "class VideoAnimation(FuncAnimation):\n def _repr_html_(self, *args, **kwargs):\n \"\"\"To get video in html and have a player\"\"\"\n content = self.to_html5_video()\n return re.sub(\n r'width=\"[0-9]*\"\\sheight=\"[0-9]*\"', 'width=\"100%\" height=\"100%\"', content\n )\n\n def save(self, *args, **kwargs):\n if args[0].endswith(\"gif\"):\n # In this case gif is used to create thumbnail which is not used but consume same time than video\n # So we create an empty file, to save time\n with open(args[0], \"w\") as _:\n pass\n return\n return super().save(*args, **kwargs)\n\n\n@FuncFormatter\ndef formatter(x, pos):\n return (timedelta(x) + datetime(1950, 1, 1)).strftime(\"%d/%m/%Y\")\n\n\ndef start_axes(title=\"\"):\n fig = plt.figure(figsize=(13, 6))\n ax = fig.add_axes([0.03, 0.03, 0.90, 0.94], projection=GUI_AXES)\n ax.set_xlim(19, 29), ax.set_ylim(31, 35.5)\n ax.set_aspect(\"equal\")\n ax.set_title(title, weight=\"bold\")\n return ax\n\n\ndef timeline_axes(title=\"\"):\n fig = plt.figure(figsize=(15, 5))\n ax = fig.add_axes([0.03, 0.06, 0.90, 0.88])\n ax.set_title(title, weight=\"bold\")\n ax.xaxis.set_major_formatter(formatter), ax.grid()\n return ax\n\n\ndef update_axes(ax, mappable=None):\n ax.grid(True)\n if mappable:\n return plt.colorbar(mappable, cax=ax.figure.add_axes([0.94, 0.05, 0.01, 0.9]))" ] }, { @@ -80,7 +80,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## Full Timeline\nThe network span for many years... How to cut the interesting part?\n\n" + "Full Timeline\n-------------\nThe network span for many years... 
How to cut the interesting part?\n\n" ] }, { @@ -98,7 +98,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## Sub network and new numbering\nHere we chose to keep only the order 3 segments relatives to our chosen eddy\n\n" + "Sub network and new numbering\n-----------------------------\nHere we chose to keep only the order 3 segments relatives to our chosen eddy\n\n" ] }, { @@ -116,7 +116,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## Anim\nQuick movie to see better!\n\n" + "Anim\n----\nQuick movie to see better!\n\n" ] }, { @@ -134,7 +134,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## Classic display\n\n" + "Classic display\n---------------\n\n" ] }, { @@ -163,7 +163,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## Latitude Timeline\n\n" + "Latitude Timeline\n-----------------\n\n" ] }, { @@ -181,7 +181,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## Local radius timeline\nEffective (bold) and Speed (thin) Radius together\n\n" + "Local radius timeline\n---------------------\nEffective (bold) and Speed (thin) Radius together\n\n" ] }, { @@ -199,7 +199,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## Parameters timeline\nEffective Radius\n\n" + "Parameters timeline\n-------------------\nEffective Radius\n\n" ] }, { @@ -235,7 +235,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## Rotation angle\nFor each obs, fit an ellipse to the contour, with theta the angle from the x-axis,\na the semi ax in x direction and b the semi ax in y dimension\n\n" + "Rotation angle\n--------------\nFor each obs, fit an ellipse to the contour, with theta the angle from the x-axis,\na the semi ax in x direction and b the semi ax in y dimension\n\n" ] }, { @@ -338,7 +338,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.9" + "version": "3.7.7" } }, "nbformat": 4, diff --git a/notebooks/python_module/16_network/pet_segmentation_anim.ipynb b/notebooks/python_module/16_network/pet_segmentation_anim.ipynb index 34047da4..0a546832 100644 --- a/notebooks/python_module/16_network/pet_segmentation_anim.ipynb +++ b/notebooks/python_module/16_network/pet_segmentation_anim.ipynb @@ -37,7 +37,7 @@ }, "outputs": [], "source": [ - "class VideoAnimation(FuncAnimation):\n def _repr_html_(self, *args, **kwargs):\n \"\"\"To get video in html and have a player\"\"\"\n content = self.to_html5_video()\n return re.sub(\n r'width=\"[0-9]*\"\\sheight=\"[0-9]*\"', 'width=\"100%\" height=\"100%\"', content\n )\n\n def save(self, *args, **kwargs):\n if args[0].endswith(\"gif\"):\n # In this case gif is use to create thumbnail which are not use but consume same time than video\n # So we create an empty file, to save time\n with open(args[0], \"w\") as _:\n pass\n return\n return super().save(*args, **kwargs)\n\n\ndef get_obs(dataset):\n \"Function to isolate a specific obs\"\n return where(\n (dataset.lat > 33)\n * (dataset.lat < 34)\n * (dataset.lon > 22)\n * (dataset.lon < 23)\n * (dataset.time > 20630)\n * (dataset.time < 20650)\n )[0][0]" + "class VideoAnimation(FuncAnimation):\n def _repr_html_(self, *args, **kwargs):\n \"\"\"To get video in html and have a player\"\"\"\n content = self.to_html5_video()\n return re.sub(\n r'width=\"[0-9]*\"\\sheight=\"[0-9]*\"', 'width=\"100%\" height=\"100%\"', content\n )\n\n def save(self, *args, **kwargs):\n if args[0].endswith(\"gif\"):\n # In this case gif is used to create thumbnail which is not used but consume same time than video\n # So 
we create an empty file, to save time\n with open(args[0], \"w\") as _:\n pass\n return\n return super().save(*args, **kwargs)\n\n\ndef get_obs(dataset):\n \"Function to isolate a specific obs\"\n return where(\n (dataset.lat > 33)\n * (dataset.lat < 34)\n * (dataset.lon > 22)\n * (dataset.lon < 23)\n * (dataset.time > 20630)\n * (dataset.time < 20650)\n )[0][0]" ] }, { diff --git a/src/py_eddy_tracker/data/loopers_lumpkin_med.nc b/src/py_eddy_tracker/data/loopers_lumpkin_med.nc new file mode 100644 index 0000000000000000000000000000000000000000..cf817424da673378c53f682e9ed781d6bd655d8c GIT binary patch literal 244130 [244130 bytes of base85-encoded binary data for loopers_lumpkin_med.nc omitted]
z50ADXPQRZGQk`e>&$%i4XeSk{r>DFLz+IGe8U7)P5F`^dm!qCMxWLR~n=3kd30P#0 zq9l68Ri^T%&mvuo`?bDA&2nS5!9bEZX3@{3wrUaTeDIJ@;RXQo?uX>$e*sk=P(Ku-ni}5-3V079Fl<8k0*Nl7yzNjBDbsbriI7 z>AZdB=_&?SPEJM#dm$E_U^g`_#@f?J;lnUF5OvE+w!(xz>xyt&G{6VY&Oagw!0AI8QOh-HmOz`zFVSiK*WU(X6>3KRI>l+W=t83}L84rG2%E_jqn~ zYz$e$`LB-YQko{`9)tqG^D|LnYp87%!oEp6KV44xt=cNf(Y67j^`)*m#f4h!OyM5Y zT-;d^R<$J1VwO(&6D4@?SjHYCDvystlQ z^`1|}Z+0n@k#^tbSXv%o8~FEn|H9O5D=r!-y%ivQFjf)DNIeWYmj$EaLdshD-ggho z7f4Ull@$|+r!6R!yia_Je z>VVx#66C}Es7JiqUJSrv>4ypcLGm{JjPY0nkFrjxvVb;A7er4er2PGO@ZHfjL7oMx zjua|hq-Ay49mJwkEypE;b613)uoN{4|7^As&(*QKV<#s%`04ZHjftJ%`)8fo1SMqU zuW!WRmu*E97yYWfI-Bd=sT8V=x4w>h???H-8*rF1Wn#ys)~M%CI1XL?;W#@tzYW9_ z$vfxNoCMDn}LkEoOVf zYnDWm0^v61oxMH9n5u7GC8gWeB$Lr<+J*00hjRNX`PTg)O_y_Ts<~yH(N==Kv<=nL zd(g2=o7a-YlJ8fC_{35ezr$v*>mhkLZp!;{5JDlhA?BHL?v6U49-3X(L3U|97jYMT zgoq`>Y(o<*@71n9m-`&UBbc6=pGs5CYV&{Lr7wB_LxbG*B6=MKUy5K-^mZQxP^1p4 z9hsxVpwjf+>3M5vYS*bxoMe3G7#N2PQ!zcSikc9R%1(NULUDxd!q*BNBcu4#FlfUz zE=3@8khbJBE4zm&_)Gx0=PU4io)~8cQt`t%<*yg%R>oNX60C>f zl8Ztt#+@tg)JYs2XLG=_?KKu(kQ&0O6PxD{`+b7Ts0S3acLaW94>&(?#nI?pDxgGC z(bvIt#Tw~XIQJrh{}jBhe5;*S(PdR^9`?-He1klUConRD*07&HxNsOK?40ZNyiC-rl_k&G)1d?aI#&~u@yg+L@XysTsp>trhmV9wY8@zUVE7A66W=P`a zj?+$W!zRty+8nEok1=g*;G6;?l3rEk$termJ-_QUhxFNev&1=J&_HU*Pl9kA;%q^` zfoC?oQcYMq!v=$mTvsOzo+K%O&GCRL`G~Tl6!xrF!awTRUKLW{E`NqngrH8hGd|6yy?!0g|(iUe^b6l!D})@hUezE+a#R{FJ2 zf}h7)o|Rn?rVi@G1q`2LiyM9v&9FCvXe~bxQ9{}8O}xdhvYDgD?+}JyKc6)J7XUXv z$iLr;5-iKwX`3H>%YhnzVK`}5-@~Zc<;1KVlLDZ595^MpTPKsEkMpJPM;wZh=SESQ zf}BWFh3jVFj7Qfb(KKcVrFV$hu5A(c-h|PyR(ZA#u(VuRk7`qkA~OS3aAWz#w7lJ~ z-k5dh(9^dg;A{H^$M>gX+%WZh~(E;tG5j^$yp%MEwIx6weLH6JZs<<_`u!g~Wh;5M3vuLqNRcsL`V;VE95kAJ7Gb{c|&-`-Fug z1J8KUm~blpL15VqTNyul%od7*1JA+JzJn?iX;JrK(f#hwghr!*x2WPr-2a6TW64zM z^P=70eQY&&jWn z_h`=!K#n-+jxB|FB{a$_9blBu2(Tdz-szjK$$7BShu)mg#`DS@U>(7~%V?<|ZM24w zLhHx^VOLs%FrX+%Th&$^k#zt^!DWv-m9T`0;#2rGMxA(oozx4rqqg=`oTv!Xt^O-G zkC)2k$AduCOzp55dw|t9H;I=|)yBk`sL&!iBr0}1stf-ncR?(r7!@u|mUk^|=NpEc z?S~4z6xG0stEMS(ww><4O&Hp#6W*g7J73jL9GN`#+SR>fL-pYw2p5KUP?% zTB8pKX_xV&>BhjZukp)dYmr@-?)FDLC0b0|VdcieR+ym8XfKE+xf$xRS1J>#TUogj zM)8R=8^d|1weg_a5xZ3V5ir7$w?J-myq>bY1i+Ju88{k5C^PPN; z6D#uxjaHg8dSI&kV+omRBo1)@n;`fdh(*mK{epPd)niJiQvz7#z2)e0{RBIUDju%% zIx5S!t@zY7V6GLDqS(h~`jSJrSE=)tcCjxNlt=lsdi1eFNFH zq2uFuPxqSrGwAgHX-|UIcQnm%H^e}+EmKbU51S^RrwX;IH8f8UV*! z6LNsPjY1e2K{7kjuwG8iOOC2>D=b+`al>L@cOl+Nk!7A)H>9;5Pvpdce9u_^HC0>gCIKie}j;4xYR%N9Ph$Ve6^H-qBa zWn)m}O)>Y3b|^~ZYU6{{UEd_!itKRh<|7h%sL@MVvzMs%6uM_m1-MuQ_4TUdl*-t* zuma;mrlqaq!X3pPm9|Di4L)flmzlSPrlFN-_czJkBEE7{IAxo!M9&R$w&(x(6y9H# zx$Z;a7;*9aw;2tpY9;Z$A5sSD>TSkuo1!9^Y87a>e{!Dt@nk}B(2Cepb+f6aYu-U$ z9`yJ6en4U6-8I^Pio323k=vb5rX~-<#-DfN)Nt{QEasZgn)Z^o-VHk5?MXHEyTzmu z7XcZ0taatDS109)ifmIjdzvyLXQ57bvMtBBjfCT{U0H_zXKj4jcUb#4@aQ7l%B*LN z?~462svbAyx^}l8<%mPSm-QqDhA1tZdchRylQH8mp`8& zB&>kBvEq`GpC3*tML(_~xCmPNK3yVO(B5dL1EWmSU`NsH?4M(t*c)O9_xz1Qr{FvP zQ<#})KsGvhS@JDzSMjsr3oT5o$fhlwPuUb6tXtM`A6923hDFLGxPxK^$aG=+jJPV! 
zMvX%}k`5KywE~#}lR6zRcl4vsF%fAG)_mx2^~id66dbW)JbgaP4Ps5D5?O0UWq~HI z&wYe`g`JIg*vlZk%TvV0Qk}P6RK;;YK-?K|cn}pSavc9&b1Clok3mg_S#L0$>>I-!aT5h+xCh2$x_kpU$ z$vmFF@sYOEm(-1nyNZ&3a@iYS(NPD{EpUNXL=HNFuWa3lVDQA||L2hq`)#p+O$^5X zfh@am3Z-|rC}RY=@OKZLlKEtWu*0bQ(9MTP+C)obV?o3aE+`0Q!a;W{R>oSn)*8>I zl0aHHf8_u--)=IyPZAFO53p}^1PB@b2HUue^QjzqH)91;2km{*v_UCH#83GvSre!^EudMvO3(R>fJl7VdUJzxuI0k}$T=q}YX=ImV%gWY+3bzS4*0Je#N~C2BmRV#Bx}%r zOVefOW>_cjaGQAswX$tI^Gy!lcV`AN!UBm^gwEVxy_08-M-;9+x-oBVxi(pRLk^?m zUGh74s`2lNrttxk#eBug)9azzVr0z{ewi2EvZBug4(*~lsYV?6u(SWMh5>q43dC!T z4%@mrr~qPx>_ay&z`{Gdf+40}RBva)nyPns-wOGdPRB`sEEj$lh^Z!+_-UQD>*XV( zLh}6(Z|<=EQ~GDZ*v?;PVdkrzPC)B)#AS_ESGy}nN)xOV1lKFxRGX*D?$SciP3CxI zlV+v$jNbMnw1PFu&op#h#7D~Xu|;{`Q%K)gLrXWS(m9!~xPf0TS18(-9wo4+!9uE# zRT1lCtjUXWDRbVWkv>H1(7Nw>edJ8U8T7<`-63GKrw_*RlmwT-VlpM)EVUh}Rp?26 zo2wduA$+S(cSHLTR~X%yRT=T=43Q*Y*A5}2HNbl4AyNK}>ge+p$v6c+_rK8J#bCUR zx(gN(Oq5ho$ES8Q_HIu#67H1)v6~!7l}q515%xHm!Z;@JJTa6lOwK!ezi}$_r58A| zlpgAH&gh%|1?Jw=mFwwas$rJIQnA80&1^GG9)X@@k=bN$^YxLBLwjv>#r0dz0 zV#;oFmL=+2U**pP$!OlBo^mi*m{#afMe^}Yqb;y@*aZR_;njO})r1*ae#&OXrrER-9?FU|@=#Qr(eky--jPqJds!n|MeWVKpKN9)%zvp6Y7I)| zi4&Ea@kmmnAM8U~%jZ2)x?-rvFrp|tM7XCnzQp-LMT5jEaCJm2LoqGkTv-I#eHlj}m~L0%?`@2F#J_Cn;GT#yv-w zh6qnOFMI7v#Tm{FHHam#u>Krw!q7-W8B+Q>Vglimg)NGM*Eq1o5v5YfF$WiLuf+(J zBR2mE9~o{$)vek6PRiu&Pa)==bg20(epvNE+A(g?n%3z9eXkZtRx7*(rCz+5~=fgr&>R(P6I##td$etnjx+8ky~T6541{-!MFb5&UD^C z;?}CU8~X(W*Yw}{hygh6^iMSmF%s8qaG@{SxjYb>Dd|MIfyX%~HC%MH`WW(`xu-m& zJxu)QJmY5~9OkA=PgxnB>i3eMm@=x(p4$S`z(%5n6dQ97R82uz0In7a)se?trwJQ6 zJc*RN4K77Mck8n-hApkM^K4Ue@=qsE-^Py*X*1e+E@~XUP}K+WdZ*mC=x)oNs^Z7@ zx0n-UHU(40PVIBq+Y(NS9sPo{)Jf{G{W*&eoi*c#vtD1B8CHLgc~2}IlF(NkBn5pj zv+uc_oJ-{TiAiz4U}&t=%`{rfO4BIsBLLW`ruT)<99h}a5_5zuiUKS0BBoc&B&0*G_sLr68MjLAc8M1&4NxMmC%OjNdQ(ol^*& zDd92^d0CkTc{#&xjtQ?xM1u>ox7e@-c0RnVo(H!ZqT0n~5igTt)ysTNiSxSD{|9gd z(FGID1;fq=ijy>RsgwX`RXu|+&w}kw-%xmK*RsR_8%I1%kJjDX4Bc5Bc&1lHcIDkV zReV{LlWh#+foh%QJIxUpR*3BRj(=ZDECslp=kaR-+k>*MzSGtLIpvd)YZS-_^`jExy+(~S?Hemu3I6$okUdWZF3+6i0FFQ_%Fl#cU6ik0sDb%}iwdaiVW zw7=IlWL?_;TW?F=N2~;;ZYyb8U~yjSv308rlDpjtTIsrsQ+849ELjOFeNuR8x*kka zwg?K{K9bUU`2^*sN>~cC;mjk~)}Cw44Ejhj8ftkgxP%;NGN|uv)!2HBukcL0wOiF|{&ORIe*bw71Qw0%?@?D4EGaJ8O9_u3)CN?R}SYx5tUGGo(o)1O+(Dpr7WF| zTVx-61eq%y&Ds2JR&~JRN6r&3Yynj`eu&lWuPdGGlANxJocLCSLBF;#cYjjpRV9pY zNBD&4kx;T$u|Ll~mvFO*)^_nSWfFm1!-PU2@gvF)q=5dXHsKBoaNV1#Id?X-@?^&N zM&$3ewbU=uAuuBm5`ZuNZ(>-4(wTFUN~WI+(glxc`gruQTKT^`GJ_xz$T!Lq|;>2V{GLb z$7d>uZ~AS&ZxHsgXB1)iS&dR0=v{ob5RZ>IbhcwnQo>r(jEi@y6#d-QOZy%8+=LoT z;p<_fN9f+y$hejxi z*;J`>g%bpfMwt{3Q!m6*MZ_VH&EE+|EBU1>+zpkc1%FA`Zsk*YRzhc{2|idy87&ES zhMU%6r{_09qgwO_yr}Wq;Lzf|?9Yth9&a|OwL_aM5$^Cm7=o1AMf=hl34ZA{j3jMk zkHmLxwDWEngQgx6Pn0lk{$*4iFcou%>5^+KazhU9?&e0?CKrFfo@lGDbETcF-YM8y z7J0E3SY(7xd^|d~i^?Nr?vb0kJw5H2Do}Y>G;_-%9$dvgfKQ3E>x5_Nw!!X)$lm9;cXe$F8&` zVm+as!YD}0bbkjY;zjr}$o_8~$(|4qiwUe_+STEGcd7g_9Xom<{n_c=>^ewSV=@(8 zTMIV+AA89!WJ8A7e;p@F?Tf#d9TxXUH+$fSa%`N137hs52f~$`*gL@3*OfP3V>Zj_ zbB?vkc>4NW_f#)Dm|Gxw2Nv)kKYKAR!c&~*Rr?TGhUR|$**Ynok{7k_9PQ&2X$1Jb zUu$@D;bJRmpaaBoWZhAegqWff!z*?ecV)j=NanC%qS?vBFW9C^%N50o^ar0bMt)Rv zQ{mmS8s$fF3Tg(bRB>w(QGL{r6?K)orG{0qCf!ksYB#if0>m>D%yc3jJhJ<0rFeBu zIC+NQNd@nCn(8aHt6Q?K{h??Jo_wlk#W6<<)N-^9gAMp*S}`@A-SU1TVuKp;kx3v7Q@v6C*=A*4f|Yi0*4*V)gnLh|0>XZ2+Tgnb^w7UrIz-y$ ztwvjtPVIPB)t*Ck-C4ZP{n@s>%Q@g>db}^CttC!A*upzO3vC_8yt2eVY=%;gn)wVO zv(Vx1HHCkkTw{>ew6KGU*ZnUdb77Q4^OR+$0~WiFRNWc@uSdLG6Hage*v-hswCBJ@ zM^k|rB3yN{o~haC4!DQkDu*&vcA;<*&?!yb@i+EiF75C7=a6~E{oEo2|2NqMNFztHEe z5hSq_OyfQZ%2DKEZBOk$5R&%$mSPU$8$LV&CnU=w37Lzgk!$E4nc;5s+8}0cm#n-k z2J|y486je_t%#VYNGsl%0t<&-{w~YhJ}Wp4tUM;gz#a?8?>`NMf<9U}3T;vZGzL=9 
zChN)_wEBMpznY=dsudE=djp%Gri)!7-W|g=0>YrX#-%UjtLH8OMtqr15d&e2S4&J; zQNesS8Cm;33@gxi1Y9)6PW3d)V*ZP*j_v!sS!*0dVRsG*?h3))UJymPUS3DiFT@{Q z1uIJZS4sUDUbQYbOM>4p1Rj_G?)}49N1gYqYYhpdN9W)Z@Pv8li=@E0NtH>;#!6&o z=4BjvbIQ3$WYmrdB)FQse`Yb`mqF;$G2YO)KQp5@;6KBu^cv-c3Vy!Ny{#Jnfo?qwQpQiw)ZU5{a`1?wwER9Q+-y%{T z@`#!we1*2N5s4eaLk^Z(-xpd*k@(ZJ&Mic4R>Q?)ntX?}t?+xph7f6U*f=Xdf=?zP zxTqws&kMhv-YcztBfnTv8T0(Q)cu;UrOKfqg&U)918Bq{?L0Zv+)!GFnbuSX7Hf7n zye)OCj#GX#=+V3+vK5+OQ`{=bQ)SDtoVm(L+Q%gf(QFCQ_WcTIrjWJq*wawUAYYJU zNDd_b{bCEOuj2o8>T;u!(z(H|N%MoFO)%&=1u;uV_L=Jn%C$M`t)J8%v7P6`{PaCDHbx`I$9_tG`~m@L>nYErEk@=9dO}ZL+|$e$Ds}UBUMe z#}WY!9Omk*S$vY)no(>kA4QQxt}8EbSnf4AT!-awCo*E9O`YC77_>bC+NaFj`#fZ< zD*_AekC0huUA65k%U?3|9Bc+%wuB()d`Xb#ZyuN zkC5?tzr<<;a7zZE%MKgx2{l=~n|@?{{J_GnzMr5Rb|M#HsvG9TIq<_Xw{)&ww0>nx zq9S*WN+AhC_x|j4Z_IIMu?Ya+Y)gV2?z^A}JNBF)QGUbSs;XtGx$uO5jW21|Wd_@c z?3JuiqPwU-6ehb-!750OGQ8g1O^Ye1yTz=&R1svz z#QHp95MTQmtLZI{3~Z11ySxj+9XGF2{*7K4TeK2I?S)<&JK0Afbw?=MON9>kVkZKT zM|1wyKV)s{)(bS6#a9PdZU|O1(8+B}GuUf+b8Y zd~%BhUlgQN2tHe_tfSZm;tVfTD(sB2FJ>4+3P>5@37F@#T7-}yeNeM3rmlRJqK@tR ze&Fv1YJT*=C!$frPMU)s+bZ}b{%}*hMMl+AiS5WA&w3b5qfXzo z^Pz4tN&>e-UO6lA`MomPP$d>&i}{eJX@tzxTC3;zn%>DIZ_mM$po5C+Lc znItiK<}-Rs@r!Z|i)KhD15Nxmbh}ieWJtQ;HE-P??+DLqlY|s8bNmi}aR%WGs+#g& z)UW|f-9|O)sY$QFEd_#FUXUPR7DYDX`CQd)Ir2*v(xQ4d4Vb!dpUf3~YUKVAz6c#S zjcnGNTI6L->FNqUHii9P3gmU`7mSHJyKiK_T)F@R3A2IMBmUgguDaU{H5U9S_!;_& zPNuACH7w`M5w}Q=nX<0UvvDMGbu6$|L+9Jvl+&U1Ih`;mck9?pM!v z6%r?9PVWdr!760jE8sP^zwFV+1PS3~wsxVpDR7I5P092x;6MATXhmOSaC&p^>vlv` zr;ENxPSA3H&OaYVSZYNpOtOkUKq1MTj9NR6Zh7u@No{zK9s^~F(4nZp8tJwLc9hRU zg(6wdf{{aAl1}4~84YB~En=`Z$y_ESj)HKbd@lYDs!1H2vZ(6uR8nUA-2Jyrz#BPo zvkxN9obh#;J?rYnqA!(Hga0r*g9Q~<_xTcvR$^~sR^-;IM=HtnYsQ2!Cb_hlZXjJ} zevu?W#BOyQ^^wJ=bfCKy`U$SHRScuc#O^^EHC)>YUpnqbx|4fM#NvLVhcv$1kPysh zBE~fvRjVpQ-LpF`u*02#uNu5sOw>p8;fVqY$(kn1uG(#FeiJA-Ye$A1B7lb=@op5o zv|L>DNd=$4@6;jGKY9R4I%LL~B0tf3n}%U7`}}X62X0MZZD;*c@K~cH)?b(J zt4^VyLsFypNh(OAOnyauQJTbhjAj-XAi}bzQ^7c_H;(#JPm*9qk_1l{x4}=mD)#sD zgF3PyM$Ar0I;)DD`1xa3obcaIXNIZS6UoW!Qm$0CPkaIi!)f3oyu)GmP+uioO&(zi z6Ip5t|fC6{U>S1D}Bk7=%Ql&fvN3Q-$7a)%y17uU%;l5W`?gk!6pu z9ulC}B-{_SI#P3XK{4$ge-mPxV^SdqJbO$9544Rw`Ag@TJ0nS(r!$3@1fu@?_K8!h zbfzxrh9$x65d9^}z=1g{2+rO8!Fy@#YVAArir0K`itpWvDbUaj(}$OiIwf;t7nZhe zZd~a}xTP&5L=pNf55SzyTIO2ZwQ@=m2jRCyA6VuoUif!$h%t;QpDO%M5i9+GD}{A0 z5=u^-35+zv*A`tO2C|IiKox3--ODR0d922Gr2_RLF1@-?z5Ye-k#w+ZIrML$);d$v zi1vS9+SXj(gFS9B$$r&RrP6Mi47byhy}K2b&iSNmy*JXbXsFslnAP=pP>gCcU7k#U@@CP_)PWBTw{> z7h#EINK-h@IDOoaMa=b{y4ke3`NkEO7KjTtQ0$uNgW)ePi%g=~5Nh7*vJZ9pH|**Vn1Rb{vPWg3mR&~7;62y47qU8}dVjImux<3x zInc3TyIh*NvJ;2G;J%fz(%P^~4!k%tG-neM;JH=KoMCszFfSkfD>ZDheLI%uK$4EcaJJ)iih(#<L@yaE_xgOc^YC{oH;4JHLi79fx^~4dOnrc1A|>S2-|Rw%qPBAlb<@N-(ePTW$Vs zY5$cR{@*xTgAMb%$y3FpGQ6exDWNQG@|WS?<8+(2wC>~3{8<3?=Gx#NEBlhQlKm(pC4Z^QP`4G1m$I zQS>fuX}|aVKjT9w<4YOi$KpB0AQ~^m7^RFd#`wh;V>}Q~7o&{vWsEUOHA)nXr#QwK zqm(hmMB-UIJ4Pv`lv2ht##1~Hqn1*}7^D37G7nLL##6k0eE)*?^}cv>-}meFdORPu z6bkdL&g&0rxqeJbJt-##h-id57e{3D+vB`-!x(d*lpV}4A{I>>3Xf_CC392;RP)P; zr9zBuzmjmvJ+T7OUd4rFqgOSipb!|Pm^4T-tw&OXDa2Uq(~iQ#t)Cd{DOm*DrRWOLlAyyRyd~2KJ1Uf*BDj8V7JR6446hg%Mdy z91%w;E{P>@LXpz>8CrL!atSx!ChE9gQ^yTf2up*d-6UWDW~iDnViC3Dc?1kvJ4CMJ zdo_o;JV$LTRLEZ!LJeG?Sy?S*rMXd8Y{Ld8R-Zr>D_Smq z==lJ_+~Y8abC#`sV2Q+k(GC#r-x!csVsBm(^5U30S?fMmvzWP*uz}wJ*XCU<5@x|# zpG*l68sd{1wecnH(sbIaKx?d3YqVQ9vW3>t65b)@!NkQ@f22c#5qCMa+~>(z`MG9N zFR2a)_bFB-s1%8E8Ig;k`N|7&*+4s>$R8~;r)q50G?jS><`<&3;I<}V54`C@xgRPY zihyx2e(xB-30N%NZy{mDTV;?&WE9>5=!IOsN0J%mPEwv39c~|M&k+-VVfU&Ns1jdP z?*=3`;|->WkjK_ttkN4j)`;Y4XdUNiDt7dY5d0K2lUJLd+2f5xdC^!7A3oWlYP#t# 
zN@GzBh}b*OzAF~tOT=yf5n`+7vLFLJfndE~U{>t7)~`Z4$yziSgq73Q%POPQW^EQZ zm8d&OVG$iDsWBrSHWiCtVZ%uoS>_VR1nv&XIS?^>JQOcxIQhjHBT}h%TCKG4&`yn* zh7>OgVzFC1O}_!1wSh&!9*QmLs@J(S)5-ApqG6w1she*?c5r-wguk}9F}~`G_@cc6 z$p&}XU7i+}-4e+`5~T>bMba{C8I)RpHLlpt2$u`ISw;F@NGc~4-P#37a&K?&1Pew* z)1L1DjIk7F6)55xutpzH*hA6WQl;VYi+}~ku+l|-3_HP%`#f|@YU}QGcF zq8s&8k%=~2PR$U*x^{||25LdD6N?ZdEzd~JUWZT|n$lc|PtgGv)ZTsYx;5!Wr;*Ty zc1bA;9vqXF!>mQDYW6HDp2ad1c?n~vjiwTG>1kA$q~{b{iJD0jW*rbXva&s{iQ3LxrlIrMk<|!r~yEuox8!&I3p4g^sp?!s?C#@ zh@2?DTMDNeRzsH6AxU1Ar>PvQP*4-a9SpBXvs`I$T{LziR#kaGJ#8{ao8=tPH%$kL zDvPStDLURoF?;i5##NO#0XB3|m!S-4QCSg|X1#K!4VC(s$@(;xM-(oC&=@z~3kcKQ z!MlJOlQQBVhJLF7&3o|R>EdGyeMmrE0+H6!Zp36I zuJVf4$1IHjB~jIrVCNv}sfQs~EhQB#8z6YQmZcj|#k?~CB4y~6(&c5cd%e+8NtwpR ztQ}=&YB58X+FT52;GMj!$uOa5=X`kw2b2xOnU3{!ys4*WIlO+wUfjJ9XI0`N%MnO$ z<3^x0BS7zdCDRq2gm^8JQ_1bGpWy=ahEW1>SfZ z!`gN{S=w294zE`?e}(aSu~(Chc`JiA!YisTD}tsKr6CZcq9STzOMT<|{(c6NubKA6 zU_mn`l7?uh72@66UR^|Z#as+9;Uu>Rs`nw_x4c>}GL@?=HZnWJmFEgBBuTvDIcJ?C z&V*Q`+8MBJMGyTKOVW+S*g)xip(%Xm?Q)n_y~d{F9s)Zwa8j1~Yjb!86wtM6skNy$yAaRPplP^uFE}+<%GhPp+nOY11fqplK49xTROdi|c3d@_r5S_z zU0Ary-c+)&?u*Z_79TU{Bx%P9q8d8MY~O{pObQ!$b}=M(a#1T)e4HRq_`(OD)AswYC0|yjeG;g(<#LxqWI0>B>N+VY2MZz_G&F!V{2idMFsm zS|aNA>HJul3L?`mlO@iLtWeqUiM?!E#a>2ZdUAV4PC7L0E!8Ki)mswC-d@U?N1)tV zq#=A$tXz$8$rmZo-=yp^6C3~Wk3TiKMFM(B9jF07$#cKdEGS#2d;^}5%4>suNVw-QXP#W`PW3Ok`R7m&inF}*^$@fho%)a$pdxMbQ+ zVnC-Fn1pQMFM#u?(!d08xX29E;A5T+gg98M^L3bRU_DE=IKOum;2Gj6+5Vx`7*tg)l$7-FXq*eF zF3YYX_R@0d53`0up<{m~jDj3O=df1^hjE{P-(%u~=}VZB#HuKQDKytEZb%E&LXW%& zS{88L02IQW7@|~tlkDj&^v1)EoYLq(mL0_@3|yJC875EhaR<$wSDd?QyY-y_CFP3J zLeT68XTY6?JFg#@teniNgrARH54Z&m9lO8yEfMio~*^ zNrf$`j(Qt)heD%RF;d#Ts9j9WEEqI2=*p?$awNzbK_Qu~G%=jao*us5qy(6QwDKU$ zVa`~j>eIM@@}b7%k$N-VYYvtN1v^=$&|%2E-jXW?%P8X6TYQI6Sh6lT&nVp&l!<#A zy^*H%Lmcc@E)z%j9Xi^fdC0e0%LZnZb5e1KjGU~HsNQrUtqJW1j2@6c$308ebu5-`%lQA~Z>~-jR)q_F_x! zd=6=!;f%z`n9jmwpr;zLL8ABc8VoJH#1QPBg9PW}XUr!0wR5${! 
zv@(vpoEA|61e2F%H1-Tg;JZ#4GPc9=f*FFiw^oGS&%roVVAJB&tUlyTlmW)z>%<(L zbyh!(EDDgxQazDhEXTmzv*_q7N57r`PUk|s=nSNH31dWI87+bc%V9is;H(??dSaPD z=q1lCflx#3Xa%xdyGnMHl=qr5lGG8n7MobEFLFcKNbnZl92Y`k*qxZc!np;oY2oC^ zPzzYP&mQX92`hVXhxVo71nn>{*&{;p1uSBDsl{*b=&aN%vBsZf?#*~L6Xn>J7 z+2!Q}s+lL{Z0rE7kX@Dnwaw@lwA5_P@$-U3W32glLA8?4h-A6xLKLWL0AM>=Vm8`6 zU1qFCos$%fEicZi;tmoVS1+=ZH$_yn7l!)tEN@;!(;`m*Ca2Y<83FKmmsH`XG-?*- zcjQPUdQnpACz}FJ$x6&NvV*!e9;7Uu|5*M$x3@r&rLh(66 z$TVd^ihDN%1f9K}O7XYba%LeU4}NPEoRJqWQm3(0XFEP<=UqXKC}t$dndCUDc~k{^ z(ny51ucl#$CtQYSQY8d(^~u?G02#&)*^$LrIFV*oLK|XF-h#N5Oo4!xOk!Qu(DM5F z@_xxW@E8(K=>oRkgrK+>QKHR&SP*#Qp+*oPxTf*5&<&!s{en4vi(NkCAOI$oWL-Y( z$LuZ^`RFA3!2Uc7;87*x-$1j zOUrZeAhq$4D=lJ+Hwh}UhD^N(H~duzAUA@ym)gz?g%lclwWMc5zG~|Y z#SD0(O6IurmE1r<=v2TX2Xbh0pA=i=Ia&{$PHx}zi7Y-M<6I;yGe_`SlNYyMyp~=a zH8w3t{P+!?s3mZ|A#CTIfU_DUw`l{YTqME|M-bw400o>3XaYzac+ofJO}j!wtO=+f zgp7c0DDJV`F{%l+K7jE76_!-Yp#5Xkrz=_*pm&9P~PzUJ&Yc4{)kuf$zY)>#t z&nF>ehwk%6PeL_sXlhLM)LsmfmDZzo2>K*T{N=KPs1eG@s60e9F-hV9(x#+xSm{sg17T=JC4XYU1eSeXeMbO zq6le-oNQ@Lm60rHyZ4Yu)|h-7j&;NsO);X14A`jDVk0RQ&cJ7M0KA8*?2Y6Wx|&=K zaaQLEytx_ldJvfz*&2c|vbb6PC5N-CrHCZC!GZ<8xWx#QE|nL{BH}w@oju;Kv>ou zyI?DBfM|~vTRjcbdI>gd$#LPUnH~E!Opy+jR%?*Utn>Af$lcDO!S8Moi(B)pc{AV*o|NpeO?b z!1+zl7?+9hEtBV~OX{Vj+cJji76H<7F3{A&0YQBUTIn0gpf(SHA}$<@ORHn*BJahz3nJeTn$uDk>1;xqR^N^)RTFgs7iO=hG8iW`rfqG_ z^R4a5YH=Ax34_7x)43N)nl3FNTW3K_Sh}sngdY=U8BgRobKK-M351NCTrw-xi1#|r z6^+UH`80~Aq@~zGH|)Syg<$M4yQLV>A_Q4Mg&7X~4OvKR&y|-g&Du&#<}97$^$>Wb010C@2&5*mljlGZp-F5W#i6K}?r4AS@_Ev*m; zVgRA_hDP{qgisDt!7)_7k>+TT25-e(UN@$(J`ypM*mlw+^Edo~S&ElYG!JbG=1!KH z@^1Y~bViBUoF*FxuZGn0otDs&s>K^10i#((aW{U87Vh0Wp;!?l;d#J2odvLy1zlr= z^0EMdh6x_$3WqRSl=)Ud(GQe3yZrhPy(prc#ppYnq-1?1tyJEA$XJJ!&O`i+k;6p^ zikLt|r5amqd)E9kl;wq8T_L3@i=1MfH($RFa1ZH?+{&^@^(~Vn#P=?vcRSo^q_D9C zsPG>zqYt@eFx~+1j(O$hU7$w0a|}jY&p~w%5N@3X>)0^2;Nqp_#ic!IOTZGH7miE; zpSP|NWO=T)7rnZOxScH2Z!ws8?qZ;LpHa>glO;WL1saVLl}3lCO|Dd>0H}mmgB_*d zq8zrnUClI~bRc5hT@O3ixGZ4S%Ime!1WMz#W3Oy^5!Cce0$*x!%+m7~6SwGOIs2HB z7Qm;K@jA+#hENMQJm|ZnT;fI=#IsnL`9{^Rzgxm_u6Knmy#rlWl%oj5Xp{m9i>(hAtS?cC0_c zWa<(@Z*L@axjC(g>q&X^1{$R)xf~Kg!~{A1P@}d3f5<*R!JqV)jssg1E{eND!j2_q z$+UShW@*yM$)gy}8I535X!S(pM`igDvut#=l`P!Hw!k{j;CQ(lTb;|!)uIWkouOm) z@>V&?qubrG!|Z$=O=EPYbzK1m@Wo1;vHYru9W$mpsf8TKG00P@U!8P@&o!o5qo~Zb z6%_L+z16d0h3m3B;ChCgz0+QaDv9Q<*5XnFQ0Sx-c)mPu(hCchW<3C?$%NCN?ABB9 z>SGZxN_3X*C%Bchk+@{YRA$9SB}*=|V&2gz_=p#u!TF9~rwjxsftZ$J%7A zD3C&Zb~6gCbZ9Ev&go?!d_{TX2s7_*-P+P%#C@lacpJ5~h?ji$RMi*9$pV^- zaMvz;o-e6xPm?a*G$v||=SV20*m82qTZH1Oi{mtaoSaJSC2$dB&iXz><8SObPBdA; zs18))iOw97CiXR{e7{5jnZyxhQ+*Ly`>i^wzIMo;(r?I}tJ8p4V`;|p37hHFDI@zu zP?L2m5MnINyKp^j%hv6xrDto^+Flqe$`J0@O4YrGy9o$SE8+?%d?F^by%+?(l#L}S zT$V$ti}(;cgbPO_(FWlmOyMxK3>^;S_z|TZJw$A`mgXRLH$v*JqFr22+N-&clYw2S zu51_rYTR)LOA+QgPE(hnv?NVCwg^O5Ug^=eV21~$EZgTL>0-XkpwTgSDLhNv<4d(* zlDd6sBXzNnCqUJ=2n>9YHxF!`=T=k2OIOZ}NUlxqEK9`&c`3FCB`I26G@H0OF*Ox~ zU~vQ{UZ@39MC&R_Am>BguJv9Ke_iLmTUeA6O|Dm`Xkn|Z!eRwrvdFj&HO#xw#uK0~ zWOhnt9c)s{9GHj_2f%6Vg)}O{pedU#!xr1rW}u;x?mmG)pwdVsKB*zzddV9szv8%h z0CFBl+4@Eysi*JBKr4gb8zFb?nal*ZsuCxO^EP*n2 zV=OTnE8Q1?yUGb-@&;D2$_QdC#ZK33#FUrO(?!k`Dj}MTEB+1L4oqpYvY53|1&8H0 zDIL?*mhZ1uvpSmSH`p}7?2fV?54pKXO@b3!%M4^M3)61I%py|;9mY9KS!j+zo2;nY zXnSd5tNMa}i#k@tm(Rxz!|srAF^6G8*FsWsHB9AU%{y6f03x9d^xEyhs|+JFnl?l; z#$-+GJgFW?uZ3J8R`D1W_=ZIg`Y*P6I#Q@Di%56%Zg?59QqKG`RYep2|{d5dxv=$c6Hk;fs>2;8AkDqogeP?bIsVpq=0pkWk%H>I!rnET!?wO z)UY~@l^d&NdThs}RV8$`T?6)%utO@Qe}K`o$XR8geOLsX zspybPXH&CSv%${=` zs=aG#l|&Wig!fA;BaRk5GslUwWT9rojzyT+c9GINh^=KuZ0%F4aMd(7Irr;+H2$ob~gY( zc2}z6yV@0^$oj%9d9hNw%nT`8#5>Bas{nTdh@`CBIR=70)HGs3a&VC9i(HrG&^8+% 
z0PU9d00MD$MtfH;yai2wNc8gXu6o_X0f4J;tinGlQZv0Mviab z8R{E1t9U5j#_(nHExIgc`{K}!^Bj^E+E|}mY zK+@i&Gf>d@Lye+I%#2e9+)`3~mz2TBB(Y5>Lzb}@TrXqpcq5=V_d=md=BdKR1|x>w zMWdddW7;h}01toB5v zJ4Se31Tf9$Rm}ntwB6cO-A>j8QA4fG>*DBe8-m&MRmliT*-kyQRPWezHrl!U%5T@J zRN`~ljo}zzi&IWEGN8U>wX35Zs5d3mh2XA~stR&r>_- zJCnLAaZr$4-*7`UTcXBksA~)$QVZl}S;DjtDbt`O=OWL(hN0sjlOs``(g+PNDUvss z6L(9~*5OtzEs~`T?OJlS*vYGQF09wo$YO}OM0It<7oua<9arBy8#siVXQ)?aetO^_kTP^wQK95al~x2M7$J17qT3{6OS zuP7Vz%66zWqdgIlvu@^lrClLY&orjUm~!HA)F)0gQ+m>P;hHrp8Uz+1U*b9m7xj7) zX%|#asdoLgiG1eIUeOk=OR9xy%X`nE^0Hm>DjcFo)1Wrah8X>Fh|FUKC;V5knUSVoau zs9wA+bQ$?HHrWqRi4laf6q9i?yMc3F?Vgm_$GCENiE$}j4YL4I$;I29V|@u=0DC;f zYxEc43xQw^A!8DgHl3>^PMfh?s1z*`Dt92=qST6YI20^aFS1j`NI9M&@>pDld_k5b zcgYIkZ*1uy(nO^k#ie3^rZG8fX#mAsUuIRe15v7_C^U65z}2Ciilr)X{AhRpo#9!p zM|g!>O(6nka<13e>vMzh9!?Yh^KNN+ zxaQp3F2>dlwh$mSAWHT(fHrg(!Mu@q43uSZ38T0N6>N`(=W{dEmb;M6Z8C>nz#lL@VZEfA^7z0ux%m!EKs9NBGzqT_ay_?EkG zw<&>`@~r)!q!=PVA3FmS10<=;Q1i-)qFs(IP;a=+LE2JN!RQD=)CEjMDz3awVsIn!Vz zYarx9dJ}AUw}=(1_0;-x>=P>jp@~AT1k4z`&<|pOa)bs6!+0o338#PJ{}Pv1$V-Q+lA zyR41fmfZ{T_7++5P({ua<PT6 z#n4&1YO<#KPyolLRkA1X0uxMKi<&>QR`W&_S?ksfeU2GB#}ZU8kij6*wo;TADhfNU_jaiaUGt0N;}qkG?y>HsTqAM7 zGE$Zi=dZ`RL`KqBi58oqf}SXe$quMbsOm~`qkb-^UZ_diBCpoO_ZI0Wx5eUi4wtPH@+j zz#SLG`NadruBMHwEHd{dO!E^Alk3Rb5Io?^^>7obX=999=!w{5!rB53xL{)z*3%;O z*cG%}#e50@u?GM;rtVzVq+lLqh!E>mkf_`=VB+m7w~#R5V*L=s9c%}6h|cpQcy|Ux zYE&KWi>Q-L8wSikMz3E%-Lkru!fkpXQJBQNd?TtIaEI2`UwwMlY!$C9@c z-?U6LKoQ$Jmdb`GWY$u>?Lr+1U$hr4pNqQyI~!EgR-9G}@xk+s&vTQt7wAZ?3R8X# zU*(iG_Hb9rI70ZA8VY3ZGsYrgDe4tTtKL!EOz!7e_w$yBS6+=}HWeo?w*m}hwec=m zZc4P0lO&AwFrbaT)gml`C60oMuEI`kx%v>bT0DEMa?mm?O~h)4X&D7mnZ-Nps@)Mb z*2Euic+Gi~vW;HTZM&7B4s03l8OpS9Opbfxu3LC zbzjtkOcXuBTEXc+!h-9g&X14Wnf8^)hd~bNsgi5qX1(J%=BtSq*0W zoHYtguc;v;;CEm!QgEGyIpFxlx(Ui7{K z**PHs1zqVRA#P3AMpH~Z!$T3+ntt=f*RhZPRNT~Rocik@O#IYk9k%~`PTSV>^ml-( z|E~MqpMLApFP499o9Z8%8nZ6{2jE`q^Xa$$ktm?jzVo+TD)Y~OGUHrwyWjl^`0(%l z`y0sr@lXD>;%~n9-Olg-&A+Y${Qvd&{PJ{bVb1X9mEX#S{3aBE``+Y-oENT>3PGE0 zZ0+<;MT27MXFH)-A{t!w{pbHV%uatS9gXHa(*4Uy==668C5(rUONoVqli$EpeOEgC zpBHC)zux_^>)*|pNZMU#Hh~d|#{T_{-{=!mobgA9eP>{S;jTecH6YwapEW@}Mzz7==Sz13erO z?J;rtQsoyxdUw3oP?LY{L$i1{$-+-o&z~Q z`)%r?@hs_|4%4$MD`e*XXbu_i6zlr{jqEHNfEB(fiBI2`wA9a*BKM zc`ExXR`E`Bwheh2LZlJ*a6@K4e>wt#g_PNly0hPv^4bX8~@HbTpWRA`$fi$=V2B`!;s@B|7*7Fo&HYTJvWfev%b% z_3E`(;5Ooc4(p-n-oDZ0i<^1E)(_88vTE78eD&vss35}KqbZbPW<@0Xsm=KiK}`Kc zhd)GItw;^zjk*8H+gHQs2R2XUW~T3piaO=v(2(2a2W?ynmI?R>A6;YL!->AQtG;j@ z2V#Wv9+&&*t~JwwRYEq$*)NBI0*H9#u8`#0l!JIr50vn0){826oUNXM3F+{3l=d|H zNJD-`0U^b%P`WrA3+_FJJH|aV9|pT=2he->tE|`J@iYFQIr#ZgsGoHtM82;&P~%Z6 zeR^f9EezwFzI^}q19bxRbTDx6IqDm}^HF}*P4IKlA>p~^&gYMLoK|N|tQim2#b|AV ztUEw+MNBRD?Ai>q&OPIB7R3CTamZlR+FJ^eo*jKVl}!a!-LFaoc=dcgVrBdza^{6= z{J|;0SM}t{egB+ov$+CK{`78Tdsx#-uA>njKZ4!mJfu%~e5a54_7D+l`@rmfB@?bq z$BUZ34gdacoku@-KM53rC%A{(yaL7Uq9iS^SaZ-@Lw+700s)CC77TTb}Jf z+z%EIP>=`P&QK>%25Z}fKXY^QEY;_WJp~Txk5R0PAOBM{rmI}Qs5-O0xp(9+1GNuq z&prcY!gJWOk8RJvZ*d=pdvz`x@2QaVQTQ_VbDVnE3*O$2JKYKB`dwl)4S^0ziM4;$@t~UP2cX;xf zx%m%@%Sx;MjKdcH1*fgyDT|N$S|;e{lQTc4C1xalu6a@V)i0d?P)}Nb_MvvL!E&9v1H13N%*sd@^2PB`fJCh zZ#L_G5(Iqx>mPT<{}Qz%Kz!K!)$N1sb?CSM>^t<#6VE^VZ_+pZPvE~V(-~9Oe{nMM z#|MA2;zJ1f^uIsH`t13~zoh-nuUCHfkKLW34)x!g)~~-f1+9Vm`1LwAzi$g z>A*xRitpAxq5sjt>c8yxyRH$-FHQj8$awHaoSDt@->sAV>(74qU;i2VpI^mx)+~dv zr$zU^{9^9aUsdLR{qp~8F8luDe}216tloy-{g$BRyJ9J)xRie9H@|lDVWI!~oWir6 zACDVf|MUa$netzLnNcH^DaC)7`CmW$r{)UG<-hg(2?2TaKk=`BbLKbxUsVX-zHpvb z{#Qzqy1Tvp_cFojeI0-N8w;;45AHnZkCNnXB4NL^uTS&oMf~ZHe(Pu7P17#__~#>k zr~iJJe%Ss)!^`jdY3U!9VDmG-aeKJyHrobYTNjp=_l}~!{5q29}<5TSCoFV zs22b6ozq`>er%rp$Hq7u(ck_nm40IEum9pZ 
z-~Q@!XW`eMwns;`e0njK_^|wszWv)_%M;Mq+Uq{MYx~2|f_+ZJBa*VS;jjZ}_T|x+ zpxGDv`1$>ZPc=bioBj#(eZxas=5}ix;1w(|w}#5UcYTmr&F-+|!gNIJzV_XevO=); z(f#Bu&*G3h!gDRz_o|+^y?j>iVbgyEc~m$0<^Tq=`OH2spsKA|Q^ou4E2C*>bmE_c ze8!!lwYk2(SN3}je(QGt1;tMoRjhL2|MT_a{}%~tum5z?8U63GuYTjV9?%(&3e&%K zcJV3XzJKtpg8w0sZ@bI)Jb0^W!>(*^SI|Dav%PWY$dpG_FCRWILPGt@xshP<`a znFFI7@m&|GO^cf=&?D~ggHKRGDt1&E_|kg$A@JtW%f4o3AED-62t+n_pH}qgx%aHm z11=l^`)DnIOntPw!Vg~dPtDDAtIrVKI_RA!ACKk3fFJ1}uvE?(mv%&mSrnSO2LYfO z#(8=K5AIR=h}#w-^z=^PTKVMF;3JJ?Mqw9h|O~q7f zjrjsN2YLg~*H(e4`cbnP|9B3f#XFlj?d%BQDUhUFT6sVaG=z_~`YHYCW`7^m6_0SxPXWtp@*T9DRUF6hv-n>R~5ynHGP@l#TO42KP zK}h&nV?PBD#~=Gh4C@dS+kHb3UpTrbM9v8<7x6(I-$uGbd7AIPPyIkqa{n3R(c79gFVg2Ku5EPZLzl(UU}N8Nzvj-^a1-x?`5)~w zSaU<_1}b00gYCN?U>^c-4l(#8Gwk!Uk|Q6&n~#p1cU`tm4R@c2+jP3_4^%9!u77UO z8f+EpHB-ei%sZqbrN#X465wn|r{P!n>X7%J9eg0(NqQkM9ECtbgtvnN){amXhfUcoW`os%gf(`Q5YabP(FY)Qo53dlJ zYtH5Ze%-_FxV1)!-=5Lq7oMICitocdjaT&Xs<_vDo@Zsat($x_)%i-!?AIYET3O$l zh2gR!~81s=uLGV0kZl+ImE{<3Ex+$IU?U%VtrtUO9JqX4P6k1$S_6 zg&6ly5EfcC_cnb~df>H%t3=*=5vkC~p1cuM;rY)#_9GN1Xm{J&OM*H6#crKL>El$y zCkG)YgvY)-efH(7;6=3g>QxdSQ);YlEy&2JiHGLz&Yrw z!?k69xu-vfGBd9~y|AfCAFrX1$hLJxt8@AXh1%=%*xN{S<^{hMo9{C>-1R#YP$Oe?4cnjB2Y06{3mIQScN-X@37X@^%~bs9DD+!I9!YKZw0@|FJ;xhVt(2 zUI;Q=r4B4OZJawnb$0OJ!}vm=UlYGWZHUA8!AuAQ6sBB04EtfcIhX9k>t}c04T~vw zqCAv+{rP&qrs-_xUPJI-^O(;YY627?=ON(pm+LSHie$fzpz4|%nko2{X$BGAE7aEX) z1^vQ<7nV2Moo}9v`o~jZK5R2L(1#GQtzYni{?5AxFT+6y`Bf^G`)phVpyS@19Yn)N zFffikc+DFhoiQr}V!?Ru3|cd8Z6)0e*garY)Y_W<`V0QatGUr}`HM%ewKn8t|5Ltv zbnSs?CS3I^01933^>xJ`{Vx3a*FXQk_x`+9G446as#gCu?H|JUI$>ic_xHcHH`)2U z#s<~&uYiB_XR?8bnZdrkzwYY(JLQD;08ahRUyA?YSqdWmSLL5j|DfuBHz*|X>`k49 zg$le1(&bF@rS=o&&A|iD-KlJQ8{e&-gS@=%goj_>cRq(dkKg^`8%1dj@+-zaKJEiL z9zkaK+1cq2m658w2~nns{p7u9TLZiQs0J|(*YDk#JFS|^?gok8SKU3Zjyt|wvk+61 z_)48mk>BSRzIFNHn=0zv5caHw??86>g0=1sRk=1MV-RvK`Eu*r(NSb>UPb&FV^ z75AfU_mpp$pVjw1!a;c7BRhT^0%_wOs7Wgj$0rzqf|X~wQElm3fE?cna~C+K?Dq*% zop;IId}Q_`*1a>tlP?R%M`uy!<2?sezV=2o7kjWd6T$&jSlS>b4C}5sf;~mdIHR^F zc%}FY>ha@C6ZqNI>4~Z3h4>z}?Z$q@F1d!vg*zM_GGfxa^gk*rVCdMS7;*cl$_`|;Ua1tRO z@!3D~f{uu7A7$1}tptxQK~O08lZ|!vz$ve}ce-ZjlfP(wHRyIa>NGyu_!qK3p>DXJ zPi1!wzlp&Jb**~K!rJybduzc;MN~;kEj6dOZqO)@HFx<)wRQ>zhXbES`yYM@uJmm` z(C^9bR*XJy`*8i2%?sI`_5(A5Klf>yY!_Ey&#YAy&uJn39{b-RKiqp- z^8rZEIryjd+vYf+GX+3HZQB+-K+gHFP8?tYAM$0wM93!}#9-iq)?jxVvLREc4xMIZ zjx%4a&2;9&gziC9#TnF}+MV~bNES730Lp+5%IK?R!E}zMaTxeat!NY=AxPe8*Ok=K>F2oH?r~+mB$QFqh@g z)4p!E?REEPRi|gFy8+0WihSUo@v=wBmi)b7=NeFntBQf!s`~n$K9%7$F2R9yd!gXL zONc$dyhpQcZhnCishgc@H8=Zu;!$v19l8@DUIJb=2#C)b{Jby1n5>Saf<0}kV(~*f zyQ{T9Hv9&{U%2~n1q2y>gbWMCw3+vIng;qFClHEYqo*qq$U3s~`D>tXTOqS~hWm!+ z#+!+kYkP3_QI+h=XkWNro8Ea=$J!QuA;N0Z2s(5Dv~UoJey(^*h!4uACTQKEdv|g9 zckgA#-}!(P+>5?9PUjps_1Mm;cdQ1E-O_M2{N`!!PWK*8^{GyI-)t)&^7I7ITmkBOy=~L}ls-ys3iZ`)HyL1d2nOk6v64R!q!I*hu-T zxfmDre%yYBRoofI4$}e;C$2xLNmC9k+X1ZsY%aXdm+xJ-y;jkmyI!%MM%(yYU;N!Q z1dG*zpQTe9D(>CYLagsQHGl&b%l8of3iXf!{X|{K4~VArUOj4qj=Km-5;ZWzs*~}F zeP5i+H!nn-blfnu(}_eLvF9?or!rxJhqO+fwv*=AULoG++aK784bXIy2>PkpsS{+M zAu`XzfwN3NL)FVUXH885Ru9N;_{`*aA|m!$CMRfz9j){M+$3Hbb}#eu+}n>ULhUR3 zohOKGVE*TD_M_KF*gD%N<>TN(=bOO>_NPm#<23$(+v*xGlQuE0MyP-)5d*B}b;6AG~Di2>D5 zJ<6wh{3E}D-2Gr;oJubsGGCHlcWW}I20sJW<}M$C)0O@iUB7w!l|shq?rhbyJ%2UR zX07XM-lL}Py|r}Sb+3&-qO8b7pWeOV!#tzmhxZkB9*cY*=Q}uPn3DtvjM-g`3PynKSN*0}YQ z@n+%m-I>fxmJ%=#9!>vZ>6Gic9c_5Q2RWYiS<8P9df8C(wo?llWOJCW%y*HTPatz0 z40!foCB%vMJ&h~xjze3oU;O_Jz43pNW!U~tGzp0&Et*J(I1%wX=R}&5kQO4YwQvzW zxF!+eBE(6AM7ZW&5g{SrBqG8!k=CR=(wY;IaMGeVX%bE32@&VQNwgL%L_);*`SJ(c zKU_cD*Lj`Cc^vO^mee@{Y3b5>CZ-budU*R3IX~V*gM%?(NR7y9Y&ZJtHod;30yU$m 
zlA*^FRaI3QR!ukDVJ{9sJoH4jOk0i?*)f7HV_%?8B2}h*7AqgoU?_A0I8GcJKQ(Ob z?G{xNVe!daxKC{1Aqeei&Tws`+7cFKo4|E|z5y2&>;iaopg}yMjzh0<*y?#AJ0aMg zC~FrG#=RDgLF5 zl|#t!c60Y|6o^*#`>-O_AhH}%ks!0uBg4+FabmtRR*6HpHF&NYRBJI{N$p&7mp?TT zHS#e7^Y#u-B~Bofjf2bTO}_bxI-aTl9Ph?Nx{8(G!zQ0 z>#MMiI}l?P5Su#~@k59NR=+Bmr6RVl1wI)YWR%tbO&y(_3Q$#Oq)-Kc6B=b! zs~>{Jl4N3@zF)$X(8^|}-6A#}TRha%m^avO>UAmPqkY|zz;wM#X#?RfjZ$H=QiaFh z6NVl&1&7XuV-x0{Aw#=D*i2P4iwvIOxtc~x0Elbi^UeAW37-KekD4W%iZPQGt(B)9N7P_h0NTdX`2=%$PnH_GX(6Ey}1!;W=~f~m$bf7 zJFGy0V(Hn?#AuI7uFwW^GDt5>IFu#uSPeDvZoN<>)h8KNxFN^wPL{$;O2HA|khQB* z#3rDD3N9vQa_OkJhDKq!39BioA4p?aHM0g_SH90Au_8P?Cce7RfULJwKssh7WbS*6kkYQ9mcz2G71ixn6qR3Drqhsf4UB9jvs09*7>g?T z-efnU&48RQt!GmdLxZ(V)Icje$#2rTM{$0n%r!MCLwPA9LX5}QAGdWw2M8jd8xgeh z*NOdInHjBj!k!-Pv}R;QwZ{ySC8aQ^5YSa^V5G{RVHHAMkk`8PJ>#%O2c{#*NpclU zXsyG`8ZNPDhW$WCSG`CLrqv5@6J5yRut7CxWaKc}#@RS%K0t+9pzm!u5Jr0=Z7T51w+Gdsk$E-?e3^8 ziwFC4t=zgyzEJE>m9kz5SJ-auo25A69RX0KzQ-4x2}j~K63WlP=2J1pkXvu-vUggH z_P&WkgjiKemFqhUdU>z5y;WkBHMPp>Y6V*RbZMQQ+dDQM$zx}V8(gg;p0GDqHqDLJ zi<_CW(nOQ4tyO4Cviyx>)NlvYrK6NYW<$LsNxzrm;?^L+i4pOHoHJX1kRTHV24*0r zBPI>Z$*$tJ2B)EaJl&a$Rv`(3A(Ys|>K>lRj`am$s8VcG*7Dsr{Q1`pe)q#Tu=J7G z$>zm3PJOAMl*Eq-SsTh8Ux;A!%utkLi92Y<_B~yu!`(K^iE}3{pS=HSS!}QslXh^| zfc$njW6jdahIhaI-uC%WVi|Vb?yrX*u5A16=kaSLtG09uo*s;5{uuQfUB4(&pSd0P zUH#oZ77ZNx=G(*j6zVq*oOTWd=R%VY{zyIjm;CwH@8@ho7p}oMZTi2# zK+Mw%uYdm`6}z;K_V!Thzh&#Ogq4uw<+DfH1zVoJf5o_~VHNNZhk`RdGehnG2vRAt zSdh_^D}BuQrda}$UGR*u@m)MSG*w;CaE}UbdL(9Usv7GVh}vx9w87$9D}LALpg2%n zuLq1&$}B@3FJ?F$<&Cq&PNagWl4_+S02xEUj`a}|?rf8!HQhwe=vZu(ro95>m@o{K zx6aYWBLzaCrYoGTtEbJ7K>VYLD3!U`T>C8C zr^maY!vabpqRlkWS*gePMx)N)OiNylcL^hH6r`@GTsB~sRprTQ;S|NjbXY30N`(R4 zJ!PF&jTZY66Zq=+^EPQSaS;IU%kj2#gAMmj@~I`K4{u-f^n`EJH(P>j)_-;?_^`Sq zSWcIC=4v>HzZ!H5ncpJ(c~0?M`rq_Y%*%4!4+#Ey-;7QL?CM?5{5yE`)7RJ0uq%ml z&51wJ_?O=m*E3&4-wXG)3uXHcNIo)sckWd8slM^Q9w)04Yl&MAesv*?S+fZJ^!$na zTmSg{P~8*%A|gIdj0vSR4QM7XShvv7Tw!)h0NY9&HgbaPzLJ0S*dw1jBD(S9!?E@i z{&%Z>IJmQsPDa%_XzFYRUKmxxf*hX(bpU>;1I=~Zi#TIDCEuZLV`Pk&=G zcFNw|^I_j_3kcial6D^L>HFrS-Sgw+*|l5u9Qv#;v|Q#%)_wF7e4Fah1I(6vmT$if zO#YR+U%IMMYB)96=Lp{lU;JY+Fb8_NGz(d@_-Q!g@sdA&{9@k=sBH9%`nAG-p+Ybq z+`gga#rM5H)?VQz)SWM1gx&q?k!ocWCpr4ryKj&ej=aBN*1C_`@bdoe{zg|4mdtoG zHNT%c@cfcxh$TxYukBG=KK)R<>9M)MG0C3#2TyLQfA#Cf_Jm&f?w2bk)_%7ga?Lln zOm=m{7ynjndkJ-2yEgb94S4U#ZN}U-OZ4uaKUsH`e6{1zbEfyXqAxRBhA{6A-h`G1 zk8XQld2zw-4dH z?!Nz+aP!7BE1zY*|5<1STXUPT&lW9x?kz*_acj4RR~zpnEV^NPn}xXdn?wFQ@aL}$ zsyDZ;g5LcD@GvpacX@6B6!P$fL%v&N4wsfaxN@>{*Uk?{?>@aSm;B>phUlQ#;~%{e z7#aHJ%P-8QzC7(X+54?~?6~5CeY-{Py!{4~`P!TBythvxq*pwS-n?YtzlrI4rN(zz zn>JTI^C%2j09B!Cp53vlm0I!duRou2UHxa#Qqs#?2}BzD_u?{{U+w`xs|n>L;Q#)b zOU?cBL^W!`qY3Z8g&A;yC#_#y z{%=7CIsJ9Z(z%~?+qW?{u4(?Le{5cOskHO=XE?_Lf83g#`MzuK=EsJQyac$|t9kv^ z)(<{+24cy(3oACa4MuZteAViTWtjZu!nesS?6aG1?L0Bz|D1aD#s8)s=UyRxFq}w6 z^pvSjcYXBh+C%3&Ct4O7slX>+P>&{O!YB68RukSn6HnbfwWlm_I(Qey7H&h`>XpB% zz5wN#d=uZb*XB-t)^l#bLF+WC>T&n})v5EIG-03OkfLSHo$FT$lzl_-2f4}fpYK_5 z<&%Tv>(3nxrsko`eRKakLnyD^yb2BhW^?x*fS;(OJoh5w1q=?6o}CH(IzD-O`rcD0 z+@^Z!*5}vaE8vfpmM(xivlP2&<2pPVF#CI8)ZKq-?85NX2bl%9)yNfuCqZ{IlUFXD z^Z0+cmMr8SeG0jF&4!H|+1y=XwetN#h9f%dhk_l?Ed<8C_nhxPAGkXE@M$z|#hRta z(g%P3dN~-FxR$!N08+Z>)~$&%pX=U#ZAZ)AyMNsUEM*+D|8()nwOC}taqMvWUhXTb z*XYF6l{L#2j>t%Lyx_%?>xAs&35JQ$5yPZ-mqca$|a9X-Mn&a?@r?H zN4L*)?pm9Tec$`BZ2!JDg);dEN56KBPd}{Ku5#bsu-}uf+TAnpu#%5$&sa(P#4;jW!WJhO8G%ERYL%%YR6vyRcYSM(OotvD+S0kwEL9j{FK0wU2`!M?A z+@u1gs~WAgO(6}<)(%x4wWb$Zi|80CrRvF27`r8+s#A|w6X3P!sgCi_9?YVoSQEK#posD8CQSw^I1X^{R1X8IROdmIRH~jd9Ltw? 
z0$qiA=?Y^VIyvl2U=$tzhAy$O=1Xzq zvoY6@smELsnX+1ZlO>G|GOZ#%?crftDXiWyf(%%zPGCh!y|>>sK0G}g59Gq0{(dFP zQad%Df-uapc)?H?R6=t~F(vrM+3A5ge_k~*%`UXF1jY~-ZHf)^U^QBrWfCQE66r)d z4pnTxYpW^jn%vZQCR;h6lUh=#sR4n0Uc@$5fZ>P^U{ru6GnzHQWE+A|juLWdgt9TK zQO>LtE4=wKy)#};6xWZp)s3MhXC`tE+N4Qs?8k)r2dOCyWxjyHH?o^-add0OPtsWU z^Bhb4oVwi|?9S1Z;1MX2FD(yw>H{_d3Da(C#mEg+wGJ1RX=gXEOB=zh&N=t=WQs(b zFK%WRHZd70R!IW|VQy)%r-^Ekzl+_Qr+Y(GdSxu%jR4hE6na}#D1iZB>9Eh0x_Tu8 zd^FH6R`c2P&QvspK#9!VdB-4OIO_s6jG70-dNX+rG#)TQOVjhc(}Btmf>ZB?Qbi0v zJ1AZ`V;4*t8aTdyR3!o>JXPE_&PbPMsF^Q>GT2pIine^nrvx=i>OinWcMt|>9HFow z{aKOO6K?IyG%+l^lsm=<*$5LYcC~E~3AL0G%E+aSesR#QWpLc07I_d|a7>|ELGhu| zIIz5xB+-?21Qp$YdU_Kuo-l$6(|r_y2VB);?;1D7lD&ZvqoR&&>V_rRUCyQ%mC#-r zC0D{tgzol2I}&K-2j?pWL~f8H=cbDZ;DHKp$mvjXdD$G;i5BbIqtR^Hgm#2%(+@aJ zlW19QcSg)mBgSgVqCMRl0<(u^hx_-?mjf2f?l&l_%R(b|grkV2QZfCZ3xS=ByFf-L`ms>0A zDsRnzXwbwUsi378OE{<*V{p{U(G~(cM!)Cvmnd1+G-Pl@ieo|f0 zG)0b=bhcr#eSRdruQ_LgCcF);p!Ot=11S%TFs(!Uxe}$_PAaWZr|bX)F&nC3le!(% zAw^cp1UI5OwSgo#(^4_c>?&DkOET7?sB#bCCJ6=dAk^Jg zmlQ`d{rve@9#I>b4Hb&?l(35r@97%{59%RJd7W%hVr1&+5G9T+W|E9<5Y;OhZW~OH z`p~YPn#uXf&bh3pnuZRympSky{TwyC9~A*NRy!&h*_i|urIZQjlFme5rWHb%&op!l zlK|-HL;^|4lT~fSf6vzs2&w_Y;Y``2d!`LPl$qrA5nx88Lqf%f*%0Y)s>n@%Ccs#Y zM@Etay&kccC?oczaTqzili3vWDn*uxNJIOiCZwKkpn4rfWNuX7WKzegxSd69y=627 zMV7KGpw68Yp;UfLBxD|v!RD@ns6L{Lej&NNh$;&^#U4^{%_LDWPryCyJ^aLyUz zd@>WLot0TT5zWv(JvudlogXWA&RS|?wDUvX4xB!JG4Ov^uZ;hEIUI{m-tpiev{qGe_~qY*T|C#6LhM|=f#v}MkVk*8$*QbmVPRvHF^Dzs2}ZBJWsUonBz zXBZdND}(axMEyKk(iE%#+osix#bSeKmojrc+T;vF0c`2u(9Di5d8-H7nHR^sB?1tx zEG?8MX1Vs1q7EXX%j=!OI)bS-ay|8a9U_I*3iM4xHN`%u?X=JP$t;OQgHME5 zIC`GkrfxTkV5fSj1Q@C}ZnpAZm9f6*TmTR!6O^ppL1&?nNd@5LbJ$8=udLre!*L`v zxfB^o(o|qOUGAQ&1|MpZq#=z1b?gogxY?i6;7B07jaIGHPY4Pzu^g+hkFfMfeo8_C z4(M!Uu9Q960dCK^WLR0j1PgFGJ7(wL{XVp&B|BYLRuP^OGfWysf?Cq-gfJMAYIyO6 z18D_TTBu`wh{k74)kECbm~+mWFjo!C!Mg_(g}k~W19k^k^+kUMxDMFUS3Tcb&L4A? z@SN86$dHVJi;i0-{>*p=9NoQ&sm=O|OnJX}uhutKQXv{^n`K9$2ZW(Bfv z^JHBnR7U}Eu=;seoC?PoP%0VRD;Bh|2f>+kGgsjN=Wz}sp%sdX5OCGuBK{HSf-`lf z`4L4AZh&ld)_2u~n(GEw;buWL!wR4UCWx!A%-UIJ9MQUQx;bKpi|-o@3aY`hoMZqT zoUl+@V45kWLOH4ou-%!q45CtJHL!{bH_a&&`3QJ7aV`d=!G(n}c!V37wHc~H4M_qL z##VMP=UVK+c6&I;$T_sk>X^ zYSS_LkrAlLSR-wTc7w2n>gr%u2x^n`^y91egE}|@l<6?3rDG#VA3|*h_ytOPy}?ma zYEiXK*0m@b8%@d@`qXf4F5J-Sb)oua!&E7sY7C=eO}!=g7__pcR@mI?*Hk&$5oHr) zZmtPIYL=(#J1l9P8ez6MGOgB87K|wwI!Au96>4SxcrDhJnQd=!9Nm;rf zltjTaiwY%?6~k~2D;(t^pgkpJ92GvVX`X1JF%^|uLm8?k?1Il43c2x7IV<3)&t{XI z!VXZ6*2hvQGNt)sLyQpbp6&`#q?Hl?Strb8S>Ogk_mr-2enbw_Ww{8d$>|zO7npOJbT?rA->W z-xSnzLUjXOS+fbFHk&j=s%V_4vB5bUZIjC9DT8!Xt7tR)nc=3fTEmnl1p-rLxdgPM z4T3@p5+!*|M@dW0r-Q?*rKIsdm%!9A38hvwG~j0Q18B0ck!pnNy{&EXazN|sWJ9D! 
z#h>rW*(gJ!lVPj}jcTO3{D9eJkk|4 zyVX3byfZF~TFD#%Yu*NBPpFY>eMd8%*zOz&2wl0>ik`4!+T0B-%hF9c2Z`0$L}E4f zjgt*i8co6Avt_ufx!RFr6H!zR?HK47(=$@i3ZqFAobka(G;6*wj`yL0{Z^_H3T4-n zP#GgSOQklFF9xEr)2-ujpMcS$%QwoV2CHzIB1H+LwgR#wo6RfKatFqoO+o>y1F3T8 zEL{~j!kCdc=oa9)Lk&u#5LxS(Ys6;f1XI2&P1|4>f~fo$wjzLH=u6@eb8jKbsm3>@ zC#7CwwhL_J^)u{KBN?GKBME2mG(&lJIj{-hHGqAxNQBf^3HHV+$DO<~hhwygSd4$f z{jpGc#k904hBIg=0+`Jzr#HgtQ7TiDzJh8V_QxeLQ8Gv@Ef+({Hn4?;7RpE}r@9BE zteA7f08#N+cW=2W1na?iU?~g z))5+TECt88*;bI-53EO}8XM}l^g2}}g~AvqPlV#ofS#B)SkHFX+Z%JW zve>M>6M`?vD2sgm)*p-kWNGGaB&o&L**&8` zQQ}Dy6qOwvXq=ic_cYcZm9Ch+dO~VjtwGTr zvq>zI!#bF;T*<(q>~MEL+ucG;!4TMz>M4cE(^RWW3*i!DD|N~Y!3BCLRos*<52VV~ zgnkXY03WNWmDO~Wc`YnvK3nZZAZHpaQ$51YLU@J2Lr=R@6iQl5p zYuEno+SGL7*3_-U?djhqf1mziCY8?IN&PkZZvhB`JPv_A4hAn=@aUs}`G@!KsRnep zxT`uVQhJ~{UcYtBH=rD>7;BBjT42lnMe6J?K?VBK6 z(gY>~gSEyQ>-bQbM?yp*vFkUZq49~|Zd||q+b<&?_YdO$RY0o0;p^eibIx<#(ea;0 z-KS5T`rrTk-}}Ia_p-S>5Qbc~0=Inel4Z*)NiV#-gY`en|MTkW9G+0Pw{6d!{qloq zt_D@#a;6(G=IbkRJ(aDF9E3Db&C}w#hrnJ8X^_OT;amAb zOi-V`7`-O-!+=yDLV(vBoiqYUv<(49K5F z)pc6+8mp-$o3YOK%3=_&cuc^WX=S2a77dThX4H_Ing#7PVzvgS!G(f|%PG~5Z7 z0t~t4lnI~|_oYPbpkS*njIcv*PM>4>s+$tC-R0{(*D-cJH-0P*we#o!T!x|5W$h4)kwd2)1?H-0%wI0H{J& zefHZ`pVYBGoqy-2E61=sOKv5I4!Gd+(4*^LX?wfq;@~xNGEMec4u?3IZ|6#}M`HN@Y zePcgwb6sQmxwH0jH(?ZZ14r0EBrKi3{q-9!76&?4uHC-3YQu8E3oBPCzB~QP-?=BM zcWaLvwcEbxIX)PS{SA5o%WOY#{MUHm{(tvk|2+n%d_^I8*K}aeeBSWQ=RKOeM|O%7 z9iq=p^!<>yJ$0w_(ZzV&@|vc#Mai=YyKr{$_jn{R^7oC2hkwpiP$?|A@Z&@89qr)q z=oM>l%a_$+E1#)fwgydFUbVdBQP91j+Wyh&TWZSwJpbViKC5@&+fx?9;g6J-o=cG{*V2zcpRU^2@Wy_<@r&bM^?mEU z`3Q!o>c4z{$*O9?E3z(kerwkhYMc7h<$s?jU%qnndfJxy9TJ)1uwE{D|1g!yug9iL z2S--!esRg|*wb$tcI3W2J+pM}{Q+5<;|bcvM=u=yY@9{J$2{J=7E!^S^SZEH$pA(~ z$6x@19SWnl6K}MXL`U*GFGj4%WsFUwaVN-OvyV?CC#;`-yl?OO$DAX7EPi3%vB7iB zkxSPWyllEM`v?rNlDyk=rLyhVh3Q98xOJPTq^1u?A7iRd41Is;vh(XNv_?YnWbV`KPR!PG18}oRQ*Z`}c0G-q0-R1HN*|4=Z}%Pn`Ph`|rQ|+WzIoO6A_> z7A9#mYT{_>q={|iq=K3sA69mEy4CkPTwbFa6Yx|WO_e`(HC6Yf~|-%rPmSX{TU zAN|ujPPff2qi$LIbmrUvYWl>h+0%#iDb9hndsazL#d5P(FWE#?d^H}z@#e}fXzV&Fgj7Up5k?dy({J{*3yq4_|6Abj=bGY2;<_|0)>JM@>69paJ;V|wy+@ed0gd%R+4#{YSpzrFH@w>La;t53RfJs^`j6Prb|#3~+fZg|4kK&j8$ zoyHniiBHy4n;o*qh$)YfKGUxe0HeTYe+URDZPGjE>J8q>1Q-*=F%!4BGUm8kM3o+7;_8+h3I)AFvu?$BD^ULmV8_D=p| z)DQb>u2$~M;XfMLbzpH!MNf35rUQC5dmvhnUWZKTo#&$j;r+w-k&u=6GG zm~!)qr_0v}e!@v??vw96m+gLSD|xfn{rBR{FRx#C>9A1P{?*NO9}fS1C)l@#0vi78 zc;GJn;MmAFW(neg{(wN?yi?A0J-W6*ta|Cm^f}hTxaE1kjo?HaC2{>t>YgfPDvzFy z1-|Y1R8Ut{cJ*^b6LhpsxgH4n>(~d!KX{3!5OOucf%uxYR1;NoAN;k3_3hkz2_AiO z@XMX|y~m!vKKyaz{kxZspZan5>Jy9u7nX|qL7a~VZ>O$aKKs_PJAIUgqw@93%6`w( zz5Q|U+14Y8jjtZ|pL~n6ZsPqG*tZfPwQThsv9a*Vq2Guii}#md!|RU^%K&E_J-eSH ztyxlc;Opsa=TUpV`uV?yzkYRaV@%6_Y~-+fSM4kMFMZY8VxrDIHUpX&Fp5h~w$pz% zeArG~wM%RInQS_LcaQU%Q`g2iw%~r}qhY^)^tRFXUiaVSOW%y47jHVO+Jqq&-rxQ@ z^#0S^&n(1poi`$Zn?C;166Z$1os)YNAGLpSK3C28;;%KYD9%mvkR*p1e8pNa45h!zVUBwgmix(I_KB0n=Z-xn~<>w(zI-t1~ALyrmFt zsbkPrRbba5V3m~BYiYaR+J8EhM7&|R2-*6{RLQ0fKl|!NcF9KehxSW3=T&I2?L}_SqC>2j6%je&aF7oria$qpsc~2Y0{Luc=zI4y7op12?``Qop{}g?sV%^J<_K!dP>YEc^4V^MsP9E7W zm%mQ0tHa#?@8Rw7U#<>+cW$WfM4#hpTmO0APuK50D8p{rwsV)_qmDzz%%7OPJfi;O z(0;+L*BiIeNNaIO)YA(~pIW#Gjzr_1eZGP9y13(gP0v@>?|fta>xsWo>4%_P?y-me zX3}?VCFT-0rsFpv(Hq4s>E?|aQ^`N4lmFejS5gXF{LG315Q_q>e63mN{$uj)X3@UB zKOUC8bl}LhH}5{V%xedc?gW$2CX{Zsa!?&H=bi+$JIefuh&-*oGT>q8>p{w?rT zmG^GsQ(oVcyLd9zrmc`jbV&Z5a`S3J$uGC2dp^DJWplHT2wk%DuPc9FcbxJM>A(Hn zd@K_D`LU{{FMg`C{kH=5&gI`9@Xy|SYLETe6FVeFyuU>L{P&M!!awX#OI|8ZU;dQ; zJm{C9kLak}s8+IlX(2h@uUEb&Vzs<0n4O1ySWx%2 zeP;3VLcRU_%i){9`Ms{LcbeDU^{Ht0`+4P;-+yl5)h`e1rI*7|;0yp#3a@Q`@5HTV z*yh-peJAd07JnREynOl2_utypu6UWU>6zchdf#68@7ST$nO`-vW%qtKyzk8yF|dUT 
zALg>lDxRwUsH-RW!tSH<5$JqwA*A9}ONwgw zcg@SMvntarAQYtw$bUB!kyM zEniPQ`I_=$WjhBIF~450?rGkH-MFuJhTadXd$;H0QPEa_bxY#&)@@rF-;YrBe^7Kk zBp{2f8uwNE_Ywa(+_IZlT_mSJ*me&1tZFoN{r2ULnYdmu><2pRk+bg}(!JVf=sR>8 zwecm}Q-roZmc8(4B%VjF2F(8P!~4w}{_a0m_t)w7aQ8FUT@y>HUmRFOIPj3Z`J;`pY*(?wDB zlL^|^aylPfT2}`{(8~cRECh;$WKbpJQ;4Vs03A(Y=B81w9107?k}Im|P0S{lT&OlG zP0r!5@Z^kGpp#SZB`yUyJ}gS+`pV?YAz*mKG&}*DNkN#X(O!nTwXz!8Rg627v{W}4 zrs;O$G}ccvQMmY;BG`vP@%A-4d)o^rW-Qn}nhc3xXk|%??WPc0O>U|gC8^632$6Yn zFJV9-n+fOybc!!GXJdJJ_3CsBSHVa3;gF?h0;bZcci6Z!bs`!~C?Yli@jcM?R!Yd{ z%VgMQzy46i0qLH%cE7=UgWtMGQf&G4renvyI^KQkv#u_U>Z1?dd+%NGzI_7TE*^Jx zt572EP#a8Nes%JH2hTW9ecfyAK4vm>=`|mI*zsPM&=6t+N1!FiIt!(Wfi|fM6{MV! zPsZikhM1xSgr+L#N^K+v@rBTs33a@A{7Y#A}SzZXj9GtH+#9(9+ z5$dWB(yS@zRQ0d{Z3`9p!z@i-1W91@_c})t^8iF$Bf~gd!RXG{aXUwQipQKPXP4S__+j6es%Gf^mK&cy)Ygan?00raJIXsgBoA+*MeMr~BA5OJZTEx`nq zgCwP=$@+2_zpq+WBN$-AEr1?-wlwLt!ciJ?Tg$uvi5X8*aT#va(6pf*TGmH(vuM-! z>fB(TSvV2`NV?lxC3&SBZk)~3ip}HciV>A6HcoM8;(*4v2oF2qBE@Mf>INQo(hZ&t zL^F6~-q~yE^g#l{Jh^Z*OXdoO@q9rkz1qPO*~s9Oe|pLp^!BujIk?uzK%=r`FfX3! z)=Z(jlURSEGa?=+?VInCbfNW$so7SEm>BUkMnOcohi{co1(hDe{FoP@1;U#ADqi)F z!OWmQMq4Fewx_m~Ois+tYZ|0Y7`Ysy(8()19PLG1K%feY9p*Lbd?k+h=1DQrOzs0W zbWe^&Y(%M-RhkZKFwCxI0G&jUk8}q?jB!qH4cBi`q)C7b27`!UqeX7wxEcYnMqN0c zIR~bSz~FKU#8@C^?R}%uTy7_-&gg=+3_9GnxlUhmAR%GLrA>5Ea}{6!FbSR^+oU}c z9tjQ*^hSYXdRH4eU-Z;#Mp*N+K8}%8Z$+7l`QiRPjYI+hg}Rv0@lb}%X&k_}^~}+% zB~EuD#`XI%)x85F7US56VpLxs(&xk`tv`%m;%_FT7 z1+1ge(>LjN_23BtC#1DeS7#e(>=88;Xp>^9DHHD1^+(vTV!)%t!R(RQdD4*t*yB>U)WNyulbp^-TsOtZ;79SEHX0mt`rzNn64o%;RkWZ)cAV-PbP^k+qam1N2e9{`CrfFr#d6C;uz?EtRK0}e*C8uCj z0IEF_Ytw;5d{(I;Tol~8H5EQ5uVL67NtE%>9D!UO9VhtnxVlaYj+;)46ZS&4Pv#wo zlqz|{!@8D`R)a5fE3l4^I;MzCqxzPe|MMzy`;Hy2Zriev zNZdf7ke^+%91ebX@6JD|f3x{Q;a>4!bv1Z75cuWSiBQGLl`EGoL%Y`TzI-01qD)?%z$_j!sBf+sze;_b+;oMo*_ue1(A3b3>a`MCh#Ua6l)oT`AzY=zJzOUm} z*OC@a|2-Qi)=S@a=dO-{0X_;_UnIwT-kF zp1zZwzHs8$QOV9%Hg7No zt*Tu~Zx=SozdW$VGI03NfYSm7;QKj9PbsJuqfP5|(?)tY&Nqdd*}g7TY>H_iPwQgk zy`wEPQjMrGt?=Y4T!w~nU{qHG{BiWW{IFw~fW#T~g*Hx(C2w$y*aWC((MCEJ@>o5f47=d4oO8rCOIjhBc%f4p&$rH65B^o zC>&izCkyy3YAvadoXnx|v&s2%I_fm*guMD1T18c(K&k0y74n*e0=AS*D9KFrYXwBF zdL*75>(SAnkv>9n&>6^aoZ%8|ou!|`q_W%FxJ+unLrxEk7xI7#@QA&;7lRqK8B{7o z6Fk<_ANCtah*n)7T`n5#>mD`-M+W3#LO&@d0}QdjwTM_eKT}>$rno?pP_v~o%0SOW z%SPda8V{g~7gj~`Bi&#!3}+gRWRaqFcAKdo*ewyKXjW3Lwcx1g!M9l_0r`1I(gGc3 z;8DbWWR_;cjYFb@;s%$>$TTfk#pnY=E2&kWsh}8=5gM`KVR%jkN>2Cn(R4i(QA)ZW zW*Kwl_?&d446By7=!4;4d8|Z-LPbrM_Qs~RnhIf4mMYBUxs?;u9Gq-20BIL7>`rMj zSAfswx~lF~=zb#8C5u(cGib~vN?$!I(=14YxCgk9a;RvTkvI2sPm%vVr_3aLTYDUot{b@g~qVyvr~ zf=Eu-v|6(#j>Hp40t%z8y|s?PD2L+#=|Xl6REDVJO9k~*LKzHEU0I%VclDaA{q_;( zNCFJp^TB!Wvz3p|B`yy3eR^OwzwO<=|A>v+YfsdWE2^F?OM@c7yo(otoO5nEkuB4&^PqnrbRa|2ga@> z(hHYv+xI?cF8HJC`*S~xUcEKt)od%dNQWCD;eLvL1QjjwSc@DzOVU_`%c&eh5VkAW zl=RI^GF$QXNqZf=BOh>j20Rs94Kf6okET(etVG;3=oF($l1h40qn#QYs17!dwZ`;P ztw5m>0%u}M9CSpZSGTHtF)*oOw4)UhfO4%VM!*mB!%`5lSQdqMR$3-`@)G~;isx(BK0cvt+_-)* z1h+$CJoODGeenwcAvln|t6BgJt0qQ7ok z>*cJ2Uishdj^uL z_I>&5Q?Dw_uAA2{TyRL%gU8-MhYmj_F44ci(9ks*}|R!j1KHfkvs9@b!%n!B;)5zyG@OOUUu&<|jRZXJKa!70U~a zyrM{Bc|~wyB=Q93sAF=?b5%Q4J81$Ya5i&MxP`godhnXd`h{cIe`iCB>X+qBZJ*uv zW$0_8Xz|q%Z6$nBVfy|W_FG@w#cpW+au&-QM3JnB34QsvIthlri)v3sl^ejdM< zn6KFcTS{A;x>e+gx3$%+KrV*Pr|#WJPK_rM=Le?$JpU8y$=o`n^c~L=Q^VPZ$3N;* zFGZl^)}gQ8Y;JvVErrwg(w7%KsZDPbV-VkWoxk{IlW@%q2>eFq(e$r5_Sj;G@qUQZjUl2%(IvpXD-r&iX!e$ad-oLjh@v~6#P-s1SCuSn~An?=XppPf40 zr54jxRXjH4)V9`?%+CjV$Ne@(*fRkv5>|}P66`w0A)76@=qdQi``>uu)8ok0+*ELeD;gNno8k<4~klX$%4R?p6jYbqXI`jEPe^v?0`Pkzw+YTE(Pi!TZ~KN9bF z<+Ux4e+o~R-5xZ4aq6tw(b?Si`k@~mSxo{9f~-K@OwnpfBGxpBY&rgzu`7+45)f;- 
z^|D}O0uKtadAy;LB%@Z2Wx$%!Bvx-T%2!BG<-T&Z#yaF1?9~XWX6&pfNm@B4$;%U> z1UDusvGJ=*^6BX`xC}{dYBhUjP#9omSVXDPa)Kq|;eJ1?T@>`7G|YK2slOUEFwM-+ zTPD>G6vp-cw|6}NIbPpCwAL(1(pnnU(z4d9nOP&WG+L4tNs=T>D%&fIWOF|lLzt6e%dEWOev44KweYbCi_qq3;d(OT0x&P0(=RTQQ zB!$Oo#N=z~WR^y|dR7+<*UGWdv^-r_cu%hUvO=C`r0wbA?BR7aVM>h|Ms1ZI>e_`P zO(y0ioennkbvHDRh)8RJmC4AY-Zp71L(GGtBU6%t9WB&r%Zd`hQled~Oq8nMx0|^`$iWNo?P_XQQbs4Y>5dw%X%|-PVzAWP z%hSVSfvc;t;~Y0{PfwrV==b;HBhWwGd2+xDdU}I;|5W?wv7)lCTY781Z16(A`&TQE z9XXJfUs}`nw6^}RXM22Gc3{uPALo2>`EH+Qb@a5gly7|p?LSfd<(FU9Rvs_N+mn6ZSjD+-8=D$D6I!)f>cb9drFm#y zc22ZbQdKW6aMsTA^sku+bKC}vQA!i-hge1=7+)NIzB;)eUwe4Z;OH2=8r2NzRAU`; z<&j1PLoD(QLiAMw6LSo&=GQtIm^x{lH;S~(sdo2NX|qg~(|1vW#1~?TzmBJ2SXu0( zUN}VAO5LqcTU{-rt)NLiJkPg0)>Ki$O0~%>JT^Bg#wNmiqIGhaZlbNF(m1&`T_5=( zHFpCCy9zC{t8V6_9nA}(DjZGp^Q-e};*y+3_!zm=4lhjAk5930PH9gM&NPts$n-IC zDsA&oP}$P#);N4nQAMftAib>fR;j*8=Jf`*y^|Yi3RE)hX=x=YT!^aBZf$PW$g~OU zS$oyJarD)UJfn73vxv$(W1lLojCA`V>rvY?@{X349LX;zII!oV_qQje?8;2twrSJ0 z_qVOzxFzv}J^7`l&eWVdx-TPX)A|itQ!?{TRGq0gbGjPBL2mZ`V^v>WxmtJWG-Qvm ziZd53;%xh?$`eHgbMgv~m4EsDPp!Yo{R#o&)=$^3oU5)parEf%^0VLG?Acr8*%zLF z?zw@gPyXCg|IH=*@t#W!O)c^oI{G6=uM7$F4_X-(z9x2Ca>l-bPtTk^efrcV#|rWf z?a#^nc-O97nY;HL{`A~84Y%4_e)y*L(<221hYl5&SDv|CSO3GchD)_ylogj$oGL$g z@!QK+TUwibxN)ng@n-Y2?|-<~*wm(=+_&E|&uQx!7`%kOsH~{iqxt#|bqy`|<=Pe0 zp6RFdO#eQ8o_ea+y}P#?uQxZ}>Z$yU_DBmm2j|607P?xQI_8EcsNA!tFu(0y)o5s> zs+MWhkbOZ(x1uJ$KB_6**EGK-wLnkKqOi6-Qm*IuW?xk^L#@^_J-ynH_&8&aj1271 z4;mX(tJ)`I=2l!)l8;i9SCKp2ta+uOLPJwE|5|XmYizPn&+^iHz0F!Jlk~OD>s!Sq zUaa$I8L92n7*eP+y4^R`r#7K7u0E;OTvIO2V|20BmeXdb=~ogi80*EQ_$N3w)`hn> zRX3`8)&}0b)}E=5;p3)SXOh7P2D9XYspt}fc09JelV zL*mZ7;_~V<6-V=Sr|sArzhUj_h}Gfipb^Z-E3Ufm!|fhVKGj#XZy)8Sddaoj{Qi7( z>5;sg?A?106`ed^f3xj=uV3%pzVY2xXDW^t6&yO4o4q$@-=UIIU;X4Vcm4uTzg4S4 zyq95I_IrzTW=J{h-YB z_74m3U+CiKykOCyh0e2QIygBw+u6)m;OV`1k;fvp1&h1`fT$e*yu>V zC2r2nUO}tZtPWf{e}<*ibi28WmIcgTw8YD6$#Uek!avZ*XK8?6fafBQHLJtJRtB#K z2=et^yvSvt`(m%9OTD}nEm<18dR@c{ulbG(m!Z;q7cW@sy<$a(x7%!+snce=EMC0G zaoUt=&P#m#ycaK68oW`p(mu9f&}ge6m0B5=1%;P~L^+;U(d#|D!LBadthk^Ip8_my zsvT9Ld(cKz&POFM*R|OvB)iZ*a-_S3Q`I$l>q1i}jokK@wvsG+>z1T?tC3n5r;wzR>76#ds5rr{!&FLZ87qZe4^sH!ZJLOaFt2C+7o zWvRJ?LM$_)%QXv%i=#bn>&En)sGr?lQKKEK5iOS(8LU}XU*&MoP(MsTQQk}8d{Tt^ zh$OAZ%4~&l6)UBdaG$t~)(O_7I!+!HE)YhPF6PChOfR{ZHv z#n}ay74*$Ci+kTr%vSatViZ%PpKWrxx^`k!d5E>f<;ctGxhmCxi5KKke6QNIx>l-M zDcX8@MSB+x$vT~4n3&x#()X&{HP>>Zh@|3*l1OXa)?D`?SGSDp*^nQdRbJZSklJ99 z)ud_aY%!!c+oIN2udcY{in>d_wp~J_qC(Hh5reV~@F1A{( zXKQ7`xB{(+8a3xi?MOqpcvHJ*|5W|t3xRgZnOjm!^M*u4J3Ecb)+y2rY>trkP^z?R zt4SQC+?46ZGEj8b5C34TBpn~w9IG+&8+tJ(nkV?_t!CY${Kwkcu-rOX;!;>j>*B(4Qe*ggD!gMX;wzM zI}caa(~?uwQB@mZ>XJVwsy%T~uJx5v^Mszk1r>G886Kt=CYDZ2Q;cpfZcbDw3D&4g zgXkHVQ8!}CJ*P60e7%T9r!xJRHml(H>>6XmaQ!@=^jf`eXPv8IgQ`#K#oQWQsxT$Y*3u=adXQWBg<{XhD~8_Qj(Wdk(R5FuIqYl3&&6lHqM< zmrz|BGFthbfnk-cv#q_Wsj;)CwJ|<8rLQnZSy|g9;YvfA%{6x$3tMk*)86@`^7Ial zYe?}i(pOd1ciH0I8mZq>drxE3X#C}$Aqkc$RT&4XZjUr#i7FZ znxjETotbxYyiZtWX7hRdar*VCQKgM4r6xw+2}wSt^{pcUGjcOyeXQDx>n#)1nzyLe zx7Oyhm`1b)x4M-Xc=#tIZ*em0T^ns7Um7`DMgD4DRsCf}kL)Jnl=H0`BemPlXWF-H zvFv^2LZgO5mWp}!Wx1Ym7s50$4RZ7fA{?##EA>-M?-tqb=Q2>8nW| z(G|`0?RuWxF3$cQY5A=!x)Y<@P3?WM6iu_6dup1cXv7zm-_slwo>MNbt?!?e92T8- zS1}G%L+Z^YrCbeP`zp zy(?kmm0jI4_-d<~iK$w1MN}|sJTHv%w$!V+D$n_pxoRC|Zl2Z_=r*J;T!p7VwA_E@+bncnwe!*HzTgNi_bZ)SNu8LD)u(@jL z@ZnJfnSs5l6ivN+!y~K=+R`FY3Qk*`*L2RW%_uZ8i%f27F&Ld%)^nU&d`kJX-Zj^n z<*sU%x5+gsH|jO2UQ@P~kF)djwvAIVOEjzVSFa53=~{8MG|xw;$hIL)FUROEKzmN(M@Z!$!b*5H82XF zn+xd?CJIV=1{Sdg>&F#XT#0oxsPw4Scky@DOxJ|fYk2P%?Q5A{{t@2(Ik!iIv?fd} z!*y~cZ#mN|`MPSoBO-1)+P5oej&rW9PqT1{R5U8Awsi;z98{p>VV!N^RHkRr(p!F_ 
z>WG*Yvmx0%O|%NNi%cw=Tq4a>np#@5a`LXEx|`J$c{_WwRhnLuS3e(k&lukXFtT%U z@$?@R9$bFjsZGUAu|y}y#iud3pgvHeu(r-@w8<8gHtS-C_Ua-7Pm{WqKsoD%E9%vw z+~SIKO5`=GuGY4U4lKxy_c1R_ceC?}jNh~=E^Z@(*w_slVq(^>kB*LBw=ODbEr-Z8 z1QCGMtFetC2p55cIy}NzNQ@7EM&nsct>>N_G@VIsR&Ye~UaPQu2WAHPet?lQZ z?-6i^P$EFX-Roh1I6x0s;HWNm=K$Jvvk;5gTK=f4x z-(LCVGGe|aKs+*|SO(y_bn)v8U%@i#JeB4vy2%2qh2&Rz?%cVvHGnf`=y~?+*%~N0 z&YU^@MRj%c=lFeb`ZR(-oc;o+&k2D10%#xvRF!}xJmEg)bRw2gq2w5r)3TH~HH-TE z^D4k84)F0zkcR*Q5ho*3MnZ*Pg=3{-S+KGZ{xonLkPUf2NNn7N>ZQ*qLJZiy9>a`r{R0=9vgDv^oaq8@u@&_lDvGc zUgA`UF-qg~7t97=_!-B=_wKi2*!}uzJ537kV#wV`O5$uOxYF@2j!jAl0Q!!=u*ib} zv9S~$Hc!*^!9dJFOr9qr`xIjsrO4LBbk4KtkSu2vN=p-)I2Q_&pTq&4RRL8JL606{ zZL`)9T7a5>nh*|eG)^R-fWtX^pFVw*0Vqtz_mh;K6_YJE%qM*z(Z}Bd&#qP_F;XCA z!A^7nNr&V_5+28+coPwZVyVN3z^HrmqB>!sQgk1rcnAg^Rpxa& z5{X3@T#D=}eaSnC9ZDo3SHyL2c}sVAQnwRdR@TMnpsFCCF2gH^cP0vp%P?Py33HF{ zU6@z;knMfk&wC^dzF&T!ZX*TsBMwp&d?J!uX#|A|j~qUHr10p`qN7I(j}#UjEjmVk z5QD-a1&8zV^A8_BoPY4Z{@i_loV~bI^YN~XjGgJ}X&-)&lJfrh$;msClD2Q(o|w38 z+qT40pJO7VN$>O*pI23$1TfQ*FMvCH7VE${O#WjKsRn&Qu#tv&zM1uQMJ ziqslIdHe3Wt5+^#A-Z_^8>~%rb@lc2SnYr%_zs%V zA7Kd6c>OwzK?r_KOibL0TR^Kq{oQQM#*Z-`KXJ;mX*N?PjvYU7vZd7&3v=t4b~9$p zvYj%?!rFGa|m-VmQ+}!<$B|-wiQ@7i<4|vW(SdPxg$=S;w2hreS94vMpVzLSL=HwuXVpw^= zh-`8o>JUUh;DN*w326s1PWY--VPRpRp(|Ie3<+655E2ryGBh+4*sxUqF5tieD$p@v zxB%jy{{TiUauAmc5Sp8ItNN&q1#loKHO zv(K0hj|&7}0r)-)qX**$a9#lN6NurCE;eoiv_YZG6x&MCzDfpI6t-Jzzr(gSe3Jt& z4Q+;j_beOl80TUOqLI+P2%%Njk6M-CsBO_-d6&T$Dk(W}q6C1!#K%)HjTTH&08C9Z zcrey5(rB#FkUI_*`kgp|?eF*B*Vq5h&_L6`Pe1+C)YOD$W8?K}KQ{dE1MniyJOL5_ zxz~RD5fPjK_G=UYCW<7!`tPxuW_w`T9%G`C1yb!9*!~ux%BQ zio`?#ehEkd)Wn~DS_b&!lTS)ZOHXtGKr;Y^2%u*@rMw)yL()MgElP?3#iP@F1}Mkt zqvk}w#L56T5rn84H<}4<0GOxwCI=CTDrX?X3$aof(A~UA#5bkFYD){^3CE!gK>Jk& zv}YA6M&jYKJ5BItwjUXw`KX{&NH|y&&<~i02}I0aUoR{aw2lBED@lMD00`qr9Y{!) z(ndgdcla89IMs%0v`(b?k*iA&yo+PSx-ut094m(}C&=11y|ka?$}owI-DsNo*uwav~%> zT6s}ERzRu)UOZ&ZBup+-GBOfCsp46E@%rbvgOugZ)5Q}-A@TA%h|Z9mO(6xFiU16W z#!Rp_dD|3HF#dBe!9nRR>_7zAm6`eR?%i2gd-hL*u3~T_?Y~BOd(3AO?*BUUD!;~qO04QIeewoNrHx`p8Pqv&g#mdTx zQ!p1H5KgwRu;8j4QqhpA2E>*i23X+-ipf#}L|RTkBGyy_07}C>)>Dxf(&Abkro|yf z0_2Sp5J6a!N2(EmmjP5hq*IzeDkD%JfRGGekkC*ejT2KJ&xd#~e^`U03C) zN*RDSMj}}yQaE*rwGg#4wHFehSEH7uHsP8s9%L9qi5ot67*HoTLkz5ouvml;#eOV? 
zr$$c!B(Wl*qe~4UHVccPu-H_rukf6u4TkDuO~$e#W{~ryl=K9O2OG5;14@Dtr_89o zF$au1K>Y)HJ1Fs}f#BBGQW%L!9pEU6;TQpON1vvOAl)Wf7cPPjd(rsJLvKOD;0&dX zjzOIS9@H}``79({bPS3|2^kmbGrSpCtI!+Mn|0i|W4HcU*MLnO6AYNk_IICQ2l%M~ z9Lq^W6w;}vsF0*4B_SV2!}BTa9kFEuG4Dx2?{arvxX^8(o15DL4sa20BNSoX7P!uz z@4{jJd{+Pk=Q#^7Z{9p-{P1*ko;!E0late2C%6a^Qsfg{@Xd3c>ojMM<7@&fAOPld z1k3^8L0-hu;=&-NS`7AC}n|kA5wrS1Ui$$ zhoKaaf=qy^HJ}z@+7w2?bT%s1su?s1fF;1yMNG#5*_a_&0*4L}FHi&mEfp3?Qahs!NtQ+$tW+EiEN^CNXBJ7N zVd+rBUi{0U4#2orI=m;ys8x6rD>o7&y$W{ChzB1fh*DGU!Csnmf=mu@qGLiIJ1G$*KZcPAdlCnH6cIOW;yFAS^IDy}?2Tj|LMoIex}jKVl!gJlkQ7i>I;iA_V=@I7G$?RGCV0M3THuFhl&H1$e9X9k|-x4e%2f)s7S_n#gjc^ zAZ|*No?RqBiAW>GH)F3P@Nx>t&{gis4{ZAO6+FtyQXlvJM!R+qO^x$}v;ldaXJO%} zOA^P!LQp2ayb`KMdV29WtwXnxb_)KwboSh7>Jw@}h-V=jZj0wvefmhwOsRh8C`&xq zV*UpobTZDc{D@p6U?F1V6-;Pe;VezTOAcdT;l6$GzLKs^?=A_*ju07SCvS)9663l= zijh)RN+#kuVx*y-=vCqnd6diRgFiVP>y|?2mFc3rdrJp^BxQ={-JZ1}I-A(gl39=S zHa-vV(Z>b{4G>{~i<}ZxF$uV=_fqS#j=-5{WGPBU8t*%j-Xrvzj`SiQcPS}GCR5r@ zIT4{`_;&J$iOD7-O-XDPajq2X(@3x1*7h@OmT)Fa#uWE|!Kpa@_vkTWy3O=yw)PJ8 z_OqRw9OulNKhJ6Id4h5*jsV*PtF#*BbiD8*hysGujA;nPbL|A3t`?TcZqL(SPYB zgV&74jD36jXrqya!(SzM<&{@neZ}DAVMBFZ(AL&{QBPlg*svFMp4S?nra~W`RaSdu z;1HdmdN1l|4;rX}@1FPR)3#x%1>8GBM`=#w> zQ&aP;pL@#pd{Xg=Cl%2derdaV^T!{5sH<=I@p?0};ktXLrRiGzx8HsZvAgo*=arRE zL7YB!_QItrKVHAyeCt-*FJv{)qlX-qX6U&Y5Kpj40xZbF@KOBc9i*{@7{>sz96;93 zgCG$#m&vjq(XI;mR?@x#)-WY0l%`^kN3$qFH!JE{nPznjf0AyNq6L>b9iBWrAstbl z8^QKFw7VhA?Ao;~TkL{3P>Q6oWe1lcifeI!lg(JnY4s{xya@7#;u-sjWj@Q6`vL_+u6xh`(*i@kmPSA>Q~tc{M@uxaz=t=phU zhGK&$nV2#P`l~8ZSdoH?yac3=;sAXDX{n%!I0=PC*(aqZ=xFcw@!}HpEK*VF*H3kT z`ZLdJ4g_dw!d#Raiw-~^e3qM!s>6a5ehI-d>gt3-9~J(?ucxP{J9H?z`%A<0bgAdR zpfglwD8_(}4u0?e_W}ZJRA_5!KcBQ6(tpC%El~EO(h|09-@YT|{nQWB(lau4?ataG z3RC+J9>O6zWIh=ojE)*0Ju@Nb<6R$T?(Dq zU76G$Kos)1`XG+o-2$jU6uo`E-ael6gAID*mB=~m%8HN=u=D9$Ee($ERq@rf^Wb)w*!V+;OHvDIgzaQhM+GEneT$WB zs0~foGx1HFB;vB*8Dl7wgFP~Ozx!RxO$C%AqwOyf;<31kUoW0wG4b5@OG-L>b+A>& zEvo;RIuqj2>(>|M;tWEE2V+V?CzST@4!ahn)xL^|UVOBKLzO1nY(8EdG4^g}eTCwO zUdp$6LZ$;XYBT=ul%C-b7OC0ogYK)isM9K_nk>R|NX8@xbXRS2rkU8gx7j-t6%BN_s(Za%a2o)59%O?v9c;GPOr(SRvt|VbHL!$1t!k zrO;AlSYZ;|(v8%Iq%m9s;-jM~wjCy-G8F9o56-`ox~5>&v2nvXakjn(qP5`!Icqmr z*gfR@K3(oaF~e+cI@k5YtVQ8f$&-R*qv_?S*9~WRQdtjDPheghYLarkpMGzi>(LWc zK4Ft@!ecjsMvT5HG(73OS^X@Q83{Cs)`&LMv~63cSdq>()jm7yP_eR#&OdCN_ELl+ zC%f`>SbA5^S~&8gs?T=|Hv`pkb46di1p>N+b4=H`8<`@xbL&0#WGqSMSd?w~?j81c;r zPa))u7CI5$>h5k_J{t>*gOXlly8?%Yhc6=R?qb#4Fbfck3l9qmZ*g~PWhb=3W?%mc zdveZ}FD-257vAZ?@@J3Cdt)u;xv~$2LiV)QmEqf`q#N0szvTpNLOuN0toVwj*)sow zVa*lCqjiEu!-5<+BHk#%PGc=1F+SxbDp#s)d6Q*^0qW{C{;x02j#M^FfeXcOOk+ac zPM@`KmDcs~;=iDK_j@t#>&qQRI`d1U%F)ZGo*<{KY}Ok=OxmKl>X;+F%0R7hOqWsu z5L>s2K@U}m%63b0Qx>hbB6UQdznp%RcEL#C$7o|E|LiMPlv3eQ0o&%fCownL;2@72 z>zAXw=`hxQ#mb$Iu+__^+R54SlxBpjN7EV$4r@m-<->(98zO8jHwcE(Th4H3-fB9* zUaTB;6@JNJVq((jo{h(2Yz|dmZP00zVV7pjpvJ7FT1MXU;}G8R}oN zHBufWfF-7-UXy%j!RBM1H!5KoPgTS1yi;!#Z|vK-EI>@iT^>{JZ<73iJqK%Pgx-j% zGY_%vkd3WE>nkglaYO|~7{rt0nFCDf0Kj?~Wc$Spt^K)hmiFnQOy_Z#{e2uK)u~%7x{FaO%RUNC@Aw#i!&fXNSvQ zknsA2wG*v{YE7~kw7)mHgvO+6`7HcEaBgkbCCg5v-@dTjmIhaI?z3L)01psIOG(R0 zYjong+v#dOKInD{SqDRs^}yHn;xR2mVT+h4ES!lbi}p6cn-^IRgmAqX-3^iL;KO_! 
[... remaining base85-encoded GIT binary patch data omitted ...]
zvU|sg9j>3(RJp_Ki5=>zydOv{So_F0^!kfEk8ik_ebcgiar|-+TDM*!w8o9&2)mno zKmd7H8N`&&uR2Zq1k1`kDDU{rOqI!`zwBe(PQ6`kFaQ!g=-O^l^?i z$AX2Y+e}QrSD#nUTcD*(HQU}hLAFrhF{S5?_1?9JZZ^B|A`^_3<`d-aLtb|h-@Gi} zq-oAkk(~9pX&(4VZ1|dY7lL_1K9T;@@oGQVuAzhL=4VMlRO!pnwZ)q37mt%ocX|-K zSrsN6s5UBkbTdjyoKT8s5D>laH=MdsB&Kj>n<+=m5zSG)dgog}1Q5s{<(|{SBOsz+ zQ;NO;ga-aqxj}j0lo|TTGgtKTy^vjmDu;%J1x`LynZnX;VKhqE78pOtH%VBKSFiF+ zo6hf*hgx3*wJ$zQVPwC}9$daA54G`NNpnO>I;0D?KrxI|Df^qU-&n^$662igRazR<()?vR)C4aN~z0x%K z;rd}clz(dogdN-K!s){Ls{iKhe;-aYgkyyLjsi?z0tV2XG*h@=6jok1fD#H&{%5b; z;k1AXOd$Z~Tg|`gE!;88$D7|e1fm*cQE()%KZyM%ZvIjTAOzZQBCrD-T+R2XaX=Uo zr~wiRFoI;!0qzx%hhi!0;y>lTg211G0Dqx+C-!$jbSLI_LUSkhaDVQmJ0ZOjtUKG` z&i=Zya~|;(kNsCx@=u@t7X*G$z{TAEc;fNdF3B4v1OTIeyE+@2+8Bag0%t>aV}O)x zVhjG8CXEFQ2m5fsio2DwtDCVcvpw@K3wS^!D_#*y?}+Bd`BSkay=|?$ECO(|h(We20)1MGTAX$NPpsj-VICB1|+1Lfby zQ~n!yN{zo*_>{^343wI%9R#Dz!^Q{D2OF9?gMkx(%kTN)|1lmXJ0}MV2L}r$za|H_ z00)l%CnxY7L-flQM&3946)s~7u%Vfo^DpZEZyzv;n!mT2>Qi=peo1(Mb`Bfyt`)F; z=ifpWFy#ImnS-DIDU9pS$*1`@*B==9H~M#d0HOwfEv)=UAHeJZy#Ov2F7|)>_}%lr H)A4@+r%wR= literal 0 HcmV?d00001 diff --git a/src/py_eddy_tracker/observations/network.py b/src/py_eddy_tracker/observations/network.py index 90bf6b70..8f592056 100644 --- a/src/py_eddy_tracker/observations/network.py +++ b/src/py_eddy_tracker/observations/network.py @@ -1327,7 +1327,7 @@ def extract_light_with_mask(self, mask): ) new.sign_type = self.sign_type if nb_obs == 0: - logger.warning("Empty dataset will be created") + logger.info("Empty dataset will be created") else: logger.info( f"{nb_obs} observations will be extracted ({nb_obs / self.shape[0]:.3%})" @@ -1353,7 +1353,7 @@ def extract_with_mask(self, mask): new = self.__class__.new_like(self, nb_obs) new.sign_type = self.sign_type if nb_obs == 0: - logger.warning("Empty dataset will be created") + logger.info("Empty dataset will be created") else: logger.debug( f"{nb_obs} observations will be extracted ({nb_obs / self.shape[0]:.3%})" diff --git a/src/py_eddy_tracker/observations/observation.py b/src/py_eddy_tracker/observations/observation.py index dec9a6b0..56e0f67d 100644 --- a/src/py_eddy_tracker/observations/observation.py +++ b/src/py_eddy_tracker/observations/observation.py @@ -1987,7 +1987,7 @@ def display(self, ax, ref=None, extern_only=False, intern_only=False, **kwargs): """Plot the speed and effective (dashed) contour of the eddies :param matplotlib.axes.Axes ax: matplotlib axe used to draw - :param float,None ref: western longitude reference used + :param float,None ref: if defined, all coordinates are wrapped with ref as western boundary :param bool extern_only: if True, draw only the effective contour :param bool intern_only: if True, draw only the speed contour :param dict kwargs: look at :py:meth:`matplotlib.axes.Axes.plot` @@ -2082,7 +2082,6 @@ def inside(self, x, y, intern=False): :rtype: array[bool] """ xname, yname = self.intern(intern) - # FIXME: wrapping return insidepoly(x, y, self[xname], self[yname]) def grid_count(self, bins, intern=False, center=False, filter=slice(None)): diff --git a/src/py_eddy_tracker/observations/tracking.py b/src/py_eddy_tracker/observations/tracking.py index 3aa43387..2914df6b 100644 --- a/src/py_eddy_tracker/observations/tracking.py +++ b/src/py_eddy_tracker/observations/tracking.py @@ -445,6 +445,7 @@ def loess_filter(self, half_window, xfield, yfield, inplace=True): if inplace: self.obs[yfield] = result return self + return result def median_filter(self, half_window, xfield, yfield, inplace=True): result = track_median_filter( @@ -501,7 +502,7 @@ def 
diff --git a/src/py_eddy_tracker/observations/tracking.py b/src/py_eddy_tracker/observations/tracking.py
index 3aa43387..2914df6b 100644
--- a/src/py_eddy_tracker/observations/tracking.py
+++ b/src/py_eddy_tracker/observations/tracking.py
@@ -445,6 +445,7 @@ def loess_filter(self, half_window, xfield, yfield, inplace=True):
         if inplace:
             self.obs[yfield] = result
             return self
+        return result

     def median_filter(self, half_window, xfield, yfield, inplace=True):
         result = track_median_filter(
@@ -501,7 +502,7 @@ def extract_with_mask(
         new = self.__class__.new_like(self, nb_obs)
         new.sign_type = self.sign_type
         if nb_obs == 0:
-            logger.warning("Empty dataset will be created")
+            logger.info("Empty dataset will be created")
         else:
             for field in self.obs.dtype.descr:
                 logger.debug("Copy of field %s ...", field)
@@ -567,8 +568,11 @@ def close_tracks(self, other, nb_obs_min=10, **kwargs):
         It could be a costly operation for huge dataset
         """
         p0, p1 = self.period
+        p0_other, p1_other = other.period
+        if p1_other < p0 or p1 < p0_other:
+            return other.__class__.new_like(other, 0)
         indexs = list()
-        for i_self, i_other, t0, t1 in self.align_on(other, bins=range(p0, p1 + 2)):
+        for i_self, i_other, t0, t1 in self.align_on(other, bins=arange(p0, p1 + 2)):
             i, j, s = self.match(other, i_self=i_self, i_other=i_other, **kwargs)
             indexs.append(other.re_reference_index(j, i_other))
         indexs = concatenate(indexs)
@@ -578,10 +582,7 @@ def format_label(self, label):
         t0, t1 = self.period
         return label.format(
-            t0=t0,
-            t1=t1,
-            nb_obs=len(self),
-            nb_tracks=(self.nb_obs_by_track != 0).sum(),
+            t0=t0, t1=t1, nb_obs=len(self), nb_tracks=(self.nb_obs_by_track != 0).sum(),
         )

     def plot(self, ax, ref=None, **kwargs):
diff --git a/src/py_eddy_tracker/poly.py b/src/py_eddy_tracker/poly.py
index 56fb55e7..abe8becb 100644
--- a/src/py_eddy_tracker/poly.py
+++ b/src/py_eddy_tracker/poly.py
@@ -865,7 +865,7 @@ def poly_indexs(x_p, y_p, x_c, y_c):
     nb_p = x_p.shape[0]
     nb_c = x_c.shape[0]
     indexs = -ones(nb_p, dtype=numba_types.int32)
-    # Adress table to get particle bloc
+    # Address table to get test bloc
     start_index, end_index, i_first = build_index(i[i_order])
     nb_bloc = end_index.size
     for i_contour in range(nb_c):
@@ -918,20 +918,4 @@ def insidepoly(x_p, y_p, x_c, y_c):
     :param array x_c: longitude of contours
     :param array y_c: latitude of contours
     """
-    # TODO must be optimize like poly_index
-    nb_p = x_p.shape[0]
-    nb_c = x_c.shape[0]
-    flag = zeros(nb_p, dtype=numba_types.bool_)
-    for i in range(nb_c):
-        x_, y_ = reduce_size(x_c[i], y_c[i])
-        x_c_min, y_c_min = x_.min(), y_.min()
-        x_c_max, y_c_max = x_.max(), y_.max()
-        v = create_vertice(x_, y_)
-        for j in range(nb_p):
-            if flag[j]:
-                continue
-            x, y = x_p[j], y_p[j]
-            if x > x_c_min and x < x_c_max and y > y_c_min and y < y_c_max:
-                if winding_number_poly(x, y, v) != 0:
-                    flag[j] = True
-    return flag
+    return poly_indexs(x_p, y_p, x_c, y_c) != -1

From 5f01ee9198568914d815ebef851fdf8e04cfee09 Mon Sep 17 00:00:00 2001
From: Antoine <36040805+AntSimi@users.noreply.github.com>
Date: Mon, 13 Dec 2021 16:17:58 +0100
Subject: [PATCH 056/115] update python version for doc

---
 doc/environment.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/doc/environment.yml b/doc/environment.yml
index db50b528..7dcb504d 100644
--- a/doc/environment.yml
+++ b/doc/environment.yml
@@ -2,7 +2,7 @@ channels:
   - conda-forge
   - defaults
 dependencies:
-  - python=3.7
+  - python=3.8
   - ffmpeg
   - pip:
     - sphinx-gallery

From 4ffad53976d4ab4d54d256994da23dff1bb74b32 Mon Sep 17 00:00:00 2001
From: Antoine <36040805+AntSimi@users.noreply.github.com>
Date: Mon, 13 Dec 2021 16:20:15 +0100
Subject: [PATCH 057/115] env for binder minimal 3.8

---
 environment.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/environment.yml b/environment.yml
index cf1de6f6..4ea8f840 100644
--- a/environment.yml
+++ b/environment.yml
@@ -3,7 +3,7 @@ channels:
   - conda-forge
   - defaults
 dependencies:
-  - python=3.7
+  - python=3.8
   - ffmpeg
   - pip:
     - -r requirements.txt
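A note on the poly.py change in PATCH 055 above: `insidepoly` is now a thin wrapper around `poly_indexs`, which returns the index of the contour containing each point, or -1 when no contour contains it, so the boolean mask is simply a comparison with -1. A minimal sketch of this equivalence (the contours and points below are illustrative values, not from the patch; expected results are indicated in comments):

    import numpy as np
    from py_eddy_tracker.poly import insidepoly, poly_indexs

    # two closed square contours (one row per contour) and three test points,
    # the last point lying outside every contour
    x_c = np.array([[0, 1, 1, 0, 0], [2, 3, 3, 2, 2]], dtype="f8")
    y_c = np.array([[0, 0, 1, 1, 0], [0, 0, 1, 1, 0]], dtype="f8")
    x_p = np.array([0.5, 2.5, 5.0])
    y_p = np.array([0.5, 0.5, 5.0])

    i = poly_indexs(x_p, y_p, x_c, y_c)    # expected [0, 1, -1]
    mask = insidepoly(x_p, y_p, x_c, y_c)  # expected [True, True, False]
    assert ((i != -1) == mask).all()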
From af8e44363d9e166b8ef4e8dcd2c458654cb289e5 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Cl=C3=A9ment?= <49512274+ludwigVonKoopa@users.noreply.github.com>
Date: Thu, 16 Dec 2021 09:42:05 +0100
Subject: [PATCH 058/115] Fixes (#121)

- correction of coherence forward & backward, when the time needed is shorter than the time available
- bug when extracting a zarr network which has the same number of observations and contour points
- expose underlying parameters to users (min_overlap, minimal_area)
- correction of a bug in zarr nb_obs & track_array_variables, when no vars with 2 dimensions were selected
- bug when loading EddiesObservation, rotation_type was not loaded
- bug in tracking, previous_virtual_obs was not loaded from VirtualEddiesObservations
- warning when loading data with different py-eddy-tracker versions
- changes to extract_light_with_mask
  - possibility to select extra variables to extract
---
 CHANGELOG.rst                                 |  7 ++
 .../pet_eddy_detection_ACC.py                 | 13 +--
 .../16_network/pet_replay_segmentation.py     |  8 +-
 src/py_eddy_tracker/__init__.py               | 13 +--
 src/py_eddy_tracker/appli/eddies.py           | 18 +---
 src/py_eddy_tracker/appli/network.py          | 41 +++++++-
 src/py_eddy_tracker/dataset/grid.py           |  8 +-
 src/py_eddy_tracker/eddy_feature.py           |  4 +-
 src/py_eddy_tracker/observations/network.py   | 53 +++++++---
 .../observations/observation.py               | 96 ++++++++++++-------
 src/py_eddy_tracker/observations/tracking.py  | 20 ++--
 src/py_eddy_tracker/tracking.py               |  3 +
 12 files changed, 175 insertions(+), 109 deletions(-)

diff --git a/CHANGELOG.rst b/CHANGELOG.rst
index c6ab4cac..110c6081 100644
--- a/CHANGELOG.rst
+++ b/CHANGELOG.rst
@@ -15,6 +15,10 @@ Changed
   New identifications are produced with this type, old files could still be loaded.
   If you use old identifications for tracking use the `--unraw` option to unpack old times and store data with the new format.
 - Now amplitude is stored with .1 mm of precision (instead of 1 mm), same advice as for time.
+- expose more parameters to users for bash tools build_network & divide_network
+- add warning when loading a file created from a previous version of py-eddy-tracker.
+
+
 Fixed
 ^^^^^
 - Fix bug in convolution(filter), lowest rows was replace by zeros in convolution computation.
Important impact for tiny kernel - Fix method of sampling before contour fitting +- Fix bug when loading dataset in zarr format, not all variables were correctly loaded +- Fix bug when zarr dataset has same size for number of observations and contour size +- Fix bug when tracking, previous_virtual_obs was not always loaded Added ^^^^^ diff --git a/examples/02_eddy_identification/pet_eddy_detection_ACC.py b/examples/02_eddy_identification/pet_eddy_detection_ACC.py index c799a45e..e6c5e381 100644 --- a/examples/02_eddy_identification/pet_eddy_detection_ACC.py +++ b/examples/02_eddy_identification/pet_eddy_detection_ACC.py @@ -65,8 +65,7 @@ def set_fancy_labels(fig, ticklabelsize=14, labelsize=14, labelweight="semibold" y_name="latitude", # Manual area subset indexs=dict( - latitude=slice(100 - margin, 220 + margin), - longitude=slice(0, 230 + margin), + latitude=slice(100 - margin, 220 + margin), longitude=slice(0, 230 + margin), ), ) g_raw = RegularGridDataset(**kw_data) @@ -188,16 +187,10 @@ def set_fancy_labels(fig, ticklabelsize=14, labelsize=14, labelweight="semibold" ax.set_ylabel("With filter") ax.plot( - a_[field][i_a] * factor, - a[field][j_a] * factor, - "r.", - label="Anticyclonic", + a_[field][i_a] * factor, a[field][j_a] * factor, "r.", label="Anticyclonic", ) ax.plot( - c_[field][i_c] * factor, - c[field][j_c] * factor, - "b.", - label="Cyclonic", + c_[field][i_c] * factor, c[field][j_c] * factor, "b.", label="Cyclonic", ) ax.set_aspect("equal"), ax.grid() ax.plot((0, 1000), (0, 1000), "g") diff --git a/examples/16_network/pet_replay_segmentation.py b/examples/16_network/pet_replay_segmentation.py index 757854d5..d6b4568b 100644 --- a/examples/16_network/pet_replay_segmentation.py +++ b/examples/16_network/pet_replay_segmentation.py @@ -149,13 +149,7 @@ def get_obs(dataset): n_.median_filter(15, "time", "latitude") kw["s"] = (n_.radius_e * 1e-3) ** 2 / 30 ** 2 * 20 m = n_.scatter_timeline( - ax, - "shape_error_e", - vmin=14, - vmax=70, - **kw, - yfield="lon", - method="all", + ax, "shape_error_e", vmin=14, vmax=70, **kw, yfield="lon", method="all", ) ax.set_ylabel("Longitude") cb = update_axes(ax, m["scatter"]) diff --git a/src/py_eddy_tracker/__init__.py b/src/py_eddy_tracker/__init__.py index f3ecec84..275bb795 100644 --- a/src/py_eddy_tracker/__init__.py +++ b/src/py_eddy_tracker/__init__.py @@ -422,20 +422,14 @@ def identify_time(str_date): nc_name="previous_cost", nc_type="float32", nc_dims=("obs",), - nc_attr=dict( - long_name="Previous cost for previous observation", - comment="", - ), + nc_attr=dict(long_name="Previous cost for previous observation", comment="",), ), next_cost=dict( attr_name=None, nc_name="next_cost", nc_type="float32", nc_dims=("obs",), - nc_attr=dict( - long_name="Next cost for next observation", - comment="", - ), + nc_attr=dict(long_name="Next cost for next observation", comment="",), ), n=dict( attr_name=None, @@ -646,8 +640,7 @@ def identify_time(str_date): nc_type="f4", nc_dims=("obs",), nc_attr=dict( - long_name="Log base 10 background chlorophyll", - units="Log(Chl/[mg/m^3])", + long_name="Log base 10 background chlorophyll", units="Log(Chl/[mg/m^3])", ), ), year=dict( diff --git a/src/py_eddy_tracker/appli/eddies.py b/src/py_eddy_tracker/appli/eddies.py index df4e7d43..4809fddf 100644 --- a/src/py_eddy_tracker/appli/eddies.py +++ b/src/py_eddy_tracker/appli/eddies.py @@ -243,8 +243,7 @@ def browse_dataset_in( filenames = bytes_(glob(full_path)) dataset_list = empty( - len(filenames), - dtype=[("filename", "S500"), ("date", "datetime64[s]")], 
+ len(filenames), dtype=[("filename", "S500"), ("date", "datetime64[s]")], ) dataset_list["filename"] = filenames @@ -372,8 +371,7 @@ def track( logger.info("Longer track saved have %d obs", c.nb_obs_by_tracks.max()) logger.info( - "The mean length is %d observations for long track", - c.nb_obs_by_tracks.mean(), + "The mean length is %d observations for long track", c.nb_obs_by_tracks.mean(), ) long_track.write_file(**kw_write) @@ -383,14 +381,7 @@ def track( def get_group( - dataset1, - dataset2, - index1, - index2, - score, - invalid=2, - low=10, - high=60, + dataset1, dataset2, index1, index2, score, invalid=2, low=10, high=60, ): group1, group2 = dict(), dict() m_valid = (score * 100) >= invalid @@ -499,8 +490,7 @@ def get_values(v, dataset): ] labels = dict( - high=f"{high:0.0f} <= high", - low=f"{invalid:0.0f} <= low < {low:0.0f}", + high=f"{high:0.0f} <= high", low=f"{invalid:0.0f} <= low < {low:0.0f}", ) keys = [labels.get(key, key) for key in list(gr_ref.values())[0].keys()] diff --git a/src/py_eddy_tracker/appli/network.py b/src/py_eddy_tracker/appli/network.py index 5c4cdcaf..e9baa7be 100644 --- a/src/py_eddy_tracker/appli/network.py +++ b/src/py_eddy_tracker/appli/network.py @@ -21,6 +21,20 @@ def build_network(): parser.add_argument( "--window", "-w", type=int, help="Half time window to search eddy", default=1 ) + + parser.add_argument( + "--min-overlap", + "-p", + type=float, + help="minimum overlap area to associate observations", + default=0.2, + ) + parser.add_argument( + "--minimal-area", + action="store_true", + help="If True, use intersection/little polygon, else intersection/union", + ) + parser.contour_intern_arg() parser.memory_arg() @@ -32,7 +46,9 @@ def build_network(): intern=args.intern, memory=args.memory, ) - group = n.group_observations(minimal_area=True) + group = n.group_observations( + min_overlap=args.min_overlap, minimal_area=args.minimal_area + ) n.build_dataset(group).write_file(filename=args.out) @@ -44,6 +60,18 @@ def divide_network(): parser.add_argument( "--window", "-w", type=int, help="Half time window to search eddy", default=1 ) + parser.add_argument( + "--min-overlap", + "-p", + type=float, + help="minimum overlap area to associate observations", + default=0.2, + ) + parser.add_argument( + "--minimal-area", + action="store_true", + help="If True, use intersection/little polygon, else intersection/union", + ) args = parser.parse_args() contour_name = TrackEddiesObservations.intern(args.intern, public_label=True) e = TrackEddiesObservations.load_file( @@ -52,7 +80,12 @@ def divide_network(): ) n = NetworkObservations.from_split_network( TrackEddiesObservations.load_file(args.input, raw_data=True), - e.split_network(intern=args.intern, window=args.window), + e.split_network( + intern=args.intern, + window=args.window, + min_overlap=args.min_overlap, + minimal_area=args.minimal_area, + ), ) n.write_file(filename=args.out) @@ -76,9 +109,7 @@ def subset_network(): help="Remove short dead end, first is for minimal obs number and second for minimal segment time to keep", ) parser.add_argument( - "--remove_trash", - action="store_true", - help="Remove trash (network id == 0)", + "--remove_trash", action="store_true", help="Remove trash (network id == 0)", ) parser.add_argument( "-p", diff --git a/src/py_eddy_tracker/dataset/grid.py b/src/py_eddy_tracker/dataset/grid.py index 5b884b68..091d2016 100644 --- a/src/py_eddy_tracker/dataset/grid.py +++ b/src/py_eddy_tracker/dataset/grid.py @@ -858,13 +858,11 @@ def eddy_identification( xy_i = uniform_resample( 
inner_contour.lon,
                 inner_contour.lat,
-                num_fac=presampling_multiplier
-            )
-            xy_e = uniform_resample(
-                contour.lon,
-                contour.lat,
                 num_fac=presampling_multiplier,
             )
+            xy_e = uniform_resample(
+                contour.lon, contour.lat, num_fac=presampling_multiplier,
+            )
             xy_s = uniform_resample(
                 speed_contour.lon,
                 speed_contour.lat,
diff --git a/src/py_eddy_tracker/eddy_feature.py b/src/py_eddy_tracker/eddy_feature.py
index d2616957..3640b306 100644
--- a/src/py_eddy_tracker/eddy_feature.py
+++ b/src/py_eddy_tracker/eddy_feature.py
@@ -433,8 +433,8 @@ def __init__(self, x, y, z, levels, wrap_x=False, keep_unclose=False):
         closed_contours = 0
         # Count level and contour
         for i, collection in enumerate(self.contours.collections):
-            collection.get_nearest_path_bbox_contain_pt = (
-                lambda x, y, i=i: self.get_index_nearest_path_bbox_contain_pt(i, x, y)
+            collection.get_nearest_path_bbox_contain_pt = lambda x, y, i=i: self.get_index_nearest_path_bbox_contain_pt(
+                i, x, y
             )

             nb_level += 1
diff --git a/src/py_eddy_tracker/observations/network.py b/src/py_eddy_tracker/observations/network.py
index 8f592056..1c078bf8 100644
--- a/src/py_eddy_tracker/observations/network.py
+++ b/src/py_eddy_tracker/observations/network.py
@@ -1301,7 +1301,7 @@ def extract_with_period(self, period):

         return self.extract_with_mask(self.get_mask_with_period(period))

-    def extract_light_with_mask(self, mask):
+    def extract_light_with_mask(self, mask, track_extra_variables=[]):
         """extract data with mask, but only with variables used for coherence, aka self.array_variables

         :param mask: mask used to extract
@@ -1319,7 +1319,7 @@ def extract_light_with_mask(self, mask):
         variables = ["time"] + self.array_variables
         new = self.__class__(
             size=nb_obs,
-            track_extra_variables=[],
+            track_extra_variables=track_extra_variables,
             track_array_variables=self.track_array_variables,
             array_variables=self.array_variables,
             only_variables=variables,
@@ -1333,9 +1333,22 @@ def extract_light_with_mask(self, mask):
             f"{nb_obs} observations will be extracted ({nb_obs / self.shape[0]:.3%})"
         )

-        for field in variables:
+        for field in variables + track_extra_variables:
             logger.debug("Copy of field %s ...", field)
             new.obs[field] = self.obs[field][mask]
+
+        if (
+            "previous_obs" in track_extra_variables
+            and "next_obs" in track_extra_variables
+        ):
+            # n & p must be re-indexed
+            n, p = self.next_obs[mask], self.previous_obs[mask]
+            # one extra slot so that the sentinel index -1 still maps to -1
+            translate = -ones(len(self) + 1, dtype="i4")
+            translate[:-1][mask] = arange(nb_obs)
+            new.next_obs[:] = translate[n]
+            new.previous_obs[:] = translate[p]
+
         return new

     def extract_with_mask(self, mask):
@@ -1495,7 +1508,8 @@ def date2file(julian_day):

         t_start, t_end = int(self.period[0]), int(self.period[1])

-        dates = arange(t_start, t_start + n_days + 1)
+        # dates = arange(t_start, t_start + n_days + 1)
+        dates = arange(t_start, min(t_start + n_days + 1, t_end + 1))
         first_files = [date_function(x) for x in dates]

         c = GridCollection.from_netcdf_list(first_files, dates, **uv_params)
@@ -1570,12 +1584,8 @@ def date2file(julian_day):
         ptf_final = zeros((self.obs.size, 2), dtype="i1")

         t_start, t_end = int(self.period[0]), int(self.period[1])
-        # if begin is not None and begin > t_start:
-        #     t_start = begin
-        # if end is not None and end < t_end:
-        #     t_end = end

-        dates = arange(t_start, t_start + n_days + 1)
+        dates = arange(t_start, min(t_start + n_days + 1, t_end + 1))
         first_files = [date_function(x) for x in dates]

         c = GridCollection.from_netcdf_list(first_files, dates, **uv_params)
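The extract_light_with_mask hunk above re-indexes previous_obs/next_obs through a translation table built with one extra slot, so that the sentinel index -1 (no neighbour) keeps mapping to -1 after extraction. A standalone sketch of that trick, with illustrative values:

    import numpy as np

    mask = np.array([True, False, True, True])       # observations kept
    translate = -np.ones(mask.size + 1, dtype="i4")  # last slot stays -1 for the sentinel
    translate[:-1][mask] = np.arange(mask.sum())     # old index -> new index
    next_obs = np.array([2, -1, 3, -1])              # -1 means "no next observation"
    new_next = translate[next_obs[mask]]             # -> [1, 2, -1]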
@@ def group_translator(nb, duos): apply_replace(translate, gr_i, gr_j) return translate - def group_observations(self, **kwargs): + def group_observations(self, min_overlap=0.2, minimal_area=False): + """Store every interaction between identifications + + Parameters + ---------- + minimal_area : bool, optional + If True, function will compute intersection/little polygon, else intersection/union, by default False + + min_overlap : float, optional + minimum overlap area to associate observations, by default 0.2 + + Returns + ------- + TrackEddiesObservations + netcdf with interactions + """ + results, nb_obs = list(), list() # To display print only in INFO display_iteration = logger.getEffectiveLevel() == logging.INFO @@ -1713,7 +1739,12 @@ def group_observations(self, **kwargs): for j in range(i + 1, min(self.window + i + 1, self.nb_input)): xj, yj = self.buffer.load_contour(self.filenames[j]) ii, ij = bbox_intersection(xi, yi, xj, yj) - m = vertice_overlap(xi[ii], yi[ii], xj[ij], yj[ij], **kwargs) > 0.2 + m = ( + vertice_overlap( + xi[ii], yi[ii], xj[ij], yj[ij], minimal_area=minimal_area + ) + > min_overlap + ) results.append((i, j, ii[m], ij[m])) if display_iteration: print() diff --git a/src/py_eddy_tracker/observations/observation.py b/src/py_eddy_tracker/observations/observation.py index 56e0f67d..3543caa7 100644 --- a/src/py_eddy_tracker/observations/observation.py +++ b/src/py_eddy_tracker/observations/observation.py @@ -8,6 +8,7 @@ from tarfile import ExFileObject from tokenize import TokenError +import packaging import zarr from matplotlib.cm import get_cmap from matplotlib.collections import PolyCollection @@ -74,6 +75,29 @@ logger = logging.getLogger("pet") +# keep only major and minor version number +_software_version_reduced = packaging.version.Version( + "{v.major}.{v.minor}".format(v=packaging.version.parse(__version__)) +) + + +def _check_versions(version): + """Check if version of py_eddy_tracker used to create the file is compatible with software version + + if not, warn user with both versions + + :param version: string version of software used to create the file. If None, version was not provided + :type version: str, None + """ + + file_version = packaging.version.parse(version) if version is not None else None + if file_version is None or file_version < _software_version_reduced: + logger.warning( + "File was created with py-eddy-tracker version '%s' but software version is '%s'", + file_version, + _software_version_reduced, + ) + @njit(cache=True, fastmath=True) def shifted_ellipsoid_degrees_mask2(lon0, lat0, lon1, lat1, minor=1.5, major=1.5): @@ -687,10 +711,13 @@ def zarr_dimension(filename): h = filename else: h = zarr.open(filename) + dims = list() for varname in h: - dims.extend(list(getattr(h, varname).shape)) - return set(dims) + shape = getattr(h, varname).shape + if len(shape) > len(dims): + dims = shape + return dims @classmethod def load_file(cls, filename, **kwargs): @@ -702,11 +729,7 @@ def load_file(cls, filename, **kwargs): .. 
code-block:: python kwargs_latlon_300 = dict( - include_vars=[ - "longitude", - "latitude", - ], - indexs=dict(obs=slice(0, 300)), + include_vars=["longitude", "latitude",], indexs=dict(obs=slice(0, 300)), ) small_dataset = TrackEddiesObservations.load_file( filename, **kwargs_latlon_300 @@ -754,20 +777,19 @@ def load_from_zarr( :return type: class """ # FIXME - array_dim = -1 if isinstance(filename, zarr.storage.MutableMapping): h_zarr = filename else: if not isinstance(filename, str): filename = filename.astype(str) h_zarr = zarr.open(filename) + + _check_versions(h_zarr.attrs.get("framework_version", None)) var_list = cls.build_var_list(list(h_zarr.keys()), remove_vars, include_vars) nb_obs = getattr(h_zarr, var_list[0]).shape[0] - dims = list(cls.zarr_dimension(filename)) - if len(dims) == 2 and nb_obs in dims: - # FIXME must be investigated, in zarr no dimensions name (or could be add in attr) - array_dim = dims[1] if nb_obs == dims[0] else dims[0] + track_array_variables = h_zarr.attrs["track_array_variables"] + if indexs is not None and "obs" in indexs: sl = indexs["obs"] sl = slice(sl.start, min(sl.stop, nb_obs)) @@ -781,28 +803,33 @@ def load_from_zarr( logger.debug("%d observations will be load", nb_obs) kwargs = dict() - if array_dim in dims: - kwargs["track_array_variables"] = array_dim - kwargs["array_variables"] = list() - for variable in var_list: - if array_dim in h_zarr[variable].shape: - var_inv = VAR_DESCR_inv[variable] - kwargs["array_variables"].append(var_inv) - array_variables = kwargs.get("array_variables", list()) - kwargs["track_extra_variables"] = [] + kwargs["track_array_variables"] = h_zarr.attrs.get( + "track_array_variables", track_array_variables + ) + + array_variables = list() + for variable in var_list: + if len(h_zarr[variable].shape) > 1: + var_inv = VAR_DESCR_inv[variable] + array_variables.append(var_inv) + kwargs["array_variables"] = array_variables + track_extra_variables = [] + for variable in var_list: var_inv = VAR_DESCR_inv[variable] if var_inv not in cls.ELEMENTS and var_inv not in array_variables: - kwargs["track_extra_variables"].append(var_inv) + track_extra_variables.append(var_inv) + kwargs["track_extra_variables"] = track_extra_variables kwargs["raw_data"] = raw_data kwargs["only_variables"] = ( None if include_vars is None else [VAR_DESCR_inv[i] for i in include_vars] ) kwargs.update(class_kwargs) eddies = cls(size=nb_obs, **kwargs) - for variable in var_list: + + for i_var, variable in enumerate(var_list): var_inv = VAR_DESCR_inv[variable] - logger.debug("%s will be loaded", variable) + logger.debug("%s will be loaded (%d/%d)", variable, i_var, len(var_list)) # find unit factor input_unit = h_zarr[variable].attrs.get("unit", None) if input_unit is None: @@ -858,6 +885,7 @@ def copy_data_to_zarr( i_start = 0 if i_stop is None: i_stop = handler_zarr.shape[0] + for i in range(i_start, i_stop, buffer_size): sl_in = slice(i, min(i + buffer_size, i_stop)) data = handler_zarr[sl_in] @@ -868,6 +896,7 @@ def copy_data_to_zarr( data -= add_offset if scale_factor is not None: data /= scale_factor + sl_out = slice(i - i_start, i - i_start + buffer_size) handler_eddies[sl_out] = data @@ -901,6 +930,8 @@ def load_from_netcdf( else: args, kwargs = (filename,), dict() with Dataset(*args, **kwargs) as h_nc: + _check_versions(getattr(h_nc, "framework_version", None)) + var_list = cls.build_var_list( list(h_nc.variables.keys()), remove_vars, include_vars ) @@ -1032,6 +1063,7 @@ def from_zarr(cls, handler): eddies.obs[variable] = 
handler.variables[variable][:] else: eddies.obs[VAR_DESCR_inv[variable]] = handler.variables[variable][:] + eddies.sign_type = handler.rotation_type return eddies @classmethod @@ -1050,6 +1082,7 @@ def from_netcdf(cls, handler): eddies.obs[variable] = handler.variables[variable][:] else: eddies.obs[VAR_DESCR_inv[variable]] = handler.variables[variable][:] + eddies.sign_type = handler.rotation_type return eddies def propagate( @@ -1977,11 +2010,7 @@ def bins_stat(self, xname, bins=None, yname=None, method=None, mask=None): def format_label(self, label): t0, t1 = self.period - return label.format( - t0=t0, - t1=t1, - nb_obs=len(self), - ) + return label.format(t0=t0, t1=t1, nb_obs=len(self),) def display(self, ax, ref=None, extern_only=False, intern_only=False, **kwargs): """Plot the speed and effective (dashed) contour of the eddies @@ -2352,14 +2381,7 @@ def grid_count_pixel_in( x_, y_ = reduce_size(x_, y_) v = create_vertice(x_, y_) (x_start, x_stop), (y_start, y_stop) = bbox_indice_regular( - v, - x_bounds, - y_bounds, - xstep, - ystep, - N, - is_circular, - x_size, + v, x_bounds, y_bounds, xstep, ystep, N, is_circular, x_size, ) i, j = get_pixel_in_regular(v, x_c, y_c, x_start, x_stop, y_start, y_stop) grid_count_(grid, i, j) diff --git a/src/py_eddy_tracker/observations/tracking.py b/src/py_eddy_tracker/observations/tracking.py index 2914df6b..6612c6d5 100644 --- a/src/py_eddy_tracker/observations/tracking.py +++ b/src/py_eddy_tracker/observations/tracking.py @@ -657,12 +657,12 @@ def split_network(self, intern=True, **kwargs): def set_tracks(self, x, y, ids, window, **kwargs): """ - Will split one group (network) in segments + Split one group (network) in segments :param array x: coordinates of group :param array y: coordinates of group :param ndarray ids: several fields like time, group, ... 
- :param int windows: number of days where observations could missed + :param int window: number of days where observations could missed """ time_index = build_index((ids["time"]).astype("i4")) nb = x.shape[0] @@ -714,8 +714,8 @@ def get_previous_obs( time_e, time_ref, window, - min_overlap=0.01, - **kwargs, + min_overlap=0.2, + minimal_area=False, ): """Backward association of observations to the segments""" time_cur = int_(ids["time"][i_current]) @@ -731,7 +731,9 @@ def get_previous_obs( if len(ii) == 0: continue c = zeros(len(xj)) - c[ij] = vertice_overlap(xi[ii], yi[ii], xj[ij], yj[ij], **kwargs) + c[ij] = vertice_overlap( + xi[ii], yi[ii], xj[ij], yj[ij], minimal_area=minimal_area + ) # We remove low overlap c[c < min_overlap] = 0 # We get index of maximal overlap @@ -754,8 +756,8 @@ def get_next_obs( time_e, time_ref, window, - min_overlap=0.01, - **kwargs, + min_overlap=0.2, + minimal_area=False, ): """Forward association of observations to the segments""" time_max = time_e.shape[0] - 1 @@ -774,7 +776,9 @@ def get_next_obs( if len(ii) == 0: continue c = zeros(len(xj)) - c[ij] = vertice_overlap(xi[ii], yi[ii], xj[ij], yj[ij], **kwargs) + c[ij] = vertice_overlap( + xi[ii], yi[ii], xj[ij], yj[ij], minimal_area=minimal_area + ) # We remove low overlap c[c < min_overlap] = 0 # We get index of maximal overlap diff --git a/src/py_eddy_tracker/tracking.py b/src/py_eddy_tracker/tracking.py index 577496ff..7543a4d3 100644 --- a/src/py_eddy_tracker/tracking.py +++ b/src/py_eddy_tracker/tracking.py @@ -350,6 +350,9 @@ def load_state(self): self.virtual_obs = VirtualEddiesObservations.from_netcdf( general_handler.groups["LastVirtualObs"] ) + self.previous_virtual_obs = VirtualEddiesObservations.from_netcdf( + general_handler.groups["LastPreviousVirtualObs"] + ) # Load and last previous virtual obs to be merge with current => will be previous2_obs # TODO : Need to rethink this line ?? self.current_obs = self.current_obs.merge( From 7a9baf42d85dbc91c012f81992f85ae6d0f3a154 Mon Sep 17 00:00:00 2001 From: AntSimi <36040805+AntSimi@users.noreply.github.com> Date: Sun, 9 Jan 2022 22:17:18 +0100 Subject: [PATCH 059/115] example to compute statistics on raw identification --- README.md | 14 +- .../pet_statistics_on_identification.py | 105 +++++++++ .../pet_statistics_on_identification.ipynb | 202 ++++++++++++++++++ src/py_eddy_tracker/dataset/grid.py | 2 +- .../observations/observation.py | 2 +- src/py_eddy_tracker/poly.py | 10 +- 6 files changed, 330 insertions(+), 5 deletions(-) create mode 100644 examples/02_eddy_identification/pet_statistics_on_identification.py create mode 100644 notebooks/python_module/02_eddy_identification/pet_statistics_on_identification.ipynb diff --git a/README.md b/README.md index e26e15ac..c9e7690f 100644 --- a/README.md +++ b/README.md @@ -17,15 +17,17 @@ Method was described in : ### Use case ### Method is used in : - + [Mason, E., A. Pascual, P. Gaube, S.Ruiz, J. Pelegrí, A. Delepoulle, 2017: Subregional characterization of mesoscale eddies across the Brazil-Malvinas Confluence](https://doi.org/10.1002/2016JC012611) ### How do I get set up? ### #### Short story #### + ```bash pip install pyeddytracker ``` + #### Long story #### To avoid problems with installation, use of the virtualenv Python virtual environment is recommended. @@ -36,12 +38,20 @@ Then use pip to install all dependencies (numpy, scipy, matplotlib, netCDF4, ... 
pip install numpy scipy netCDF4 matplotlib opencv-python pyyaml pint polygon3 ``` -Then run the following to install the eddy tracker: +Clone : + +```bash +git clone https://github.com/AntSimi/py-eddy-tracker +``` + +Then run the following to install the eddy tracker : ```bash python setup.py install ``` + ### Tools gallery ### + Several examples based on py eddy tracker module are [here](https://py-eddy-tracker.readthedocs.io/en/latest/python_module/index.html). [![](https://py-eddy-tracker.readthedocs.io/en/latest/_static/logo.png)](https://py-eddy-tracker.readthedocs.io/en/latest/python_module/index.html) diff --git a/examples/02_eddy_identification/pet_statistics_on_identification.py b/examples/02_eddy_identification/pet_statistics_on_identification.py new file mode 100644 index 00000000..0e4d9b34 --- /dev/null +++ b/examples/02_eddy_identification/pet_statistics_on_identification.py @@ -0,0 +1,105 @@ +""" +Stastics on identification files +================================ + +Some statistics on raw identification without any tracking +""" +import numpy as np +from matplotlib import pyplot as plt +from matplotlib.dates import date2num + +from py_eddy_tracker import start_logger +from py_eddy_tracker.data import get_remote_demo_sample +from py_eddy_tracker.observations.observation import EddiesObservations + +start_logger().setLevel("ERROR") + + +# %% +def start_axes(title): + fig = plt.figure(figsize=(13, 5)) + ax = fig.add_axes([0.03, 0.03, 0.90, 0.94]) + ax.set_xlim(-6, 36.5), ax.set_ylim(30, 46) + ax.set_aspect("equal") + ax.set_title(title) + return ax + + +def update_axes(ax, mappable=None): + ax.grid() + if mappable: + plt.colorbar(mappable, cax=ax.figure.add_axes([0.95, 0.05, 0.01, 0.9])) + + +# %% +# We load demo sample and take only first year. +# +# Replace by a list of filename to apply on your own dataset. +file_objects = get_remote_demo_sample( + "eddies_med_adt_allsat_dt2018/Anticyclonic_2010_2011_2012" +)[:365] + +# %% +# Merge all identification dataset in one object +all_a = EddiesObservations.concatenate( + [EddiesObservations.load_file(i) for i in file_objects] +) + +# %% +# We define polygon bound +x0, x1, y0, y1 = 15, 20, 33, 38 +xs = np.array([[x0, x1, x1, x0, x0]], dtype="f8") +ys = np.array([[y0, y0, y1, y1, y0]], dtype="f8") +# Polygon object is create to be usable by match function. +polygon = dict(contour_lon_e=xs, contour_lat_e=ys, contour_lon_s=xs, contour_lat_s=ys) + +# %% +# Geographic frequency of eddies +step = 0.125 +ax = start_axes("") +# Count pixel used for each contour +g_a = all_a.grid_count(bins=((-10, 37, step), (30, 46, step)), intern=True) +m = g_a.display( + ax, cmap="terrain_r", vmin=0, vmax=0.75, factor=1 / all_a.nb_days, name="count" +) +ax.plot(polygon["contour_lon_e"][0], polygon["contour_lat_e"][0], "r") +update_axes(ax, m) + +# %% +# We use match function to count number of eddies which intersect the polygon defined previously. +# `p1_area` option allow to get in c_e/c_s output, precentage of area occupy by eddies in the polygon. 
+i_e, j_e, c_e = all_a.match(polygon, p1_area=True, intern=False) +i_s, j_s, c_s = all_a.match(polygon, p1_area=True, intern=True) + +# %% +dt = np.datetime64("1970-01-01") - np.datetime64("1950-01-01") +kw_hist = dict( + bins=date2num(np.arange(21900, 22300).astype("datetime64") - dt), histtype="step" +) +# translate julian day in datetime64 +t = all_a.time.astype("datetime64") - dt +# %% +# Count how many are in polygon +ax = plt.figure(figsize=(12, 6)).add_subplot(111) +ax.set_title("Different way to count eddies presence in a polygon") +ax.set_ylabel("Count") +m = all_a.mask_from_polygons(((xs, ys),)) +ax.hist(t[m], label="center in polygon", **kw_hist) +ax.hist(t[i_s[c_s > 0]], label="intersect speed contour with polygon", **kw_hist) +ax.hist(t[i_e[c_e > 0]], label="intersect extern contour with polygon", **kw_hist) +ax.legend() +ax.set_xlim(np.datetime64("2010"), np.datetime64("2011")) +ax.grid() + +# %% +# Percent of are of interest occupy by eddies +ax = plt.figure(figsize=(12, 6)).add_subplot(111) +ax.set_title("Percent of polygon occupy by an anticyclonic eddy") +ax.set_ylabel("Percent of polygon") +ax.hist(t[i_s[c_s > 0]], weights=c_s[c_s > 0] * 100.0, label="speed contour", **kw_hist) +ax.hist( + t[i_e[c_e > 0]], weights=c_e[c_e > 0] * 100.0, label="effective contour", **kw_hist +) +ax.legend(), ax.set_ylim(0, 25) +ax.set_xlim(np.datetime64("2010"), np.datetime64("2011")) +ax.grid() diff --git a/notebooks/python_module/02_eddy_identification/pet_statistics_on_identification.ipynb b/notebooks/python_module/02_eddy_identification/pet_statistics_on_identification.ipynb new file mode 100644 index 00000000..7fa04435 --- /dev/null +++ b/notebooks/python_module/02_eddy_identification/pet_statistics_on_identification.ipynb @@ -0,0 +1,202 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "%matplotlib inline" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "\n# Stastics on identification files\n\nSome statistics on raw identification without any tracking\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "import numpy as np\nfrom matplotlib import pyplot as plt\nfrom matplotlib.dates import date2num\n\nfrom py_eddy_tracker import start_logger\nfrom py_eddy_tracker.data import get_remote_demo_sample\nfrom py_eddy_tracker.observations.observation import EddiesObservations\n\nstart_logger().setLevel(\"ERROR\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "def start_axes(title):\n fig = plt.figure(figsize=(13, 5))\n ax = fig.add_axes([0.03, 0.03, 0.90, 0.94])\n ax.set_xlim(-6, 36.5), ax.set_ylim(30, 46)\n ax.set_aspect(\"equal\")\n ax.set_title(title)\n return ax\n\n\ndef update_axes(ax, mappable=None):\n ax.grid()\n if mappable:\n plt.colorbar(mappable, cax=ax.figure.add_axes([0.95, 0.05, 0.01, 0.9]))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We load demo sample and take only first year.\n\nReplace by a list of filename to apply on your own dataset.\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "file_objects = get_remote_demo_sample(\n \"eddies_med_adt_allsat_dt2018/Anticyclonic_2010_2011_2012\"\n)[:365]" + ] + }, + { + "cell_type": "markdown", + "metadata": 
{}, + "source": [ + "Merge all identification dataset in one object\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "all_a = EddiesObservations.concatenate(\n [EddiesObservations.load_file(i) for i in file_objects]\n)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We define polygon bound\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "x0, x1, y0, y1 = 15, 20, 33, 38\nxs = np.array([[x0, x1, x1, x0, x0]], dtype=\"f8\")\nys = np.array([[y0, y0, y1, y1, y0]], dtype=\"f8\")\n# Polygon object is create to be usable by match function.\npolygon = dict(contour_lon_e=xs, contour_lat_e=ys, contour_lon_s=xs, contour_lat_s=ys)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Geographic frequency of eddies\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "step = 0.125\nax = start_axes(\"\")\n# Count pixel used for each contour\ng_a = all_a.grid_count(bins=((-10, 37, step), (30, 46, step)), intern=True)\nm = g_a.display(\n ax, cmap=\"terrain_r\", vmin=0, vmax=0.75, factor=1 / all_a.nb_days, name=\"count\"\n)\nax.plot(polygon[\"contour_lon_e\"][0], polygon[\"contour_lat_e\"][0], \"r\")\nupdate_axes(ax, m)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We use match function to count number of eddies which intersect the polygon defined previously.\n`p1_area` option allow to get in c_e/c_s output, precentage of area occupy by eddies in the polygon.\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "i_e, j_e, c_e = all_a.match(polygon, p1_area=True, intern=False)\ni_s, j_s, c_s = all_a.match(polygon, p1_area=True, intern=True)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "dt = np.datetime64(\"1970-01-01\") - np.datetime64(\"1950-01-01\")\nkw_hist = dict(\n bins=date2num(np.arange(21900, 22300).astype(\"datetime64\") - dt), histtype=\"step\"\n)\n# translate julian day in datetime64\nt = all_a.time.astype(\"datetime64\") - dt" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Count how many are in polygon\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "ax = plt.figure(figsize=(12, 6)).add_subplot(111)\nax.set_title(\"Different way to count eddies presence in a polygon\")\nax.set_ylabel(\"Count\")\nm = all_a.mask_from_polygons(((xs, ys),))\nax.hist(t[m], label=\"center in polygon\", **kw_hist)\nax.hist(t[i_s[c_s > 0]], label=\"intersect speed contour with polygon\", **kw_hist)\nax.hist(t[i_e[c_e > 0]], label=\"intersect extern contour with polygon\", **kw_hist)\nax.legend()\nax.set_xlim(np.datetime64(\"2010\"), np.datetime64(\"2011\"))\nax.grid()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Percent of are of interest occupy by eddies\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "ax = plt.figure(figsize=(12, 6)).add_subplot(111)\nax.set_title(\"Percent of polygon occupy by an anticyclonic eddy\")\nax.set_ylabel(\"Percent 
of polygon\")\nax.hist(t[i_s[c_s > 0]], weights=c_s[c_s > 0] * 100.0, label=\"speed contour\", **kw_hist)\nax.hist(t[i_e[c_e > 0]], weights=c_e[c_e > 0] * 100.0, label=\"effective contour\", **kw_hist)\nax.legend(), ax.set_ylim(0, 25)\nax.set_xlim(np.datetime64(\"2010\"), np.datetime64(\"2011\"))\nax.grid()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.7" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} \ No newline at end of file diff --git a/src/py_eddy_tracker/dataset/grid.py b/src/py_eddy_tracker/dataset/grid.py index 091d2016..237577a4 100644 --- a/src/py_eddy_tracker/dataset/grid.py +++ b/src/py_eddy_tracker/dataset/grid.py @@ -807,7 +807,7 @@ def eddy_identification( else: centi = reset_centroid[0] centj = reset_centroid[1] - # To move in regular and unregular grid + # FIXME : To move in regular and unregular grid if len(x.shape) == 1: centlon_e = x[centi] centlat_e = y[centj] diff --git a/src/py_eddy_tracker/observations/observation.py b/src/py_eddy_tracker/observations/observation.py index 3543caa7..3c8e1938 100644 --- a/src/py_eddy_tracker/observations/observation.py +++ b/src/py_eddy_tracker/observations/observation.py @@ -8,7 +8,7 @@ from tarfile import ExFileObject from tokenize import TokenError -import packaging +import packaging.version import zarr from matplotlib.cm import get_cmap from matplotlib.collections import PolyCollection diff --git a/src/py_eddy_tracker/poly.py b/src/py_eddy_tracker/poly.py index abe8becb..bb9ac79e 100644 --- a/src/py_eddy_tracker/poly.py +++ b/src/py_eddy_tracker/poly.py @@ -411,7 +411,7 @@ def merge(x, y): return concatenate(x), concatenate(y) -def vertice_overlap(x0, y0, x1, y1, minimal_area=False): +def vertice_overlap(x0, y0, x1, y1, minimal_area=False, p1_area=False): r""" Return percent of overlap for each item. @@ -420,6 +420,7 @@ def vertice_overlap(x0, y0, x1, y1, minimal_area=False): :param array x1: x for polygon list 1 :param array y1: y for polygon list 1 :param bool minimal_area: If True, function will compute intersection/little polygon, else intersection/union + :param bool p1_area: If True, function will compute intersection/p1 polygon, else intersection/union :return: Result of cost function :rtype: array @@ -430,6 +431,10 @@ def vertice_overlap(x0, y0, x1, y1, minimal_area=False): If minimal area: .. math:: Score = \frac{Intersection(P_0,P_1)_{area}}{min(P_{0 area},P_{1 area})} + + If P1 area: + + .. 
math:: Score = \frac{Intersection(P_0,P_1)_{area}}{P_{1 area}} """ nb = x0.shape[0] cost = empty(nb) @@ -443,6 +448,9 @@ def vertice_overlap(x0, y0, x1, y1, minimal_area=False): # we divide intersection with the little one result from 0 to 1 if minimal_area: cost[i] = intersection / min(p0.area(), p1.area()) + # we divide intersection with p1 + elif p1_area: + cost[i] = intersection / p1.area() # we divide intersection with polygon merging result from 0 to 1 else: cost[i] = intersection / (p0 + p1).area() From cc540c03c27ac40194d049e547b9ca1cc2307572 Mon Sep 17 00:00:00 2001 From: AntSimi <36040805+AntSimi@users.noreply.github.com> Date: Tue, 11 Jan 2022 13:45:49 +0100 Subject: [PATCH 060/115] english correction --- .../pet_statistics_on_identification.py | 22 +++++++++---------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/examples/02_eddy_identification/pet_statistics_on_identification.py b/examples/02_eddy_identification/pet_statistics_on_identification.py index 0e4d9b34..0c72262f 100644 --- a/examples/02_eddy_identification/pet_statistics_on_identification.py +++ b/examples/02_eddy_identification/pet_statistics_on_identification.py @@ -40,7 +40,7 @@ def update_axes(ax, mappable=None): )[:365] # %% -# Merge all identification dataset in one object +# Merge all identification datasets in one object all_a = EddiesObservations.concatenate( [EddiesObservations.load_file(i) for i in file_objects] ) @@ -50,14 +50,14 @@ def update_axes(ax, mappable=None): x0, x1, y0, y1 = 15, 20, 33, 38 xs = np.array([[x0, x1, x1, x0, x0]], dtype="f8") ys = np.array([[y0, y0, y1, y1, y0]], dtype="f8") -# Polygon object is create to be usable by match function. +# Polygon object created for the match function use. polygon = dict(contour_lon_e=xs, contour_lat_e=ys, contour_lon_s=xs, contour_lat_s=ys) # %% # Geographic frequency of eddies step = 0.125 ax = start_axes("") -# Count pixel used for each contour +# Count pixel encompassed in each contour g_a = all_a.grid_count(bins=((-10, 37, step), (30, 46, step)), intern=True) m = g_a.display( ax, cmap="terrain_r", vmin=0, vmax=0.75, factor=1 / all_a.nb_days, name="count" @@ -66,7 +66,7 @@ def update_axes(ax, mappable=None): update_axes(ax, m) # %% -# We use match function to count number of eddies which intersect the polygon defined previously. +# We use the match function to count the number of eddies that intersect the polygon defined previously # `p1_area` option allow to get in c_e/c_s output, precentage of area occupy by eddies in the polygon. 
i_e, j_e, c_e = all_a.match(polygon, p1_area=True, intern=False) i_s, j_s, c_s = all_a.match(polygon, p1_area=True, intern=True) @@ -79,22 +79,22 @@ def update_axes(ax, mappable=None): # translate julian day in datetime64 t = all_a.time.astype("datetime64") - dt # %% -# Count how many are in polygon +# Number of eddies within a polygon ax = plt.figure(figsize=(12, 6)).add_subplot(111) -ax.set_title("Different way to count eddies presence in a polygon") +ax.set_title("Different ways to count eddies within a polygon") ax.set_ylabel("Count") m = all_a.mask_from_polygons(((xs, ys),)) -ax.hist(t[m], label="center in polygon", **kw_hist) -ax.hist(t[i_s[c_s > 0]], label="intersect speed contour with polygon", **kw_hist) -ax.hist(t[i_e[c_e > 0]], label="intersect extern contour with polygon", **kw_hist) +ax.hist(t[m], label="Eddy Center in polygon", **kw_hist) +ax.hist(t[i_s[c_s > 0]], label="Intersection Speed contour and polygon", **kw_hist) +ax.hist(t[i_e[c_e > 0]], label="Intersection Effective contour and polygon", **kw_hist) ax.legend() ax.set_xlim(np.datetime64("2010"), np.datetime64("2011")) ax.grid() # %% -# Percent of are of interest occupy by eddies +# Percent of the area of interest occupied by eddies. ax = plt.figure(figsize=(12, 6)).add_subplot(111) -ax.set_title("Percent of polygon occupy by an anticyclonic eddy") +ax.set_title("Percent of polygon occupied by an anticyclonic eddy") ax.set_ylabel("Percent of polygon") ax.hist(t[i_s[c_s > 0]], weights=c_s[c_s > 0] * 100.0, label="speed contour", **kw_hist) ax.hist( From 574a5e4016c86ebe29badb85082582472e26262a Mon Sep 17 00:00:00 2001 From: AntSimi <36040805+AntSimi@users.noreply.github.com> Date: Tue, 11 Jan 2022 15:57:14 +0100 Subject: [PATCH 061/115] Add example to get path of particle with velocity field --- examples/07_cube_manipulation/README.rst | 4 - .../pet_particles_drift.py | 46 +++++++ .../pet_particles_drift.ipynb | 126 ++++++++++++++++++ src/py_eddy_tracker/dataset/grid.py | 22 +++ 4 files changed, 194 insertions(+), 4 deletions(-) create mode 100644 examples/07_cube_manipulation/pet_particles_drift.py create mode 100644 notebooks/python_module/07_cube_manipulation/pet_particles_drift.ipynb diff --git a/examples/07_cube_manipulation/README.rst b/examples/07_cube_manipulation/README.rst index 147ce3f3..7cecfbd4 100644 --- a/examples/07_cube_manipulation/README.rst +++ b/examples/07_cube_manipulation/README.rst @@ -1,6 +1,2 @@ Time grid computation ===================== - -.. warning:: - - Time grid is under development, API could move quickly! 
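The three normalizations that `vertice_overlap` can apply (see the poly.py hunk in PATCH 059 above) are easy to verify by hand. Below is a minimal sketch, assuming axis-aligned rectangles so that intersection and union areas have closed forms; `rect_area` and `rect_intersection` are helpers written for this illustration only and are not part of py-eddy-tracker:

```python
def rect_area(x0, y0, x1, y1):
    # Area of the axis-aligned rectangle with corners (x0, y0) and (x1, y1)
    return max(0.0, x1 - x0) * max(0.0, y1 - y0)


def rect_intersection(a, b):
    # Area of the intersection of two axis-aligned rectangles (x0, y0, x1, y1)
    x0, y0 = max(a[0], b[0]), max(a[1], b[1])
    x1, y1 = min(a[2], b[2]), min(a[3], b[3])
    return rect_area(x0, y0, x1, y1)


p0 = (0.0, 0.0, 2.0, 2.0)  # area 4, plays the role of P0
p1 = (1.0, 1.0, 2.0, 2.0)  # area 1, fully contained in p0
inter = rect_intersection(p0, p1)  # 1.0
a0, a1 = rect_area(*p0), rect_area(*p1)

score_default = inter / (a0 + a1 - inter)  # I / union(P0, P1) = 0.25
score_minimal_area = inter / min(a0, a1)   # minimal_area=True -> 1.0
score_p1_area = inter / a1                 # p1_area=True -> 1.0
print(score_default, score_minimal_area, score_p1_area)
```

The default score stays small whenever a small polygon is swallowed by a much larger one; `minimal_area` and `p1_area` both saturate at 1 in that case, which is exactly the asymmetry the `p1_area` option exploits in the statistics example above.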
diff --git a/examples/07_cube_manipulation/pet_particles_drift.py b/examples/07_cube_manipulation/pet_particles_drift.py new file mode 100644 index 00000000..f73216fc --- /dev/null +++ b/examples/07_cube_manipulation/pet_particles_drift.py @@ -0,0 +1,46 @@ +""" +Build path of particle drifting +=============================== + +""" + +from matplotlib import pyplot as plt +from numpy import arange, meshgrid + +from py_eddy_tracker import start_logger +from py_eddy_tracker.data import get_demo_path +from py_eddy_tracker.dataset.grid import GridCollection + +start_logger().setLevel("ERROR") + +# %% +# Load data cube +c = GridCollection.from_netcdf_cube( + get_demo_path("dt_med_allsat_phy_l4_2005T2.nc"), + "longitude", + "latitude", + "time", + heigth="adt", +) + +# %% +# Advection properties +nb_days, step_by_day = 10, 6 +nb_time = step_by_day * nb_days +kw_p = dict(nb_step=1, time_step=86400 / step_by_day) +t0 = 20210 + +# %% +# Get paths +x0, y0 = meshgrid(arange(32, 35, 0.5), arange(32.5, 34.5, 0.5)) +x0, y0 = x0.reshape(-1), y0.reshape(-1) +t, x, y = c.path(x0, y0, "u", "v", t_init=t0, **kw_p, nb_time=nb_time) + +# %% +# Plot paths +ax = plt.figure(figsize=(9, 6)).add_subplot(111, aspect="equal") +ax.plot(x0, y0, "k.", ms=20) +ax.plot(x, y, lw=3) +ax.set_title("10 days particle paths") +ax.set_xlim(31, 35), ax.set_ylim(32, 34.5) +ax.grid() diff --git a/notebooks/python_module/07_cube_manipulation/pet_particles_drift.ipynb b/notebooks/python_module/07_cube_manipulation/pet_particles_drift.ipynb new file mode 100644 index 00000000..53365ac7 --- /dev/null +++ b/notebooks/python_module/07_cube_manipulation/pet_particles_drift.ipynb @@ -0,0 +1,126 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "%matplotlib inline" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "\n# Build path of particle drifting\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "from matplotlib import pyplot as plt\nfrom numpy import arange, meshgrid\n\nfrom py_eddy_tracker import start_logger\nfrom py_eddy_tracker.data import get_demo_path\nfrom py_eddy_tracker.dataset.grid import GridCollection\n\nstart_logger().setLevel(\"ERROR\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Load data cube\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "c = GridCollection.from_netcdf_cube(\n get_demo_path(\"dt_med_allsat_phy_l4_2005T2.nc\"),\n \"longitude\",\n \"latitude\",\n \"time\",\n heigth=\"adt\",\n)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Advection properties\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "nb_days, step_by_day = 10, 6\nnb_time = step_by_day * nb_days\nkw_p = dict(nb_step=1, time_step=86400 / step_by_day)\nt0 = 20210" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Get paths\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "x0, y0 = meshgrid(arange(32, 35, 0.5), arange(32.5, 34.5, 0.5))\nx0, y0 = x0.reshape(-1), y0.reshape(-1)\nt, x, y = c.path(x0, y0, \"u\", \"v\", t_init=t0, **kw_p, nb_time=nb_time)" + ] + }, + { + 
"cell_type": "markdown", + "metadata": {}, + "source": [ + "Plot paths\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "ax = plt.figure(figsize=(9, 6)).add_subplot(111, aspect=\"equal\")\nax.plot(x0, y0, \"k.\", ms=20)\nax.plot(x, y, lw=3)\nax.set_title(\"10 days particle paths\")\nax.set_xlim(31, 35), ax.set_ylim(32, 34.5)\nax.grid()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.7" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} \ No newline at end of file diff --git a/src/py_eddy_tracker/dataset/grid.py b/src/py_eddy_tracker/dataset/grid.py index 237577a4..8e9b0ac3 100644 --- a/src/py_eddy_tracker/dataset/grid.py +++ b/src/py_eddy_tracker/dataset/grid.py @@ -2517,6 +2517,28 @@ def get_previous_time_step(self, t_init): logger.debug(f"i={i}, t={t}, dataset={dataset}") yield t, dataset + def path(self, x0, y0, *args, nb_time=2, **kwargs): + """ + At each call it will update position in place with u & v field + + :param array x0: Longitude of obs to move + :param array y0: Latitude of obs to move + :param int nb_time: Number of iteration for particle + :param dict kwargs: look at :py:meth:`GridCollection.advect` + + :return: t,x,y + + .. minigallery:: py_eddy_tracker.GridCollection.path + """ + particles = self.advect(x0.copy(), y0.copy(), *args, **kwargs) + t = empty(nb_time + 1, dtype="f8") + x = empty((nb_time + 1, x0.size), dtype=x0.dtype) + y = empty(x.shape, dtype=y0.dtype) + t[0], x[0], y[0] = kwargs.get("t_init"), x0, y0 + for i in range(nb_time): + t[i + 1], x[i + 1], y[i + 1] = particles.__next__() + return t, x, y + @njit(cache=True) def advect_t(x_g, y_g, u_g0, v_g0, m_g0, u_g1, v_g1, m_g1, x, y, m, weigths, half_w=0): From ab67c557040055976887275b96a01e62abdc5827 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment?= <49512274+ludwigVonKoopa@users.noreply.github.com> Date: Tue, 11 Jan 2022 16:11:49 +0100 Subject: [PATCH 062/115] update changelog for v3.6.0 (#129) --- CHANGELOG.rst | 18 +++++++++++++++--- 1 file changed, 15 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 110c6081..2ec35e4b 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -7,7 +7,19 @@ The format is based on `Keep a Changelog `_ and this project adheres to `Semantic Versioning `_. [Unreleased] ------------- +------------- +Changed +^^^^^^^ + +Fixed +^^^^^ + +Added +^^^^^ + + +[3.6.0] - 2022-01-12 +-------------------- Changed ^^^^^^^ @@ -15,8 +27,8 @@ Changed New identifications are produced with this type, old files could still be loaded. If you use old identifications for tracking use the `--unraw` option to unpack old times and store data with the new format. - Now amplitude is stored with .1 mm of precision (instead of 1 mm), same advice as for time. -- expose more parameters to users for bash tools build_network & divide_network -- add warning when loading a file created from a previous version of py-eddy-tracker. +- Expose more parameters to users for bash tools build_network & divide_network +- Add warning when loading a file created from a previous version of py-eddy-tracker. 
From 2d982d5515ec487c0c850c29a2676b81959288c7 Mon Sep 17 00:00:00 2001 From: Antoine <36040805+AntSimi@users.noreply.github.com> Date: Tue, 8 Feb 2022 12:32:08 +0100 Subject: [PATCH 063/115] Create codeql-analysis.yml --- .github/workflows/codeql-analysis.yml | 70 +++++++++++++++++++++++++++ 1 file changed, 70 insertions(+) create mode 100644 .github/workflows/codeql-analysis.yml diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml new file mode 100644 index 00000000..d9437d16 --- /dev/null +++ b/.github/workflows/codeql-analysis.yml @@ -0,0 +1,70 @@ +# For most projects, this workflow file will not need changing; you simply need +# to commit it to your repository. +# +# You may wish to alter this file to override the set of languages analyzed, +# or to provide custom queries or build logic. +# +# ******** NOTE ******** +# We have attempted to detect the languages in your repository. Please check +# the `language` matrix defined below to confirm you have the correct set of +# supported CodeQL languages. +# +name: "CodeQL" + +on: + push: + branches: [ master ] + pull_request: + # The branches below must be a subset of the branches above + branches: [ master ] + schedule: + - cron: '41 16 * * 4' + +jobs: + analyze: + name: Analyze + runs-on: ubuntu-latest + permissions: + actions: read + contents: read + security-events: write + + strategy: + fail-fast: false + matrix: + language: [ 'python' ] + # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ] + # Learn more about CodeQL language support at https://git.io/codeql-language-support + + steps: + - name: Checkout repository + uses: actions/checkout@v2 + + # Initializes the CodeQL tools for scanning. + - name: Initialize CodeQL + uses: github/codeql-action/init@v1 + with: + languages: ${{ matrix.language }} + # If you wish to specify custom queries, you can do so here or in a config file. + # By default, queries listed here will override any specified in a config file. + # Prefix the list here with "+" to use these queries and those in the config file. + # queries: ./path/to/local/query, your-org/your-repo/queries@main + + # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). + # If this step fails, then you should remove it and run the build manually (see below) + - name: Autobuild + uses: github/codeql-action/autobuild@v1 + + # ℹ️ Command-line programs to run using the OS shell. + # 📚 https://git.io/JvXDl + + # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines + # and modify them (or add more) to build your code if your project + # uses a compiled language + + #- run: | + # make bootstrap + # make release + + - name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@v1 From 334d4c124fd998010757479d527fcd72153dcea8 Mon Sep 17 00:00:00 2001 From: AntSimi <36040805+AntSimi@users.noreply.github.com> Date: Sun, 27 Feb 2022 21:54:49 +0100 Subject: [PATCH 064/115] check coordinates in regular grid #138 --- CHANGELOG.rst | 2 ++ src/py_eddy_tracker/dataset/grid.py | 4 ++++ 2 files changed, 6 insertions(+) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 2ec35e4b..57fd7551 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -14,6 +14,8 @@ Changed Fixed ^^^^^ +- Check strictly increasing coordinates for RegularGridDataset. 
+ Added ^^^^^ diff --git a/src/py_eddy_tracker/dataset/grid.py b/src/py_eddy_tracker/dataset/grid.py index 8e9b0ac3..797e0482 100644 --- a/src/py_eddy_tracker/dataset/grid.py +++ b/src/py_eddy_tracker/dataset/grid.py @@ -1193,6 +1193,10 @@ def setup_coordinates(self): raise Exception( "Coordinates in RegularGridDataset must be 1D array, or think to use UnRegularGridDataset" ) + dx = self.x_bounds[1:] - self.x_bounds[:-1] + dy = self.y_bounds[1:] - self.y_bounds[:-1] + if (dx < 0).any() or (dy < 0).any(): + raise Exception("Coordinates in RegularGridDataset must be strictly increasing") self._x_step = (self.x_c[1:] - self.x_c[:-1]).mean() self._y_step = (self.y_c[1:] - self.y_c[:-1]).mean() From 5b2f6ff84e4f545a37099061091aba9e1e80e5f0 Mon Sep 17 00:00:00 2001 From: Antoine Delepoulle <36040805+AntSimi@users.noreply.github.com> Date: Thu, 7 Jul 2022 12:11:05 +0200 Subject: [PATCH 065/115] check mask #145 (#150) * check mask #145 * update python version for doc * switch order in requirement * fix pint version --- .github/workflows/python-app.yml | 2 +- doc/environment.yml | 2 +- requirements.txt | 8 ++++---- src/py_eddy_tracker/dataset/grid.py | 18 +++++++++--------- 4 files changed, 15 insertions(+), 15 deletions(-) diff --git a/.github/workflows/python-app.yml b/.github/workflows/python-app.yml index a6fcceed..bbc0662c 100644 --- a/.github/workflows/python-app.yml +++ b/.github/workflows/python-app.yml @@ -11,7 +11,7 @@ jobs: matrix: # os: [ubuntu-latest, macos-latest, windows-latest] os: [ubuntu-latest, windows-latest] - python_version: [3.7, 3.8, 3.9] + python_version: [3.7, 3.8, 3.9, '3.10'] name: Run py eddy tracker build tests runs-on: ${{ matrix.os }} defaults: diff --git a/doc/environment.yml b/doc/environment.yml index 7dcb504d..9d882911 100644 --- a/doc/environment.yml +++ b/doc/environment.yml @@ -2,7 +2,7 @@ channels: - conda-forge - defaults dependencies: - - python=3.8 + - python=3.10 - ffmpeg - pip: - sphinx-gallery diff --git a/requirements.txt b/requirements.txt index 477cf32d..497344e6 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,11 +1,11 @@ matplotlib -netCDF4 -numba>=0.53 -numpy<1.21 opencv-python -pint +pint==0.18 polygon3 pyyaml requests scipy zarr +netCDF4<1.6 +numpy<1.23 +numba<0.56 \ No newline at end of file diff --git a/src/py_eddy_tracker/dataset/grid.py b/src/py_eddy_tracker/dataset/grid.py index 797e0482..30cdd863 100644 --- a/src/py_eddy_tracker/dataset/grid.py +++ b/src/py_eddy_tracker/dataset/grid.py @@ -402,6 +402,14 @@ def load(self): self.setup_coordinates() + @staticmethod + def get_mask(a): + if len(a.mask.shape): + m = a.mask + else: + m = ones(a.shape, dtype='bool') if a.mask else zeros(a.shape, dtype='bool') + return m + @staticmethod def c_to_bounds(c): """ @@ -1126,7 +1134,7 @@ def _low_filter(self, grid_name, w_cut, factor=8.0): bins = (x_array, y_array) x_flat, y_flat, z_flat = x.reshape((-1,)), y.reshape((-1,)), data.reshape((-1,)) - m = ~z_flat.mask + m = ~self.get_mask(z_flat) x_flat, y_flat, z_flat = x_flat[m], y_flat[m], z_flat[m] nb_value, _, _ = histogram2d(x_flat, y_flat, bins=bins) @@ -1936,14 +1944,6 @@ def regrid(self, other, grid_name, new_name=None): # self.variables_description[new_name]['infos'] = False # self.variables_description[new_name]['kwargs']['dimensions'] = ... 
- @staticmethod - def get_mask(a): - if len(a.mask.shape): - m = a.mask - else: - m = ones(a.shape) if a.mask else zeros(a.shape) - return m - def interp(self, grid_name, lons, lats, method="bilinear"): """ Compute z over lons, lats From e43c5692f48bab7c242722391cb0cd654423664a Mon Sep 17 00:00:00 2001 From: "A. Delepoulle" <36040805+AntSimi@users.noreply.github.com> Date: Wed, 7 Sep 2022 17:40:28 +0200 Subject: [PATCH 066/115] Add method to create obs from array Nb obs by network get track slice --- README.md | 2 +- src/py_eddy_tracker/observations/groups.py | 7 ++-- src/py_eddy_tracker/observations/network.py | 31 +++++++++++++-- .../observations/observation.py | 38 ++++++++++++++++--- src/py_eddy_tracker/observations/tracking.py | 9 ++++- 5 files changed, 74 insertions(+), 13 deletions(-) diff --git a/README.md b/README.md index c9e7690f..98a16b62 100644 --- a/README.md +++ b/README.md @@ -10,7 +10,7 @@ Method was described in : -[Pegliasco, C., Delepoulle, A., Morrow, R., Faugère, Y., and Dibarboure, G.: META3.1exp : A new Global Mesoscale Eddy Trajectories Atlas derived from altimetry, Earth Syst. Sci. Data Discuss.](https://doi.org/10.5194/essd-2021-300) +[Pegliasco, C., Delepoulle, A., Morrow, R., Faugère, Y., and Dibarboure, G.: META3.1exp : A new Global Mesoscale Eddy Trajectories Atlas derived from altimetry, Earth Syst. Sci. Data Discuss.](https://doi.org/10.5194/essd-14-1087-2022) [Mason, E., A. Pascual, and J. C. McWilliams, 2014: A new sea surface height–based code for oceanic mesoscale eddy tracking.](https://doi.org/10.1175/JTECH-D-14-00019.1) diff --git a/src/py_eddy_tracker/observations/groups.py b/src/py_eddy_tracker/observations/groups.py index 6fea0ace..fcb6733b 100644 --- a/src/py_eddy_tracker/observations/groups.py +++ b/src/py_eddy_tracker/observations/groups.py @@ -66,7 +66,7 @@ def get_missing_indices( return indices -def advect(x, y, c, t0, n_days): +def advect(x, y, c, t0, n_days, u_name='u', v_name='v'): """ Advect particles from t0 to t0 + n_days, with data cube. 
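The point of the signature change in the hunk above is that `advect` no longer hard-codes which cube variables carry the velocity components when it delegates to `GridCollection.advect`. A hedged sketch of a call, reusing the demo cube from PATCH 061 — the defaults `u_name="u"`, `v_name="v"` reproduce the old behaviour, while a cube whose components are named e.g. `ugos`/`vgos` would pass those names instead (illustrative names, not taken from this patch):

```python
from numpy import arange, meshgrid

from py_eddy_tracker.data import get_demo_path
from py_eddy_tracker.dataset.grid import GridCollection
from py_eddy_tracker.observations.groups import advect

c = GridCollection.from_netcdf_cube(
    get_demo_path("dt_med_allsat_phy_l4_2005T2.nc"),
    "longitude", "latitude", "time", heigth="adt",
)
x, y = meshgrid(arange(32, 35, 0.5), arange(32.5, 34.5, 0.5))
x, y = x.reshape(-1), y.reshape(-1)
# Positions are updated in place by the underlying generator; a negative
# n_days switches the advection to backward mode.
t_end, x, y = advect(x, y, c, t0=20210, n_days=5, u_name="u", v_name="v")
```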
@@ -75,13 +75,15 @@ def advect(x, y, c, t0, n_days): :param `~py_eddy_tracker.dataset.grid.GridCollection` c: GridCollection with speed for particles :param int t0: julian day of advection start :param int n_days: number of days to advect + :param str u_name: variable name for u component + :param str v_name: variable name for v component """ kw = dict(nb_step=6, time_step=86400 / 6) if n_days < 0: kw["backward"] = True n_days = -n_days - p = c.advect(x, y, "u", "v", t_init=t0, **kw) + p = c.advect(x, y, u_name, v_name, t_init=t0, **kw) for _ in range(n_days): t, x, y = p.__next__() return t, x, y @@ -125,7 +127,6 @@ def particle_candidate( else: x, y, i_start = e.create_particles(step_mesh, intern=True) print("The contour_start was not correct, speed contour is used") - # Advection t_end, x, y = advect(x, y, c, t_start, **kwargs) diff --git a/src/py_eddy_tracker/observations/network.py b/src/py_eddy_tracker/observations/network.py index 1c078bf8..4a884705 100644 --- a/src/py_eddy_tracker/observations/network.py +++ b/src/py_eddy_tracker/observations/network.py @@ -161,6 +161,16 @@ def index_network(self): self._index_network = build_index(self.track) return self._index_network + def network_size(self, id_networks): + """ + Return size for specified network + + :param list,array id_networks: ids to identify network + """ + i = id_networks - self.index_network[2] + i_start, i_stop = self.index_network[0][i], self.index_network[1][i] + return i_stop - i_start + def network_slice(self, id_network): """ Return slice for one network @@ -679,7 +689,13 @@ def display_timeline( """ self.only_one_network() j = 0 - line_kw = dict(ls="-", marker="+", markersize=6, zorder=1, lw=3,) + line_kw = dict( + ls="-", + marker="+", + markersize=6, + zorder=1, + lw=3, + ) line_kw.update(kwargs) mappables = dict(lines=list()) @@ -912,7 +928,10 @@ def event_map(self, ax, **kwargs): """Add the merging and splitting events to a map""" j = 0 mappables = dict() - symbol_kw = dict(markersize=10, color="k",) + symbol_kw = dict( + markersize=10, + color="k", + ) symbol_kw.update(kwargs) symbol_kw_split = symbol_kw.copy() symbol_kw_split["markersize"] += 4 @@ -941,7 +960,13 @@ def event_map(self, ax, **kwargs): return mappables def scatter( - self, ax, name="time", factor=1, ref=None, edgecolor_cycle=None, **kwargs, + self, + ax, + name="time", + factor=1, + ref=None, + edgecolor_cycle=None, + **kwargs, ): """ This function scatters the path of each network, with the merging and splitting events diff --git a/src/py_eddy_tracker/observations/observation.py b/src/py_eddy_tracker/observations/observation.py index 3c8e1938..043b504d 100644 --- a/src/py_eddy_tracker/observations/observation.py +++ b/src/py_eddy_tracker/observations/observation.py @@ -130,7 +130,7 @@ def shifted_ellipsoid_degrees_mask2(lon0, lat0, lon1, lat1, minor=1.5, major=1.5 if dx > major[j]: m[j, i] = False continue - d_normalize = dx ** 2 / major[j] ** 2 + dy ** 2 / minor ** 2 + d_normalize = dx**2 / major[j] ** 2 + dy**2 / minor**2 m[j, i] = d_normalize < 1.0 return m @@ -729,7 +729,11 @@ def load_file(cls, filename, **kwargs): .. 
code-block:: python kwargs_latlon_300 = dict( - include_vars=["longitude", "latitude",], indexs=dict(obs=slice(0, 300)), + include_vars=[ + "longitude", + "latitude", + ], + indexs=dict(obs=slice(0, 300)), ) small_dataset = TrackEddiesObservations.load_file( filename, **kwargs_latlon_300 @@ -1047,6 +1051,19 @@ def compare_units(input_unit, output_unit, name): output_unit, ) + @classmethod + def from_array(cls, arrays, **kwargs): + nb = arrays["time"].size + # if hasattr(handler, "track_array_variables"): + # kwargs["track_array_variables"] = handler.track_array_variables + # kwargs["array_variables"] = handler.array_variables.split(",") + # if len(handler.track_extra_variables) > 1: + # kwargs["track_extra_variables"] = handler.track_extra_variables.split(",") + eddies = cls(size=nb, **kwargs) + for k, v in arrays.items(): + eddies.obs[k] = v + return eddies + @classmethod def from_zarr(cls, handler): nb_obs = len(handler.dimensions[cls.obs_dimension(handler)]) @@ -1302,7 +1319,7 @@ def fixed_ellipsoid_mask( if isinstance(minor, ndarray): minor = minor[index_self] # focal distance - f_degree = ((major ** 2 - minor ** 2) ** 0.5) / ( + f_degree = ((major**2 - minor**2) ** 0.5) / ( 111.2 * cos(radians(self.lat[index_self])) ) @@ -2010,7 +2027,11 @@ def bins_stat(self, xname, bins=None, yname=None, method=None, mask=None): def format_label(self, label): t0, t1 = self.period - return label.format(t0=t0, t1=t1, nb_obs=len(self),) + return label.format( + t0=t0, + t1=t1, + nb_obs=len(self), + ) def display(self, ax, ref=None, extern_only=False, intern_only=False, **kwargs): """Plot the speed and effective (dashed) contour of the eddies @@ -2381,7 +2402,14 @@ def grid_count_pixel_in( x_, y_ = reduce_size(x_, y_) v = create_vertice(x_, y_) (x_start, x_stop), (y_start, y_stop) = bbox_indice_regular( - v, x_bounds, y_bounds, xstep, ystep, N, is_circular, x_size, + v, + x_bounds, + y_bounds, + xstep, + ystep, + N, + is_circular, + x_size, ) i, j = get_pixel_in_regular(v, x_c, y_c, x_start, x_stop, y_start, y_stop) grid_count_(grid, i, j) diff --git a/src/py_eddy_tracker/observations/tracking.py b/src/py_eddy_tracker/observations/tracking.py index 6612c6d5..7680961c 100644 --- a/src/py_eddy_tracker/observations/tracking.py +++ b/src/py_eddy_tracker/observations/tracking.py @@ -68,6 +68,10 @@ def __init__(self, *args, **kwargs): self.__obs_by_track = None self.__nb_track = None + def track_slice(self, track): + i0 = self.index_from_track[track] + return slice(i0, i0 + self.nb_obs_by_track[track]) + def iter_track(self): """ Yield track @@ -582,7 +586,10 @@ def close_tracks(self, other, nb_obs_min=10, **kwargs): def format_label(self, label): t0, t1 = self.period return label.format( - t0=t0, t1=t1, nb_obs=len(self), nb_tracks=(self.nb_obs_by_track != 0).sum(), + t0=t0, + t1=t1, + nb_obs=len(self), + nb_tracks=(self.nb_obs_by_track != 0).sum(), ) def plot(self, ax, ref=None, **kwargs): From 6dcbbc43812ac2f1a51983253ff779ae8020741b Mon Sep 17 00:00:00 2001 From: Antoine Delepoulle <36040805+AntSimi@users.noreply.github.com> Date: Tue, 13 Sep 2022 12:20:31 +0200 Subject: [PATCH 067/115] Add log in GUI (#155) Give a factor in all case of unit --- check.sh | 2 +- src/py_eddy_tracker/dataset/grid.py | 2 +- src/py_eddy_tracker/gui.py | 5 ++++- src/py_eddy_tracker/observations/observation.py | 3 +++ 4 files changed, 9 insertions(+), 3 deletions(-) diff --git a/check.sh b/check.sh index ddafab69..b158028a 100644 --- a/check.sh +++ b/check.sh @@ -4,4 +4,4 @@ blackdoc src tests examples flake8 tests examples src 
--count --select=E9,F63,F7,F82 --show-source --statistics # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide flake8 tests examples src --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics -pytest -vv --cov py_eddy_tracker --cov-report html +python -m pytest -vv --cov py_eddy_tracker --cov-report html diff --git a/src/py_eddy_tracker/dataset/grid.py b/src/py_eddy_tracker/dataset/grid.py index 30cdd863..bf02a1b0 100644 --- a/src/py_eddy_tracker/dataset/grid.py +++ b/src/py_eddy_tracker/dataset/grid.py @@ -407,7 +407,7 @@ def get_mask(a): if len(a.mask.shape): m = a.mask else: - m = ones(a.shape, dtype='bool') if a.mask else zeros(a.shape, dtype='bool') + m = ones(a.shape, dtype="bool") if a.mask else zeros(a.shape, dtype="bool") return m @staticmethod diff --git a/src/py_eddy_tracker/gui.py b/src/py_eddy_tracker/gui.py index deeb6660..0f310467 100644 --- a/src/py_eddy_tracker/gui.py +++ b/src/py_eddy_tracker/gui.py @@ -3,6 +3,7 @@ GUI class """ +import logging from datetime import datetime, timedelta import matplotlib.pyplot as plt @@ -11,6 +12,8 @@ from .generic import flatten_line_matrix, split_line +logger = logging.getLogger("pet") + try: from pylook.axes import PlatCarreAxes except ImportError: @@ -91,7 +94,7 @@ def set_initial_values(self): for dataset in self.datasets.values(): t0_, t1_ = dataset.period t0, t1 = min(t0, t0_), max(t1, t1_) - + logger.debug("period detected %f -> %f", t0, t1) self.settings = dict(period=(t0, t1), now=t1) @property diff --git a/src/py_eddy_tracker/observations/observation.py b/src/py_eddy_tracker/observations/observation.py index 043b504d..651aaa9a 100644 --- a/src/py_eddy_tracker/observations/observation.py +++ b/src/py_eddy_tracker/observations/observation.py @@ -1050,6 +1050,9 @@ def compare_units(input_unit, output_unit, name): input_unit, output_unit, ) + return factor + else: + return 1 @classmethod def from_array(cls, arrays, **kwargs): From 9e04d815e3120c55d4b6b2c82b130236373978a4 Mon Sep 17 00:00:00 2001 From: "A. 
Delepoulle" <36040805+AntSimi@users.noreply.github.com> Date: Mon, 19 Sep 2022 10:11:51 +0200 Subject: [PATCH 068/115] - Add method to colorize contour with a field - Add option to force align on to return all step for reference dataset - Add method and property to network to easily select segment and network - Add method to found same track/segment/network in dataset - Rewrite particle candidate to be easily parallelize --- setup.cfg | 14 + setup.py | 1 + src/py_eddy_tracker/__init__.py | 13 +- src/py_eddy_tracker/appli/eddies.py | 18 +- src/py_eddy_tracker/appli/network.py | 133 +++++++++ src/py_eddy_tracker/dataset/grid.py | 38 ++- src/py_eddy_tracker/eddy_feature.py | 6 +- src/py_eddy_tracker/generic.py | 59 +++- src/py_eddy_tracker/misc.py | 19 ++ src/py_eddy_tracker/observations/groups.py | 136 ++++++++- src/py_eddy_tracker/observations/network.py | 277 ++++++++++++++++-- .../observations/observation.py | 73 +++-- src/py_eddy_tracker/poly.py | 41 ++- tests/test_grid.py | 16 +- tests/test_poly.py | 2 +- 15 files changed, 738 insertions(+), 108 deletions(-) create mode 100644 src/py_eddy_tracker/misc.py diff --git a/setup.cfg b/setup.cfg index dfed5c3b..66f3f495 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,8 +1,22 @@ [flake8] +max-line-length = 140 ignore = E203, # whitespace before ':' W503, # line break before binary operator +[isort] +combine_as_imports=True +force_grid_wrap=0 +force_sort_within_sections=True +force_to_top=typing +include_trailing_comma=True +line_length=140 +multi_line_output=3 +skip= + build + docs/source/conf.py + + [versioneer] VCS = git style = pep440 diff --git a/setup.py b/setup.py index 06432bd1..e0767c10 100644 --- a/setup.py +++ b/setup.py @@ -48,6 +48,7 @@ "EddyNetworkGroup = py_eddy_tracker.appli.network:build_network", "EddyNetworkBuildPath = py_eddy_tracker.appli.network:divide_network", "EddyNetworkSubSetter = py_eddy_tracker.appli.network:subset_network", + "EddyNetworkQuickCompare = py_eddy_tracker.appli.network:quick_compare", # anim/gui "EddyAnim = py_eddy_tracker.appli.gui:anim", "GUIEddy = py_eddy_tracker.appli.gui:guieddy", diff --git a/src/py_eddy_tracker/__init__.py b/src/py_eddy_tracker/__init__.py index 275bb795..f3ecec84 100644 --- a/src/py_eddy_tracker/__init__.py +++ b/src/py_eddy_tracker/__init__.py @@ -422,14 +422,20 @@ def identify_time(str_date): nc_name="previous_cost", nc_type="float32", nc_dims=("obs",), - nc_attr=dict(long_name="Previous cost for previous observation", comment="",), + nc_attr=dict( + long_name="Previous cost for previous observation", + comment="", + ), ), next_cost=dict( attr_name=None, nc_name="next_cost", nc_type="float32", nc_dims=("obs",), - nc_attr=dict(long_name="Next cost for next observation", comment="",), + nc_attr=dict( + long_name="Next cost for next observation", + comment="", + ), ), n=dict( attr_name=None, @@ -640,7 +646,8 @@ def identify_time(str_date): nc_type="f4", nc_dims=("obs",), nc_attr=dict( - long_name="Log base 10 background chlorophyll", units="Log(Chl/[mg/m^3])", + long_name="Log base 10 background chlorophyll", + units="Log(Chl/[mg/m^3])", ), ), year=dict( diff --git a/src/py_eddy_tracker/appli/eddies.py b/src/py_eddy_tracker/appli/eddies.py index 4809fddf..df4e7d43 100644 --- a/src/py_eddy_tracker/appli/eddies.py +++ b/src/py_eddy_tracker/appli/eddies.py @@ -243,7 +243,8 @@ def browse_dataset_in( filenames = bytes_(glob(full_path)) dataset_list = empty( - len(filenames), dtype=[("filename", "S500"), ("date", "datetime64[s]")], + len(filenames), + dtype=[("filename", "S500"), ("date", 
"datetime64[s]")], ) dataset_list["filename"] = filenames @@ -371,7 +372,8 @@ def track( logger.info("Longer track saved have %d obs", c.nb_obs_by_tracks.max()) logger.info( - "The mean length is %d observations for long track", c.nb_obs_by_tracks.mean(), + "The mean length is %d observations for long track", + c.nb_obs_by_tracks.mean(), ) long_track.write_file(**kw_write) @@ -381,7 +383,14 @@ def track( def get_group( - dataset1, dataset2, index1, index2, score, invalid=2, low=10, high=60, + dataset1, + dataset2, + index1, + index2, + score, + invalid=2, + low=10, + high=60, ): group1, group2 = dict(), dict() m_valid = (score * 100) >= invalid @@ -490,7 +499,8 @@ def get_values(v, dataset): ] labels = dict( - high=f"{high:0.0f} <= high", low=f"{invalid:0.0f} <= low < {low:0.0f}", + high=f"{high:0.0f} <= high", + low=f"{invalid:0.0f} <= low < {low:0.0f}", ) keys = [labels.get(key, key) for key in list(gr_ref.values())[0].keys()] diff --git a/src/py_eddy_tracker/appli/network.py b/src/py_eddy_tracker/appli/network.py index e9baa7be..bfe226cc 100644 --- a/src/py_eddy_tracker/appli/network.py +++ b/src/py_eddy_tracker/appli/network.py @@ -8,6 +8,7 @@ from .. import EddyParser from ..observations.network import Network, NetworkObservations from ..observations.tracking import TrackEddiesObservations +from numpy import in1d, zeros logger = logging.getLogger("pet") @@ -128,3 +129,135 @@ def subset_network(): if args.period is not None: n = n.extract_with_period(args.period) n.write_file(filename=args.out) + + +def quick_compare(): + parser = EddyParser( + """Tool to have a quick comparison between several network: + - N : network + - S : segment + - Obs : observations + """ + + ) + parser.add_argument("ref", help="Identification file of reference") + parser.add_argument("others", nargs="+", help="Identifications files to compare") + parser.add_argument( + "--path_out", default=None, help="Save each group in separate file" + ) + args = parser.parse_args() + + kw = dict( + include_vars=['longitude', 'latitude', 'time', 'track', 'segment', 'next_obs', 'previous_obs'] + ) + + if args.path_out is not None: + kw = dict() + + ref = NetworkObservations.load_file(args.ref, **kw) + print( + f"[ref] {args.ref} -> {ref.nb_network} network / {ref.nb_segment} segment / {len(ref)} obs " + f"-> {ref.network_size(0)} trash obs, " + f"{len(ref.merging_event())} merging, {len(ref.splitting_event())} spliting" + ) + others = {other: NetworkObservations.load_file(other, **kw) for other in args.others} + + if args.path_out is not None: + groups_ref, groups_other = run_compare(ref, others, **kwargs) + if not exists(args.path_out): + mkdir(args.path_out) + for i, other_ in enumerate(args.others): + dirname_ = f"{args.path_out}/{other_.replace('/', '_')}/" + if not exists(dirname_): + mkdir(dirname_) + for k, v in groups_other[other_].items(): + basename_ = f"other_{k}.nc" + others[other_].index(v).write_file(filename=f"{dirname_}/{basename_}") + for k, v in groups_ref[other_].items(): + basename_ = f"ref_{k}.nc" + ref.index(v).write_file(filename=f"{dirname_}/{basename_}") + return + display_compare(ref, others) + + +def run_compare(ref, others): + outs = dict() + for i, (k, other) in enumerate(others.items()): + out = dict() + print( + f"[{i}] {k} -> {other.nb_network} network / {other.nb_segment} segment / {len(other)} obs " + f"-> {other.network_size(0)} trash obs, " + f"{len(other.merging_event())} merging, {len(other.splitting_event())} spliting" + ) + ref_id, other_id = ref.identify_in(other, size_min=2) + m = 
other_id != -1 + ref_id, other_id = ref_id[m], other_id[m] + out['same N(N)'] = m.sum() + out['same N(Obs)'] = ref.network_size(ref_id).sum() + + # For network which have same obs + ref_, other_ = ref.networks(ref_id), other.networks(other_id) + ref_segu, other_segu = ref_.identify_in(other_, segment=True) + m = other_segu==-1 + ref_track_no_match, _ = ref_.unique_segment_to_id(ref_segu[m]) + ref_segu, other_segu = ref_segu[~m], other_segu[~m] + m = ~in1d(ref_id, ref_track_no_match) + out['same NS(N)'] = m.sum() + out['same NS(Obs)'] = ref.network_size(ref_id[m]).sum() + + # Check merge/split + def follow_obs(d, i_follow): + m = i_follow != -1 + i_follow = i_follow[m] + t, x, y = zeros(m.size, d.time.dtype), zeros(m.size, d.longitude.dtype), zeros(m.size, d.latitude.dtype) + t[m], x[m], y[m] = d.time[i_follow], d.longitude[i_follow], d.latitude[i_follow] + return t, x, y + def next_obs(d, i_seg): + last_i = d.index_segment_track[1][i_seg] - 1 + return follow_obs(d, d.next_obs[last_i]) + def previous_obs(d, i_seg): + first_i = d.index_segment_track[0][i_seg] + return follow_obs(d, d.previous_obs[first_i]) + + tref, xref, yref = next_obs(ref_, ref_segu) + tother, xother, yother = next_obs(other_, other_segu) + + m = (tref == tother) & (xref == xother) & (yref == yother) + print(m.sum(), m.size, ref_segu.size, ref_track_no_match.size) + + tref, xref, yref = previous_obs(ref_, ref_segu) + tother, xother, yother = previous_obs(other_, other_segu) + + m = (tref == tother) & (xref == xother) & (yref == yother) + print(m.sum(), m.size, ref_segu.size, ref_track_no_match.size) + + + + ref_segu, other_segu = ref.identify_in(other, segment=True) + m = other_segu != -1 + out['same S(S)'] = m.sum() + out['same S(Obs)'] = ref.segment_size()[ref_segu[m]].sum() + + outs[k] = out + return outs + +def display_compare(ref, others): + def display(value, ref=None): + if ref: + outs = [f"{v/ref[k] * 100:.1f}% ({v})" for k, v in value.items()] + else: + outs = value + return "".join([f"{v:^18}" for v in outs]) + + datas = run_compare(ref, others) + ref_ = { + 'same N(N)' : ref.nb_network, + "same N(Obs)": len(ref), + 'same NS(N)' : ref.nb_network, + 'same NS(Obs)' : len(ref), + 'same S(S)' : ref.nb_segment, + 'same S(Obs)' : len(ref), + } + print(" ", display(ref_.keys())) + for i, (_, v) in enumerate(datas.items()): + print(f"[{i:2}] ", display(v, ref=ref_)) diff --git a/src/py_eddy_tracker/dataset/grid.py b/src/py_eddy_tracker/dataset/grid.py index bf02a1b0..24b1e25b 100644 --- a/src/py_eddy_tracker/dataset/grid.py +++ b/src/py_eddy_tracker/dataset/grid.py @@ -310,6 +310,11 @@ def __init__( self.load_general_features() self.load() + def populate(self): + if self.dimensions is None: + self.load_general_features() + self.load() + @property def is_centered(self): """Give True if pixel is described with its center's position or @@ -539,7 +544,8 @@ def grid(self, varname, indexs=None): self.vars[varname] = self.vars[varname].T if self.nan_mask: self.vars[varname] = ma.array( - self.vars[varname], mask=isnan(self.vars[varname]), + self.vars[varname], + mask=isnan(self.vars[varname]), ) if not hasattr(self.vars[varname], "mask"): self.vars[varname] = ma.array( @@ -869,7 +875,9 @@ def eddy_identification( num_fac=presampling_multiplier, ) xy_e = uniform_resample( - contour.lon, contour.lat, num_fac=presampling_multiplier, + contour.lon, + contour.lat, + num_fac=presampling_multiplier, ) xy_s = uniform_resample( speed_contour.lon, @@ -1204,7 +1212,9 @@ def setup_coordinates(self): dx = self.x_bounds[1:] - 
self.x_bounds[:-1] dy = self.y_bounds[1:] - self.y_bounds[:-1] if (dx < 0).any() or (dy < 0).any(): - raise Exception("Coordinates in RegularGridDataset must be strictly increasing") + raise Exception( + "Coordinates in RegularGridDataset must be strictly increasing" + ) self._x_step = (self.x_c[1:] - self.x_c[:-1]).mean() self._y_step = (self.y_c[1:] - self.y_c[:-1]).mean() @@ -1736,7 +1746,7 @@ def compute_stencil( self.x_c, self.y_c, data.data, - data.mask, + self.get_mask(data), self.EARTH_RADIUS, vertical=vertical, stencil_halfwidth=stencil_halfwidth, @@ -2285,23 +2295,23 @@ def __init__(self): self.datasets = list() @classmethod - def from_netcdf_cube(cls, filename, x_name, y_name, t_name, heigth=None): + def from_netcdf_cube(cls, filename, x_name, y_name, t_name, heigth=None, **kwargs): new = cls() with Dataset(filename) as h: for i, t in enumerate(h.variables[t_name][:]): - d = RegularGridDataset(filename, x_name, y_name, indexs={t_name: i}) + d = RegularGridDataset(filename, x_name, y_name, indexs={t_name: i}, **kwargs) if heigth is not None: d.add_uv(heigth) new.datasets.append((t, d)) return new @classmethod - def from_netcdf_list(cls, filenames, t, x_name, y_name, indexs=None, heigth=None): + def from_netcdf_list(cls, filenames, t, x_name, y_name, indexs=None, heigth=None, **kwargs): new = cls() for i, _t in enumerate(t): filename = filenames[i] logger.debug(f"load file {i:02d}/{len(t)} t={_t} : {filename}") - d = RegularGridDataset(filename, x_name, y_name, indexs=indexs) + d = RegularGridDataset(filename, x_name, y_name, indexs=indexs, **kwargs) if heigth is not None: d.add_uv(heigth) new.datasets.append((_t, d)) @@ -2349,6 +2359,7 @@ def __iter__(self): def __getitem__(self, item): for t, d in self.datasets: if t == item: + d.populate() return d raise KeyError(item) @@ -2448,10 +2459,13 @@ def advect( :param array y: Latitude of obs to move :param str,array u_name: U field to advect obs :param str,array v_name: V field to advect obs + :param float t_init: time to start advection + :param array,None mask_particule: advect only i mask is True :param int nb_step: Number of iteration before to release data :param int time_step: Number of second for each advection + :param bool rk4: Use rk4 algorithm instead of finite difference - :return: x,y position + :return: t,x,y position .. 
minigallery:: py_eddy_tracker.GridCollection.advect """ @@ -2477,7 +2491,7 @@ def advect( else: mask_particule += isnan(x) + isnan(y) while True: - logger.debug(f"advect : t={t}") + logger.debug(f"advect : t={t/86400}") if (backward and t <= t1) or (not backward and t >= t1): t0, u0, v0, m0 = t1, u1, v1, m1 t1, d1 = generator.__next__() @@ -2507,7 +2521,7 @@ def get_next_time_step(self, t_init): for i, (t, dataset) in enumerate(self.datasets): if t < t_init: continue - + dataset.populate() logger.debug(f"i={i}, t={t}, dataset={dataset}") yield t, dataset @@ -2517,7 +2531,7 @@ def get_previous_time_step(self, t_init): i -= 1 if t > t_init: continue - + dataset.populate() logger.debug(f"i={i}, t={t}, dataset={dataset}") yield t, dataset diff --git a/src/py_eddy_tracker/eddy_feature.py b/src/py_eddy_tracker/eddy_feature.py index 3640b306..0f13eb2a 100644 --- a/src/py_eddy_tracker/eddy_feature.py +++ b/src/py_eddy_tracker/eddy_feature.py @@ -433,8 +433,8 @@ def __init__(self, x, y, z, levels, wrap_x=False, keep_unclose=False): closed_contours = 0 # Count level and contour for i, collection in enumerate(self.contours.collections): - collection.get_nearest_path_bbox_contain_pt = lambda x, y, i=i: self.get_index_nearest_path_bbox_contain_pt( - i, x, y + collection.get_nearest_path_bbox_contain_pt = ( + lambda x, y, i=i: self.get_index_nearest_path_bbox_contain_pt(i, x, y) ) nb_level += 1 @@ -784,7 +784,7 @@ def index_from_nearest_path_with_pt_in_bbox_( d_x = x_value[i_elt_pt] - xpt_ if abs(d_x) > 180: d_x = (d_x + 180) % 360 - 180 - dist = d_x ** 2 + (y_value[i_elt_pt] - ypt) ** 2 + dist = d_x**2 + (y_value[i_elt_pt] - ypt) ** 2 if dist < dist_ref: dist_ref = dist i_ref = i_elt_c diff --git a/src/py_eddy_tracker/generic.py b/src/py_eddy_tracker/generic.py index 94cf321f..c2d7de8a 100644 --- a/src/py_eddy_tracker/generic.py +++ b/src/py_eddy_tracker/generic.py @@ -132,8 +132,8 @@ def distance_grid(lon0, lat0, lon1, lat1): sin_dlon = sin((dlon) * 0.5 * D2R) cos_lat1 = cos(lat0[i] * D2R) cos_lat2 = cos(lat1[j] * D2R) - a_val = sin_dlon ** 2 * cos_lat1 * cos_lat2 + sin_dlat ** 2 - dist[i, j] = 6370.997 * 2 * arctan2(a_val ** 0.5, (1 - a_val) ** 0.5) + a_val = sin_dlon**2 * cos_lat1 * cos_lat2 + sin_dlat**2 + dist[i, j] = 6370.997 * 2 * arctan2(a_val**0.5, (1 - a_val) ** 0.5) return dist @@ -154,8 +154,8 @@ def distance(lon0, lat0, lon1, lat1): sin_dlon = sin((lon1 - lon0) * 0.5 * D2R) cos_lat1 = cos(lat0 * D2R) cos_lat2 = cos(lat1 * D2R) - a_val = sin_dlon ** 2 * cos_lat1 * cos_lat2 + sin_dlat ** 2 - return 6370997.0 * 2 * arctan2(a_val ** 0.5, (1 - a_val) ** 0.5) + a_val = sin_dlon**2 * cos_lat1 * cos_lat2 + sin_dlat**2 + return 6370997.0 * 2 * arctan2(a_val**0.5, (1 - a_val) ** 0.5) @njit(cache=True) @@ -367,7 +367,7 @@ def simplify(x, y, precision=0.1): :return: (x,y) :rtype: (array,array) """ - precision2 = precision ** 2 + precision2 = precision**2 nb = x.shape[0] # will be True for kept values mask = ones(nb, dtype=bool_) @@ -399,7 +399,7 @@ def simplify(x, y, precision=0.1): if d_y > precision: x_previous, y_previous = x_, y_ continue - d2 = d_x ** 2 + d_y ** 2 + d2 = d_x**2 + d_y**2 if d2 > precision2: x_previous, y_previous = x_, y_ continue @@ -517,8 +517,8 @@ def coordinates_to_local(lon, lat, lon0, lat0): sin_dlon = sin(dlon * 0.5) cos_lat0 = cos(lat0 * D2R) cos_lat = cos(lat * D2R) - a_val = sin_dlon ** 2 * cos_lat0 * cos_lat + sin_dlat ** 2 - module = R * 2 * arctan2(a_val ** 0.5, (1 - a_val) ** 0.5) + a_val = sin_dlon**2 * cos_lat0 * cos_lat + sin_dlat**2 + module = R * 2 * 
arctan2(a_val**0.5, (1 - a_val) ** 0.5)

     azimuth = pi / 2 - arctan2(
         cos_lat * sin(dlon),
@@ -541,7 +541,7 @@ def local_to_coordinates(x, y, lon0, lat0):
     """
     D2R = pi / 180.0
     R = 6370997
-    d = (x ** 2 + y ** 2) ** 0.5 / R
+    d = (x**2 + y**2) ** 0.5 / R
     a = -(arctan2(y, x) - pi / 2)
     lat = arcsin(sin(lat0 * D2R) * cos(d) + cos(lat0 * D2R) * sin(d) * cos(a))
     lon = (
@@ -612,3 +612,44 @@ def build_circle(x0, y0, r):
     angle = radians(linspace(0, 360, 50))
     x_norm, y_norm = cos(angle), sin(angle)
     return x_norm * r + x0, y_norm * r + y0
+
+
+@njit(cache=True)
+def window_index(x, x0, half_window=1):
+    """
+    Give, for a fixed half_window, the first and last index around each center
+    x0, in an unsorted array.
+
+    :param array x: array of values
+    :param array x0: array of window centers
+    :param float half_window: half window
+    """
+    # Sort array, bounds will be sorted also
+    i_ordered = x.argsort()
+    nb_x, nb_pt = x.size, x0.size
+    first_index = empty(nb_pt, dtype=i_ordered.dtype)
+    last_index = empty(nb_pt, dtype=i_ordered.dtype)
+    # First bound to find
+    j_min, j_max = 0, 0
+    x_min = x0[j_min] - half_window
+    x_max = x0[j_max] + half_window
+    # We iterate on ordered x
+    for i, i_x in enumerate(i_ordered):
+        x_ = x[i_x]
+        # if x is bigger than x_min, we found a bound and search the next one
+        while x_ > x_min and j_min < nb_pt:
+            first_index[j_min] = i
+            j_min += 1
+            x_min = x0[j_min] - half_window
+        # if x is bigger than x_max, we found a bound and search the next one
+        while x_ > x_max and j_max < nb_pt:
+            last_index[j_max] = i
+            j_max += 1
+            x_max = x0[j_max] + half_window
+        if j_max == nb_pt:
+            break
+    for i in range(j_min, nb_pt):
+        first_index[i] = nb_x
+    for i in range(j_max, nb_pt):
+        last_index[i] = nb_x
+    return i_ordered, first_index, last_index
diff --git a/src/py_eddy_tracker/misc.py b/src/py_eddy_tracker/misc.py
new file mode 100644
index 00000000..eb0dc5d1
--- /dev/null
+++ b/src/py_eddy_tracker/misc.py
@@ -0,0 +1,19 @@
+import re
+from matplotlib.animation import FuncAnimation
+
+
+class VideoAnimation(FuncAnimation):
+    def _repr_html_(self, *args, **kwargs):
+        """To get video in html and have a player"""
+        content = self.to_html5_video()
+        return re.sub(
+            r'width="[0-9]*"\sheight="[0-9]*"', 'width="100%" height="100%"', content
+        )
+
+    def save(self, *args, **kwargs):
+        if args[0].endswith("gif"):
+            # A gif is only requested to build a thumbnail, which is not used,
+            # but costs as much time as the video; so create an empty file to save time.
+            with open(args[0], "w") as _:
+                pass
+            return
+        return super().save(*args, **kwargs)
\ No newline at end of file
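A minimal sketch of the intended entry point for the per-step advection added
in the groups.py hunk below, using only names visible in this patch (file
names and mesh resolution are assumptions):

    from py_eddy_tracker.dataset.grid import GridCollection
    from py_eddy_tracker.observations.network import NetworkObservations

    n = NetworkObservations.load_file("network_atlas.nc")
    cube = GridCollection.from_netcdf_cube(
        "uv_cube.nc", "longitude", "latitude", "time", heigth="adt"
    )
    # For each observation, the two most likely target observations `dt` days
    # later and the percentage of seeded particles reaching each of them
    i_target, pct = n.particle_candidate_atlas(cube, space_step=1 / 50.0, dt=2)

diff --git a/src/py_eddy_tracker/observations/groups.py b/src/py_eddy_tracker/observations/groups.py
index fcb6733b..66574407 100644
--- a/src/py_eddy_tracker/observations/groups.py
+++ b/src/py_eddy_tracker/observations/groups.py
@@ -3,10 +3,13 @@

 from numba import njit
 from numba import types as nb_types
-from numpy import arange, int32, interp, median, where, zeros
+from numpy import arange, int32, interp, median, where, zeros, full, isnan

 from .observation import EddiesObservations
+from ..generic import window_index
+from ..poly import create_meshed_particles, poly_indexs
+

 logger = logging.getLogger("pet")

@@ -89,6 +92,39 @@ def advect(x, y, c, t0, n_days, u_name='u', v_name='v'):
     return t, x, y

+
+def particle_candidate_step(t_start, contours_start, contours_end, space_step, dt, c, **kwargs):
+    """Select particles within eddies, advect them, return target observation and associated percentages.
+    For one time step.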
+
+    :param int t_start: julian day of the advection
+    :param (np.array(float),np.array(float)) contours_start: origin contours
+    :param (np.array(float),np.array(float)) contours_end: destination contours
+    :param float space_step: step between 2 particles
+    :param int dt: duration of advection
+    :param `~py_eddy_tracker.dataset.grid.GridCollection` c: GridCollection with speed for particles
+    :params dict kwargs: dict of params given to advection
+    :return (np.array,np.array): return target index and associated percentage
+    """
+    # Create particles in start contours
+    x, y, i_start = create_meshed_particles(*contours_start, space_step)
+    # Advect particles
+    kw = dict(nb_step=6, time_step=86400 / 6)
+    p = c.advect(x, y, t_init=t_start, **kwargs, **kw)
+    for _ in range(dt):
+        _, x, y = p.__next__()
+    m = ~(isnan(x) + isnan(y))
+    i_end = full(x.shape, -1, dtype="i4")
+    if m.any():
+        # Identify eddies for each particle still alive in the end contours
+        i_end[m] = poly_indexs(x[m], y[m], *contours_end)
+    shape = (contours_start[0].shape[0], 2)
+    # Get target for each contour
+    i_target, pct_target = full(shape, -1, dtype="i4"), zeros(shape, dtype="f8")
+    nb_end = contours_end[0].shape[0]
+    get_targets(i_start, i_end, i_target, pct_target, nb_end)
+    return i_target, pct_target.astype('i1')
+
+
 def particle_candidate(
     c,
     eddies,
@@ -120,13 +156,8 @@ def particle_candidate(
     translate_start = where(m_start)[0]

     # Create particles in specified contour
-    if contour_start == "speed":
-        x, y, i_start = e.create_particles(step_mesh, intern=True)
-    elif contour_start == "effective":
-        x, y, i_start = e.create_particles(step_mesh, intern=False)
-    else:
-        x, y, i_start = e.create_particles(step_mesh, intern=True)
-        print("The contour_start was not correct, speed contour is used")
+    intern = False if contour_start == "effective" else True
+    x, y, i_start = e.create_particles(step_mesh, intern=intern)

     # Advection
     t_end, x, y = advect(x, y, c, t_start, **kwargs)
@@ -138,18 +169,54 @@ def particle_candidate(
     translate_end = where(m_end)[0]

     # Id eddies for each alive particle in specified contour
-    if contour_end == "speed":
-        i_end = e_end.contains(x, y, intern=True)
-    elif contour_end == "effective":
-        i_end = e_end.contains(x, y, intern=False)
-    else:
-        i_end = e_end.contains(x, y, intern=True)
-        print("The contour_end was not correct, speed contour is used")
+    intern = False if contour_end == "effective" else True
+    i_end = e_end.contains(x, y, intern=intern)

     # compute matrix and fill target array
     get_matrix(i_start, i_end, translate_start, translate_end, i_target, pct)

+
+@njit(cache=True)
+def get_targets(i_start, i_end, i_target, pct, nb_end):
+    """Compute target observations and associated percentages
+
+    :param array(int) i_start: indices at time 0
+    :param array(int) i_end: indices at time N
+    :param array(int) i_target: corresponding obs where particles are advected
+    :param array(int) pct: corresponding percentage of advected particles
+    :param int nb_end: number of contours at time N
+    """
+    nb_start = i_target.shape[0]
+    # Matrix which will store the count for every couple
+    counts = zeros((nb_start, nb_end), dtype=nb_types.int32)
+    # Number of particles in each origin observation
+    ref = zeros(nb_start, dtype=nb_types.int32)
+    # For each particle
+    for i in range(i_start.size):
+        i_end_ = i_end[i]
+        i_start_ = i_start[i]
+        ref[i_start_] += 1
+        if i_end_ != -1:
+            counts[i_start_, i_end_] += 1
+    # From i to j
+    for i in range(nb_start):
+        for j in range(nb_end):
+            count = counts[i, j]
+            if count == 0:
+                continue
+            pct_ = count / ref[i] * 100
+            pct_0 = pct[i, 0]
+            # If percent is higher than previous stored in rank 0
+            if pct_ > pct_0:
+                pct[i, 1] = pct_0
+                pct[i, 0] = pct_
+                i_target[i, 1] = i_target[i, 0]
+                i_target[i, 0] = j
+            # If percent is higher than previous stored in rank 1
+            elif pct_ > pct[i, 1]:
+                pct[i, 1] = pct_
+                i_target[i, 1] = j
+

 @njit(cache=True)
 def get_matrix(i_start, i_end, translate_start, translate_end, i_target, pct):
     """Compute target observation and associated percentages
@@ -278,3 +345,42 @@ def keep_tracks_by_date(self, date, nb_days):
             mask[i] = True

         return self.extract_with_mask(mask)
+
+    def particle_candidate_atlas(self, cube, space_step, dt, start_intern=False, end_intern=False, **kwargs):
+        """Select particles within eddies, advect them, return target observation and associated percentages
+
+        :param `~py_eddy_tracker.dataset.grid.GridCollection` cube: GridCollection with speed for particles
+        :param float space_step: step between 2 particles
+        :param int dt: duration of advection
+        :param bool start_intern: use intern or extern contour at injection, defaults to False
+        :param bool end_intern: use intern or extern contour at end of advection, defaults to False
+        :params dict kwargs: dict of params given to advection
+        :return (np.array,np.array): return target index and associated percentage
+        """
+        t_start, t_end = int(self.period[0]), int(self.period[1])
+        # Pre-compute to get time index
+        i_sort, i_start, i_end = window_index(self.time, arange(t_start, t_end + 1), .5)
+        # Out shape
+        shape = (len(self), 2)
+        i_target, pct = full(shape, -1, dtype="i4"), zeros(shape, dtype="i1")
+        # Backward or forward
+        times = arange(t_start, t_end - dt) if dt > 0 else arange(t_start + dt, t_end)
+        for t in times:
+            # Get index for origin
+            i = t - t_start
+            indexs0 = i_sort[i_start[i]:i_end[i]]
+            # Get index for end
+            i = t + dt - t_start
+            indexs1 = i_sort[i_start[i]:i_end[i]]
+            # Get contour data
+            contours0 = [self[label][indexs0] for label in self.intern(start_intern)]
+            contours1 = [self[label][indexs1] for label in self.intern(end_intern)]
+            # Get local result
+            i_target_, pct_ = particle_candidate_step(t, contours0, contours1, space_step, dt, cube, **kwargs)
+            # Merge result
+            m = i_target_ != -1
+            i_target_[m] = indexs1[i_target_[m]]
+            i_target[indexs0] = i_target_
+            pct[indexs0] = pct_
+        return i_target, pct
\ No newline at end of file
diff --git a/src/py_eddy_tracker/observations/network.py b/src/py_eddy_tracker/observations/network.py
index 4a884705..b633fc40 100644
--- a/src/py_eddy_tracker/observations/network.py
+++ b/src/py_eddy_tracker/observations/network.py
@@ -8,6 +8,7 @@
 import netCDF4
 import zarr
+from numba.typed import List
 from numba import njit
 from numpy import (
     arange,
@@ -110,6 +111,8 @@ class NetworkObservations(GroupEddiesObservations):
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
         self._index_network = None
+        self._index_segment_track = None
+        self._segment_track_array = None

     def find_segments_relative(self, obs, stopped=None, order=1):
         """
@@ -161,16 +164,64 @@ def index_network(self):
             self._index_network = build_index(self.track)
         return self._index_network

-    def network_size(self, id_networks):
+    @property
+    def index_segment_track(self):
+        if self._index_segment_track is None:
+            self._index_segment_track = build_index(self.segment_track_array)
+        return self._index_segment_track
+
+    def segment_size(self):
+        return self.index_segment_track[1] - self.index_segment_track[0]
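+
+    # Note: index_segment_track follows the same (first, last, ref) convention
+    # as index_network, via build_index: the observations of the unique segment
+    # id `i` live in obs[first[i - ref]:last[i - ref]], so segment_size() above
+    # is simply last - first.
+
+    @property
+    def ref_segment_track_index(self):
+        return 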
self.index_segment_track[2] + + @property + def ref_index(self): + return self.index_network[2] + + def network_segment_size(self, id_networks=None): + """Get number of segment by network + + :return array: + """ + i0, i1, ref = build_index(self.track[self.index_segment_track[0]]) + if id_networks is None: + return i1-i0 + else: + i = id_networks - ref + return i1[i] - i0[i] + + def network_size(self, id_networks=None): """ Return size for specified network - :param list,array id_networks: ids to identify network + :param list,array, None id_networks: ids to identify network """ - i = id_networks - self.index_network[2] - i_start, i_stop = self.index_network[0][i], self.index_network[1][i] - return i_stop - i_start + if id_networks is None: + return self.index_network[1] - self.index_network[0] + else: + i = id_networks - self.index_network[2] + return self.index_network[1][i] - self.index_network[0][i] + def unique_segment_to_id(self, id_unique): + """Return id network and id segment for a unique id + + :param array id_unique: + """ + i = self.index_segment_track[0][id_unique] - self.ref_segment_track_index + return self.track[i], self.segment[i] + + def segment_slice(self, id_network, id_segment): + """ + Return slice for one segment + + :param int id_network: id to identify network + :param int id_segment: id to identify segment + """ + raise Exception('need to be implemented') + def network_slice(self, id_network): """ Return slice for one network @@ -487,6 +538,7 @@ def segment_relative_order(self, seg_origine): """ Compute the relative order of each segment to the chosen segment """ + self.only_one_network() i_s, i_e, i_ref = build_index(self.segment) segment_connexions = self.connexions() relative_tr = -ones(i_s.shape, dtype="i4") @@ -634,7 +686,7 @@ def only_one_network(self): if there are more than one network """ _, i_start, _ = self.index_network - if len(i_start) > 1: + if i_start.size > 1: raise Exception("Several networks") def position_filter(self, median_half_window, loess_half_window): @@ -832,7 +884,7 @@ def map_segment(self, method, y, same=True, **kw): out = empty(y.shape, **kw) else: out = list() - for i, b0, b1 in self.iter_on(self.segment_track_array): + for i, _, _ in self.iter_on(self.segment_track_array): res = method(y[i]) if same: out[i] = res @@ -1025,7 +1077,9 @@ def extract_event(self, indices): @property def segment_track_array(self): """Return a unique segment id when multiple networks are considered""" - return build_unique_array(self.segment, self.track) + if self._segment_track_array is None: + self._segment_track_array = build_unique_array(self.segment, self.track) + return self._segment_track_array def birth_event(self): """Extract birth events. 
@@ -1081,7 +1135,7 @@ def merging_event(self, triplet=False, only_index=False):

         if triplet:
             if only_index:
-                return (idx_m1, idx_m0, idx_m0_stop)
+                return array(idx_m1), array(idx_m0), array(idx_m0_stop)
             else:
                 return (
                     self.extract_event(idx_m1),
@@ -1119,12 +1173,12 @@ def splitting_event(self, triplet=False, only_index=False):

         if triplet:
             if only_index:
-                return (idx_s0, idx_s1, idx_s1_start)
+                return array(idx_s0), array(idx_s1), array(idx_s1_start)
             else:
                 return (
-                    self.extract_event(list(idx_s0)),
-                    self.extract_event(list(idx_s1)),
-                    self.extract_event(list(idx_s1_start)),
+                    self.extract_event(idx_s0),
+                    self.extract_event(idx_s1),
+                    self.extract_event(idx_s1_start),
                 )
         else:
@@ -1159,14 +1213,108 @@ def dissociate_network(self):
         self.next_obs[:] = translate[n]
         self.previous_obs[:] = translate[p]

+    def network_segment(self, id_network, id_segment):
+        return self.extract_with_mask(self.segment_slice(id_network, id_segment))
+
     def network(self, id_network):
         return self.extract_with_mask(self.network_slice(id_network))

+    def networks_mask(self, id_networks, segment=False):
+        if segment:
+            return generate_mask_from_ids(id_networks, self.track.size, *self.index_segment_track)
+        else:
+            return generate_mask_from_ids(id_networks, self.track.size, *self.index_network)
+
     def networks(self, id_networks):
-        m = zeros(self.track.shape, dtype=bool)
-        for tr in id_networks:
-            m[self.network_slice(tr)] = True
-        return self.extract_with_mask(m)
+        return self.extract_with_mask(generate_mask_from_ids(id_networks, self.track.size, *self.index_network))
+
+    @property
+    def nb_network(self):
+        """
+        Count and return the number of networks
+        """
+        return (self.network_size() != 0).sum()
+
+    @property
+    def nb_segment(self):
+        """
+        Count and return the number of segments across all networks
+        """
+        return self.index_segment_track[0].size
+
+    def identify_in(self, other, size_min=1, segment=False):
+        """
+        Return couples of segments or networks which are equal
+
+        :param other: other atlas to compare
+        :param int size_min: minimal number of observations in network/segment
+        :param bool segment: segment mode
+        """
+        if segment:
+            counts = self.segment_size(), other.segment_size()
+            i_self_ref, i_other_ref = self.ref_segment_track_index, other.ref_segment_track_index
+            var_id = 'segment'
+        else:
+            counts = self.network_size(), other.network_size()
+            i_self_ref, i_other_ref = self.ref_index, other.ref_index
+            var_id = 'track'
+        # object to contain indices of couples
+        in_self, in_other = list(), list()
+        # We iterate on items of same size
+        for i_self, i_other, i0, _ in self.align_on(other, counts, all_ref=True):
+            if i0 < size_min:
+                continue
+            if isinstance(i_other, slice):
+                i_other = arange(i_other.start, i_other.stop)
+            # all_ref gives all items of self; sometimes there is nothing to compare with in other
+            if i_other.size == 0:
+                id_self = i_self + i_self_ref
+                in_self.append(id_self)
+                in_other.append(-ones(id_self.shape, dtype=id_self.dtype))
+                continue
+            if isinstance(i_self, slice):
+                i_self = arange(i_self.start, i_self.stop)
+            # We get absolute ids
+            id_self, id_other = i_self + i_self_ref, i_other + i_other_ref
+            # We compute masks to select data
+            m_self, m_other = self.networks_mask(id_self, segment), other.networks_mask(id_other, segment)
+
+            # We extract obs
+            obs_self, obs_other = self.obs[m_self], other.obs[m_other]
+            x1, y1, t1 = obs_self['lon'], obs_self['lat'], obs_self['time']
+            x2, y2, t2 = obs_other['lon'], obs_other['lat'], obs_other['time']
+
+            if segment:
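+                # Note: items are matched exactly; each candidate is sorted on
+                # (time, lon, lat) and two segments/networks are considered the
+                # same only if every observation coincides (see same_position).
+                ids1 = build_unique_array(obs_self['segment'], 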
obs_self['track']) + ids2 = build_unique_array(obs_other['segment'], obs_other['track']) + label1 = self.segment_track_array[m_self] + label2 = other.segment_track_array[m_other] + else: + label1, label2 = ids1, ids2 = obs_self[var_id], obs_other[var_id] + # For each item we get index to sort + i01, indexs1, id1 = list(), List(), list() + for sl_self, id_, _ in self.iter_on(ids1): + i01.append(sl_self.start) + indexs1.append(obs_self[sl_self].argsort(order=['time', 'lon', 'lat'])) + id1.append(label1[sl_self.start]) + i02, indexs2, id2 = list(), List(), list() + for sl_other, _, _ in other.iter_on(ids2): + i02.append(sl_other.start) + indexs2.append(obs_other[sl_other].argsort(order=['time', 'lon', 'lat'])) + id2.append(label2[sl_other.start]) + + id1, id2 = array(id1), array(id2) + # We search item from self in item of others + i_local_target = same_position(x1, y1, t1, x2, y2, t2, array(i01), array(i02), indexs1, indexs2) + + # -1 => no item found in other dataset + m = i_local_target != -1 + in_self.append(id1) + track2_ = -ones(id1.shape, dtype='i4') + track2_[m] = id2[i_local_target[m]] + in_other.append(track2_) + + return concatenate(in_self), concatenate(in_other) @classmethod def __tag_segment(cls, seg, tag, groups, connexions): @@ -1647,6 +1795,27 @@ def date2file(julian_day): ) return itf_final, ptf_final + def mask_obs_close_event(self, merging=True, spliting=True, dt=3): + """Build a mask of close observation from event + + :param n: Network + :param bool merging: select merging event, defaults to True + :param bool spliting: select splitting event, defaults to True + :param int dt: delta of time max , defaults to 3 + :return array: mask + """ + m = zeros(len(self), dtype='bool') + if merging: + i_target, ip1, ip2 = self.merging_event(triplet=True, only_index=True) + mask_follow_obs(m, self.previous_obs, self.time, ip1, dt) + mask_follow_obs(m, self.previous_obs, self.time, ip2, dt) + mask_follow_obs(m, self.next_obs, self.time, i_target, dt) + if spliting: + i_target, in1, in2 = self.splitting_event(triplet=True, only_index=True) + mask_follow_obs(m, self.next_obs, self.time, in1, dt) + mask_follow_obs(m, self.next_obs, self.time, in2, dt) + mask_follow_obs(m, self.previous_obs, self.time, i_target, dt) + return m class Network: __slots__ = ( @@ -1864,3 +2033,77 @@ def new_numbering(segs, start=0): @njit(cache=True) def ptp(values): return values.max() - values.min() + +@njit(cache=True) +def generate_mask_from_ids(id_networks, nb, istart, iend, i0): + """From list of id, we generate a mask + + :param array id_networks: list of ids + :param int nb: size of mask + :param array istart: first index for each id from :py:meth:`~py_eddy_tracker.generic.build_index` + :param array iend: last index for each id from :py:meth:`~py_eddy_tracker.generic.build_index` + :param int i0: ref index from :py:meth:`~py_eddy_tracker.generic.build_index` + :return array: return a mask + """ + m = zeros(nb, dtype='bool') + for i in id_networks: + for j in range(istart[i-i0], iend[i-i0]): + m[j] = True + return m + +@njit(cache=True) +def same_position(x0, y0, t0, x1, y1, t1, i00, i01, i0, i1): + """Return index of track/segment found in other dataset + + :param array x0: + :param array y0: + :param array t0: + :param array x1: + :param array y1: + :param array t1: + :param array i00: First index of track/segment/network in dataset0 + :param array i01: First index of track/segment/network in dataset1 + :param List(array) i0: list of array which contain index to order dataset0 + :param List(array) 
i1: list of array which contain index to order dataset1 + :return array: index of dataset1 which match with dataset0, -1 => no match + """ + nb0, nb1 = i00.size, i01.size + i_target = -ones(nb0, dtype='i4') + # To avoid to compare multiple time, if already match + used1 = zeros(nb1, dtype='bool') + for j0 in range(nb0): + for j1 in range(nb1): + if used1[j1]: + continue + test = True + for i0_, i1_ in zip(i0[j0], i1[j1]): + i0_ += i00[j0] + i1_ += i01[j1] + if t0[i0_] != t1[i1_] or x0[i0_] != x1[i1_] or y0[i0_] != y1[i1_]: + test = False + break + if test: + i_target[j0] = j1 + used1[j1] = True + break + return i_target + +@njit(cache=True) +def mask_follow_obs(m, next_obs, time, indexs, dt=3): + """Generate a mask to select close obs in time from index + + :param array m: mask to fill with True + :param array next_obs: index of the next observation + :param array time: time of each obs + :param array indexs: index to start follow + :param int dt: delta of time max from index, defaults to 3 + """ + for i in indexs: + t0 = time[i] + m[i] = True + i_next = next_obs[i] + dt_ = abs(time[i_next] - t0) + while dt_ < dt and i_next != -1: + m[i_next] = True + i_next = next_obs[i_next] + dt_ = abs(time[i_next] - t0) diff --git a/src/py_eddy_tracker/observations/observation.py b/src/py_eddy_tracker/observations/observation.py index 651aaa9a..384f537f 100644 --- a/src/py_eddy_tracker/observations/observation.py +++ b/src/py_eddy_tracker/observations/observation.py @@ -11,6 +11,7 @@ import packaging.version import zarr from matplotlib.cm import get_cmap +from matplotlib.collections import LineCollection from matplotlib.collections import PolyCollection from matplotlib.colors import Normalize from netCDF4 import Dataset @@ -70,7 +71,7 @@ poly_indexs, reduce_size, vertice_overlap, - winding_number_poly, + create_meshed_particles, ) logger = logging.getLogger("pet") @@ -576,12 +577,12 @@ def iter_on(self, xname, bins=None): Yield observation group for each bin. :param str,array xname: - :param array bins: bounds of each bin , - :return: index or mask, bound low, bound up + :param array bins: bounds of each bin + :yield array,float,float: index in self, lower bound, upper bound .. minigallery:: py_eddy_tracker.EddiesObservations.iter_on """ - x = self[xname] if isinstance(xname, str) else xname + x = self.parse_varname(xname) d = x[1:] - x[:-1] if bins is None: bins = arange(x.min(), x.max() + 2) @@ -617,14 +618,23 @@ def iter_on(self, xname, bins=None): i_bins = i[i0_] yield slice(i0_, i1_), bins[i_bins], bins[i_bins + 1] - def align_on(self, other, var_name="time", **kwargs): + def align_on(self, other, var_name="time", all_ref=False, **kwargs): """ - Align the time indices of two datasets. + Align the variable indices of two datasets. + + :param other: other compare with self + :param str,tuple var_name: variable name to align or two array, defaults to "time" + :param bool all_ref: yield all value of ref, if false only common value, defaults to False + :yield array,array,float,float: index in self, index in other, lower bound, upper bound .. 
minigallery:: py_eddy_tracker.EddiesObservations.align_on
         """
-        iter_self = self.iter_on(var_name, **kwargs)
-        iter_other = other.iter_on(var_name, **kwargs)
+        if isinstance(var_name, str):
+            iter_self = self.iter_on(var_name, **kwargs)
+            iter_other = other.iter_on(var_name, **kwargs)
+        else:
+            iter_self = self.iter_on(var_name[0], **kwargs)
+            iter_other = other.iter_on(var_name[1], **kwargs)
         indexs_other, b0_other, b1_other = iter_other.__next__()
         for indexs_self, b0_self, b1_self in iter_self:
             if b0_self > b0_other:
@@ -634,6 +644,8 @@ def align_on(self, other, var_name="time", all_ref=False, **kwargs):
                 except StopIteration:
                     break
             if b0_self < b0_other:
+                if all_ref:
+                    yield indexs_self, empty(0, dtype=indexs_self.dtype), b0_self, b1_self
                 continue
             yield indexs_self, indexs_other, b0_self, b1_self
@@ -1057,11 +1069,6 @@ def compare_units(input_unit, output_unit, name):
     @classmethod
     def from_array(cls, arrays, **kwargs):
         nb = arrays["time"].size
-        # if hasattr(handler, "track_array_variables"):
-        #     kwargs["track_array_variables"] = handler.track_array_variables
-        #     kwargs["array_variables"] = handler.array_variables.split(",")
-        # if len(handler.track_extra_variables) > 1:
-        #     kwargs["track_extra_variables"] = handler.track_extra_variables.split(",")
         eddies = cls(size=nb, **kwargs)
         for k, v in arrays.items():
             eddies.obs[k] = v
@@ -2036,6 +2043,26 @@ def format_label(self, label):
             nb_obs=len(self),
         )

+    def display_color(self, ax, field, intern=False, **kwargs):
+        """Plot colored contours of eddies
+
+        :param matplotlib.axes.Axes ax: matplotlib axes used to draw
+        :param str,array field: color field
+        :param bool intern: if True, draw the speed contour
+        :param dict kwargs: look at :py:meth:`matplotlib.collections.LineCollection`
+
+        .. minigallery:: py_eddy_tracker.EddiesObservations.display_color
+        """
+        xname, yname = self.intern(intern)
+        x, y = self[xname], self[yname]
+        c = self.parse_varname(field)
+        cmap = get_cmap(kwargs.pop('cmap', 'Spectral_r'))
+        cmin, cmax = kwargs.pop('vmin', c.min()), kwargs.pop('vmax', c.max())
+        colors = cmap((c - cmin) / (cmax - cmin))
+        lines = LineCollection([create_vertice(i, j) for i, j in zip(x, y)], colors=colors, **kwargs)
+        ax.add_collection(lines)
+        return lines
+
     def display(self, ax, ref=None, extern_only=False, intern_only=False, **kwargs):
         """Plot the speed and effective (dashed) contour of the eddies
@@ -2353,7 +2380,7 @@ def create_particles(self, step, intern=True):
         """
         xname, yname = self.intern(intern)
-        return _create_meshed_particles(self[xname], self[yname], step)
+        return create_meshed_particles(self[xname], self[yname], step)


 @njit(cache=True)
@@ -2518,24 +2545,6 @@ def grid_stat(x_c, y_c, grid, x, y, result, circular=False, method="mean"):
             result[elt] = v_max


-@njit(cache=True)
-def _create_meshed_particles(lons, lats, step):
-    x_out, y_out, i_out = list(), list(), list()
-    for i, (lon, lat) in enumerate(zip(lons, lats)):
-        lon_min, lon_max = lon.min(), lon.max()
-        lat_min, lat_max = lat.min(), lat.max()
-        lon_min -= lon_min % step
-        lon_max -= lon_max % step - step * 2
-        lat_min -= lat_min % step
-        lat_max -= lat_max % step - step * 2
-
-        for x in arange(lon_min, lon_max, step):
-            for y in arange(lat_min, lat_max, step):
-                if winding_number_poly(x, y, create_vertice(*reduce_size(lon, lat))):
-                    x_out.append(x), y_out.append(y), i_out.append(i)
-    return array(x_out), array(y_out), array(i_out)
-
-
 class VirtualEddiesObservations(EddiesObservations):
     """Class to work with virtual obs"""
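A short usage sketch for the display_color method added above (the input file
name and color limits are assumptions):

    from matplotlib import pyplot as plt
    from py_eddy_tracker.observations.observation import EddiesObservations

    eddies = EddiesObservations.load_file("anticyclonic.nc")
    fig, ax = plt.subplots(figsize=(10, 5))
    # Speed contours colored by speed radius (m), one line per eddy
    eddies.display_color(ax, "radius_s", intern=True, vmin=20e3, vmax=120e3)
    ax.set_xlim(0, 360), ax.set_ylim(-80, 80)
    plt.show()

diff --git a/src/py_eddy_tracker/poly.py 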
b/src/py_eddy_tracker/poly.py index bb9ac79e..6baf5ad8 100644 --- a/src/py_eddy_tracker/poly.py +++ b/src/py_eddy_tracker/poly.py @@ -287,6 +287,27 @@ def close_center(x0, y0, x1, y1, delta=0.1): return array(i), array(j), array(c) +@njit(cache=True) +def create_meshed_particles(lons, lats, step): + x_out, y_out, i_out = list(), list(), list() + nb = lons.shape[0] + for i in range(nb): + lon, lat = lons[i], lats[i] + vertice = create_vertice(*reduce_size(lon, lat)) + lon_min, lon_max = lon.min(), lon.max() + lat_min, lat_max = lat.min(), lat.max() + y0 = lat_min - lat_min % step + x = lon_min - lon_min % step + while x <= lon_max: + y = y0 + while y <= lat_max: + if winding_number_poly(x, y, vertice): + x_out.append(x), y_out.append(y), i_out.append(i) + y += step + x += step + return array(x_out), array(y_out), array(i_out) + + @njit(cache=True, fastmath=True) def bbox_intersection(x0, y0, x1, y1): """ @@ -503,7 +524,7 @@ def fit_circle(x, y): norme = (x[1:] - x_mean) ** 2 + (y[1:] - y_mean) ** 2 norme_max = norme.max() - scale = norme_max ** 0.5 + scale = norme_max**0.5 # Form matrix equation and solve it # Maybe put f4 @@ -514,7 +535,7 @@ def fit_circle(x, y): (x0, y0, radius), _, _, _ = lstsq(datas, norme / norme_max) # Unscale data and get circle variables - radius += x0 ** 2 + y0 ** 2 + radius += x0**2 + y0**2 radius **= 0.5 x0 *= scale y0 *= scale @@ -546,21 +567,21 @@ def fit_ellipse(x, y): """ nb = x.shape[0] datas = ones((nb, 5), dtype=x.dtype) - datas[:, 0] = x ** 2 + datas[:, 0] = x**2 datas[:, 1] = x * y - datas[:, 2] = y ** 2 + datas[:, 2] = y**2 datas[:, 3] = x datas[:, 4] = y (a, b, c, d, e), _, _, _ = lstsq(datas, ones(nb, dtype=x.dtype)) - det = b ** 2 - 4 * a * c + det = b**2 - 4 * a * c if det > 0: print(det) x0 = (2 * c * d - b * e) / det y0 = (2 * a * e - b * d) / det - AB1 = 2 * (a * e ** 2 + c * d ** 2 - b * d * e - det) + AB1 = 2 * (a * e**2 + c * d**2 - b * d * e - det) AB2 = a + c - AB3 = ((a - c) ** 2 + b ** 2) ** 0.5 + AB3 = ((a - c) ** 2 + b**2) ** 0.5 A = -((AB1 * (AB2 + AB3)) ** 0.5) / det B = -((AB1 * (AB2 - AB3)) ** 0.5) / det theta = arctan((c - a - AB3) / b) @@ -621,7 +642,7 @@ def fit_circle_(x, y): # Linear regression (a, b, c), _, _, _ = lstsq(datas, x[1:] ** 2 + y[1:] ** 2) x0, y0 = a / 2.0, b / 2.0 - radius = (c + x0 ** 2 + y0 ** 2) ** 0.5 + radius = (c + x0**2 + y0**2) ** 0.5 err = shape_error(x, y, x0, y0, radius) return x0, y0, radius, err @@ -646,14 +667,14 @@ def shape_error(x, y, x0, y0, r): :rtype: float """ # circle area - c_area = (r ** 2) * pi + c_area = (r**2) * pi p_area = poly_area(x, y) nb = x.shape[0] x, y = x.copy(), y.copy() # Find distance between circle center and polygon for i in range(nb): dx, dy = x[i] - x0, y[i] - y0 - rd = r / (dx ** 2 + dy ** 2) ** 0.5 + rd = r / (dx**2 + dy**2) ** 0.5 if rd < 1: x[i] = x0 + dx * rd y[i] = y0 + dy * rd diff --git a/tests/test_grid.py b/tests/test_grid.py index 759a40e1..0e6dd586 100644 --- a/tests/test_grid.py +++ b/tests/test_grid.py @@ -7,7 +7,15 @@ G = RegularGridDataset(get_demo_path("mask_1_60.nc"), "lon", "lat") X = 0.025 -contour = Path(((-X, 0), (X, 0), (X, X), (-X, X), (-X, 0),)) +contour = Path( + ( + (-X, 0), + (X, 0), + (X, X), + (-X, X), + (-X, 0), + ) +) # contour @@ -91,7 +99,11 @@ def test_convolution(): ) g = RegularGridDataset.with_array( coordinates=("x", "y"), - datas=dict(z=z, x=arange(0, 6, 0.5), y=arange(0, 5, 0.5),), + datas=dict( + z=z, + x=arange(0, 6, 0.5), + y=arange(0, 5, 0.5), + ), centered=True, ) diff --git a/tests/test_poly.py b/tests/test_poly.py index 
cca53635..a780f64d 100644 --- a/tests/test_poly.py +++ b/tests/test_poly.py @@ -22,7 +22,7 @@ def test_fit_circle(): x0, y0, r, err = fit_circle(*V) assert x0 == approx(2.5, rel=1e-10) assert y0 == approx(-9.5, rel=1e-10) - assert r == approx(2 ** 0.5 / 2, rel=1e-10) + assert r == approx(2**0.5 / 2, rel=1e-10) assert err == approx((1 - 2 / pi) * 100, rel=1e-10) From 1e6d4f4b28b81afd6bc159b496df7853f7b6f445 Mon Sep 17 00:00:00 2001 From: "A. Delepoulle" <36040805+AntSimi@users.noreply.github.com> Date: Mon, 19 Sep 2022 10:15:47 +0200 Subject: [PATCH 069/115] update changelog --- CHANGELOG.rst | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 57fd7551..b8cad2f4 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -11,6 +11,8 @@ and this project adheres to `Semantic Versioning Date: Mon, 19 Sep 2022 10:15:47 +0200 Subject: [PATCH 070/115] isort/black --- check.sh | 10 +- doc/grid_identification.rst | 43 +++---- doc/grid_load_display.rst | 34 +++--- doc/spectrum.rst | 61 ++++++---- examples/01_general_things/pet_storage.py | 2 +- .../pet_eddy_detection_ACC.py | 16 ++- .../pet_interp_grid_on_dataset.py | 2 +- .../pet_statistics_on_identification.py | 2 +- examples/06_grid_manipulation/pet_lavd.py | 4 +- examples/07_cube_manipulation/pet_cube.py | 3 +- examples/07_cube_manipulation/pet_fsle_med.py | 8 +- .../pet_display_field.py | 2 +- .../pet_display_track.py | 2 +- .../08_tracking_manipulation/pet_one_track.py | 2 +- .../pet_select_track_across_area.py | 2 +- .../pet_track_anim_matplotlib_animation.py | 2 +- .../pet_birth_and_death.py | 2 +- .../pet_center_count.py | 4 +- .../pet_geographic_stats.py | 2 +- .../10_tracking_diagnostics/pet_groups.py | 2 +- examples/10_tracking_diagnostics/pet_histo.py | 2 +- .../10_tracking_diagnostics/pet_lifetime.py | 2 +- .../10_tracking_diagnostics/pet_pixel_used.py | 2 +- .../pet_propagation.py | 2 +- .../12_external_data/pet_drifter_loopers.py | 4 +- examples/14_generic_tools/pet_visvalingam.py | 2 +- examples/16_network/pet_follow_particle.py | 3 +- examples/16_network/pet_group_anim.py | 3 +- examples/16_network/pet_ioannou_2017_case.py | 6 +- .../16_network/pet_replay_segmentation.py | 4 +- examples/16_network/pet_segmentation_anim.py | 2 +- setup.cfg | 15 ++- setup.py | 3 +- share/fig.py | 6 +- src/py_eddy_tracker/__init__.py | 2 +- src/py_eddy_tracker/appli/eddies.py | 5 +- src/py_eddy_tracker/appli/gui.py | 2 +- src/py_eddy_tracker/appli/network.py | 107 +++++++++++------- src/py_eddy_tracker/data/__init__.py | 2 +- src/py_eddy_tracker/dataset/grid.py | 17 +-- src/py_eddy_tracker/eddy_feature.py | 3 +- src/py_eddy_tracker/generic.py | 3 +- src/py_eddy_tracker/gui.py | 4 +- src/py_eddy_tracker/misc.py | 4 +- src/py_eddy_tracker/observations/groups.py | 40 ++++--- src/py_eddy_tracker/observations/network.py | 91 +++++++++------ .../observations/observation.py | 28 ++--- src/py_eddy_tracker/observations/tracking.py | 2 +- src/py_eddy_tracker/poly.py | 5 +- src/py_eddy_tracker/tracking.py | 5 +- src/scripts/EddyTranslate | 2 +- tests/test_track.py | 2 +- 52 files changed, 336 insertions(+), 249 deletions(-) diff --git a/check.sh b/check.sh index b158028a..a402bf52 100644 --- a/check.sh +++ b/check.sh @@ -1,7 +1,5 @@ -isort src tests examples -black src tests examples -blackdoc src tests examples -flake8 tests examples src --count --select=E9,F63,F7,F82 --show-source --statistics -# exit-zero treats all errors as warnings. 
The GitHub editor is 127 chars wide -flake8 tests examples src --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics +isort . +black . +blackdoc . +flake8 . python -m pytest -vv --cov py_eddy_tracker --cov-report html diff --git a/doc/grid_identification.rst b/doc/grid_identification.rst index c645f80c..2cc3fb52 100644 --- a/doc/grid_identification.rst +++ b/doc/grid_identification.rst @@ -47,38 +47,42 @@ Activate verbose .. code-block:: python from py_eddy_tracker import start_logger - start_logger().setLevel('DEBUG') # Available options: ERROR, WARNING, INFO, DEBUG + + start_logger().setLevel("DEBUG") # Available options: ERROR, WARNING, INFO, DEBUG Run identification .. code-block:: python from datetime import datetime + h = RegularGridDataset(grid_name, lon_name, lat_name) - h.bessel_high_filter('adt', 500, order=3) + h.bessel_high_filter("adt", 500, order=3) date = datetime(2019, 2, 23) a, c = h.eddy_identification( - 'adt', 'ugos', 'vgos', # Variables used for identification - date, # Date of identification - 0.002, # step between two isolines of detection (m) - pixel_limit=(5, 2000), # Min and max pixel count for valid contour - shape_error=55, # Error max (%) between ratio of circle fit and contour - ) + "adt", + "ugos", + "vgos", # Variables used for identification + date, # Date of identification + 0.002, # step between two isolines of detection (m) + pixel_limit=(5, 2000), # Min and max pixel count for valid contour + shape_error=55, # Error max (%) between ratio of circle fit and contour + ) Plot the resulting identification .. code-block:: python - fig = plt.figure(figsize=(15,7)) - ax = fig.add_axes([.03,.03,.94,.94]) - ax.set_title('Eddies detected -- Cyclonic(red) and Anticyclonic(blue)') - ax.set_ylim(-75,75) - ax.set_xlim(0,360) - ax.set_aspect('equal') - a.display(ax, color='b', linewidth=.5) - c.display(ax, color='r', linewidth=.5) + fig = plt.figure(figsize=(15, 7)) + ax = fig.add_axes([0.03, 0.03, 0.94, 0.94]) + ax.set_title("Eddies detected -- Cyclonic(red) and Anticyclonic(blue)") + ax.set_ylim(-75, 75) + ax.set_xlim(0, 360) + ax.set_aspect("equal") + a.display(ax, color="b", linewidth=0.5) + c.display(ax, color="r", linewidth=0.5) ax.grid() - fig.savefig('share/png/eddies.png') + fig.savefig("share/png/eddies.png") .. image:: ../share/png/eddies.png @@ -87,7 +91,8 @@ Save identification data .. code-block:: python from netCDF import Dataset - with Dataset(date.strftime('share/Anticyclonic_%Y%m%d.nc'), 'w') as h: + + with Dataset(date.strftime("share/Anticyclonic_%Y%m%d.nc"), "w") as h: a.to_netcdf(h) - with Dataset(date.strftime('share/Cyclonic_%Y%m%d.nc'), 'w') as h: + with Dataset(date.strftime("share/Cyclonic_%Y%m%d.nc"), "w") as h: c.to_netcdf(h) diff --git a/doc/grid_load_display.rst b/doc/grid_load_display.rst index 2e570274..2f0e3765 100644 --- a/doc/grid_load_display.rst +++ b/doc/grid_load_display.rst @@ -7,7 +7,12 @@ Loading grid .. code-block:: python from py_eddy_tracker.dataset.grid import RegularGridDataset - grid_name, lon_name, lat_name = 'share/nrt_global_allsat_phy_l4_20190223_20190226.nc', 'longitude', 'latitude' + + grid_name, lon_name, lat_name = ( + "share/nrt_global_allsat_phy_l4_20190223_20190226.nc", + "longitude", + "latitude", + ) h = RegularGridDataset(grid_name, lon_name, lat_name) Plotting grid @@ -15,14 +20,15 @@ Plotting grid .. 
code-block:: python from matplotlib import pyplot as plt + fig = plt.figure(figsize=(14, 12)) - ax = fig.add_axes([.02, .51, .9, .45]) - ax.set_title('ADT (m)') + ax = fig.add_axes([0.02, 0.51, 0.9, 0.45]) + ax.set_title("ADT (m)") ax.set_ylim(-75, 75) - ax.set_aspect('equal') - m = h.display(ax, name='adt', vmin=-1, vmax=1) + ax.set_aspect("equal") + m = h.display(ax, name="adt", vmin=-1, vmax=1) ax.grid(True) - plt.colorbar(m, cax=fig.add_axes([.94, .51, .01, .45])) + plt.colorbar(m, cax=fig.add_axes([0.94, 0.51, 0.01, 0.45])) Filtering @@ -30,27 +36,27 @@ Filtering .. code-block:: python h = RegularGridDataset(grid_name, lon_name, lat_name) - h.bessel_high_filter('adt', 500, order=3) + h.bessel_high_filter("adt", 500, order=3) Save grid .. code-block:: python - h.write('/tmp/grid.nc') + h.write("/tmp/grid.nc") Add second plot .. code-block:: python - ax = fig.add_axes([.02, .02, .9, .45]) - ax.set_title('ADT Filtered (m)') - ax.set_aspect('equal') + ax = fig.add_axes([0.02, 0.02, 0.9, 0.45]) + ax.set_title("ADT Filtered (m)") + ax.set_aspect("equal") ax.set_ylim(-75, 75) - m = h.display(ax, name='adt', vmin=-.1, vmax=.1) + m = h.display(ax, name="adt", vmin=-0.1, vmax=0.1) ax.grid(True) - plt.colorbar(m, cax=fig.add_axes([.94, .02, .01, .45])) - fig.savefig('share/png/filter.png') + plt.colorbar(m, cax=fig.add_axes([0.94, 0.02, 0.01, 0.45])) + fig.savefig("share/png/filter.png") .. image:: ../share/png/filter.png \ No newline at end of file diff --git a/doc/spectrum.rst b/doc/spectrum.rst index d751b909..5a42cbec 100644 --- a/doc/spectrum.rst +++ b/doc/spectrum.rst @@ -11,7 +11,7 @@ Load data raw = RegularGridDataset(grid_name, lon_name, lat_name) filtered = RegularGridDataset(grid_name, lon_name, lat_name) - filtered.bessel_low_filter('adt', 150, order=3) + filtered.bessel_low_filter("adt", 150, order=3) areas = dict( sud_pacific=dict(llcrnrlon=188, urcrnrlon=280, llcrnrlat=-64, urcrnrlat=-7), @@ -23,24 +23,34 @@ Compute and display spectrum .. code-block:: python - fig = plt.figure(figsize=(10,6)) + fig = plt.figure(figsize=(10, 6)) ax = fig.add_subplot(111) - ax.set_title('Spectrum') - ax.set_xlabel('km') + ax.set_title("Spectrum") + ax.set_xlabel("km") for name_area, area in areas.items(): - lon_spec, lat_spec = raw.spectrum_lonlat('adt', area=area) - mappable = ax.loglog(*lat_spec, label='lat %s raw' % name_area)[0] - ax.loglog(*lon_spec, label='lon %s raw' % name_area, color=mappable.get_color(), linestyle='--') - - lon_spec, lat_spec = filtered.spectrum_lonlat('adt', area=area) - mappable = ax.loglog(*lat_spec, label='lat %s high' % name_area)[0] - ax.loglog(*lon_spec, label='lon %s high' % name_area, color=mappable.get_color(), linestyle='--') - - ax.set_xscale('log') + lon_spec, lat_spec = raw.spectrum_lonlat("adt", area=area) + mappable = ax.loglog(*lat_spec, label="lat %s raw" % name_area)[0] + ax.loglog( + *lon_spec, + label="lon %s raw" % name_area, + color=mappable.get_color(), + linestyle="--" + ) + + lon_spec, lat_spec = filtered.spectrum_lonlat("adt", area=area) + mappable = ax.loglog(*lat_spec, label="lat %s high" % name_area)[0] + ax.loglog( + *lon_spec, + label="lon %s high" % name_area, + color=mappable.get_color(), + linestyle="--" + ) + + ax.set_xscale("log") ax.legend() ax.grid() - fig.savefig('share/png/spectrum.png') + fig.savefig("share/png/spectrum.png") .. image:: ../share/png/spectrum.png @@ -49,18 +59,23 @@ Compute and display spectrum ratio .. 
code-block:: python - fig = plt.figure(figsize=(10,6)) + fig = plt.figure(figsize=(10, 6)) ax = fig.add_subplot(111) - ax.set_title('Spectrum ratio') - ax.set_xlabel('km') + ax.set_title("Spectrum ratio") + ax.set_xlabel("km") for name_area, area in areas.items(): - lon_spec, lat_spec = filtered.spectrum_lonlat('adt', area=area, ref=raw) - mappable = ax.plot(*lat_spec, label='lat %s high' % name_area)[0] - ax.plot(*lon_spec, label='lon %s high' % name_area, color=mappable.get_color(), linestyle='--') - - ax.set_xscale('log') + lon_spec, lat_spec = filtered.spectrum_lonlat("adt", area=area, ref=raw) + mappable = ax.plot(*lat_spec, label="lat %s high" % name_area)[0] + ax.plot( + *lon_spec, + label="lon %s high" % name_area, + color=mappable.get_color(), + linestyle="--" + ) + + ax.set_xscale("log") ax.legend() ax.grid() - fig.savefig('share/png/spectrum_ratio.png') + fig.savefig("share/png/spectrum_ratio.png") .. image:: ../share/png/spectrum_ratio.png diff --git a/examples/01_general_things/pet_storage.py b/examples/01_general_things/pet_storage.py index ccd01f1c..918ebbee 100644 --- a/examples/01_general_things/pet_storage.py +++ b/examples/01_general_things/pet_storage.py @@ -15,9 +15,9 @@ manage eddies associated in networks, the ```track``` and ```segment``` fields allow to separate observations """ -import py_eddy_tracker_sample from matplotlib import pyplot as plt from numpy import arange, outer +import py_eddy_tracker_sample from py_eddy_tracker.data import get_demo_path from py_eddy_tracker.observations.network import NetworkObservations diff --git a/examples/02_eddy_identification/pet_eddy_detection_ACC.py b/examples/02_eddy_identification/pet_eddy_detection_ACC.py index e6c5e381..3d3d4ac1 100644 --- a/examples/02_eddy_identification/pet_eddy_detection_ACC.py +++ b/examples/02_eddy_identification/pet_eddy_detection_ACC.py @@ -9,8 +9,7 @@ """ from datetime import datetime -from matplotlib import pyplot as plt -from matplotlib import style +from matplotlib import pyplot as plt, style from py_eddy_tracker import data from py_eddy_tracker.dataset.grid import RegularGridDataset @@ -65,7 +64,8 @@ def set_fancy_labels(fig, ticklabelsize=14, labelsize=14, labelweight="semibold" y_name="latitude", # Manual area subset indexs=dict( - latitude=slice(100 - margin, 220 + margin), longitude=slice(0, 230 + margin), + latitude=slice(100 - margin, 220 + margin), + longitude=slice(0, 230 + margin), ), ) g_raw = RegularGridDataset(**kw_data) @@ -187,10 +187,16 @@ def set_fancy_labels(fig, ticklabelsize=14, labelsize=14, labelweight="semibold" ax.set_ylabel("With filter") ax.plot( - a_[field][i_a] * factor, a[field][j_a] * factor, "r.", label="Anticyclonic", + a_[field][i_a] * factor, + a[field][j_a] * factor, + "r.", + label="Anticyclonic", ) ax.plot( - c_[field][i_c] * factor, c[field][j_c] * factor, "b.", label="Cyclonic", + c_[field][i_c] * factor, + c[field][j_c] * factor, + "b.", + label="Cyclonic", ) ax.set_aspect("equal"), ax.grid() ax.plot((0, 1000), (0, 1000), "g") diff --git a/examples/02_eddy_identification/pet_interp_grid_on_dataset.py b/examples/02_eddy_identification/pet_interp_grid_on_dataset.py index f9e5d4c3..fa27a3d1 100644 --- a/examples/02_eddy_identification/pet_interp_grid_on_dataset.py +++ b/examples/02_eddy_identification/pet_interp_grid_on_dataset.py @@ -43,7 +43,7 @@ def update_axes(ax, mappable=None): # %% # Compute and store eke in cm²/s² aviso_map.add_grid( - "eke", (aviso_map.grid("u") ** 2 + aviso_map.grid("v") ** 2) * 0.5 * (100 ** 2) + "eke", (aviso_map.grid("u") ** 
2 + aviso_map.grid("v") ** 2) * 0.5 * (100**2) ) eke_kwargs = dict(vmin=1, vmax=1000, cmap="magma_r") diff --git a/examples/02_eddy_identification/pet_statistics_on_identification.py b/examples/02_eddy_identification/pet_statistics_on_identification.py index 0c72262f..dbd73c61 100644 --- a/examples/02_eddy_identification/pet_statistics_on_identification.py +++ b/examples/02_eddy_identification/pet_statistics_on_identification.py @@ -4,9 +4,9 @@ Some statistics on raw identification without any tracking """ -import numpy as np from matplotlib import pyplot as plt from matplotlib.dates import date2num +import numpy as np from py_eddy_tracker import start_logger from py_eddy_tracker.data import get_remote_demo_sample diff --git a/examples/06_grid_manipulation/pet_lavd.py b/examples/06_grid_manipulation/pet_lavd.py index 89d64108..331ace8a 100644 --- a/examples/06_grid_manipulation/pet_lavd.py +++ b/examples/06_grid_manipulation/pet_lavd.py @@ -158,9 +158,7 @@ def update(i_frame): # %% # Format LAVD data lavd = RegularGridDataset.with_array( - coordinates=("lon", "lat"), - datas=dict(lavd=lavd.T, lon=x_g, lat=y_g,), - centered=True, + coordinates=("lon", "lat"), datas=dict(lavd=lavd.T, lon=x_g, lat=y_g), centered=True ) # %% diff --git a/examples/07_cube_manipulation/pet_cube.py b/examples/07_cube_manipulation/pet_cube.py index 7f30c4e1..cba6c85b 100644 --- a/examples/07_cube_manipulation/pet_cube.py +++ b/examples/07_cube_manipulation/pet_cube.py @@ -4,9 +4,10 @@ Example which use CMEMS surface current with a Runge-Kutta 4 algorithm to advect particles. """ +from datetime import datetime, timedelta + # sphinx_gallery_thumbnail_number = 2 import re -from datetime import datetime, timedelta from matplotlib import pyplot as plt from matplotlib.animation import FuncAnimation diff --git a/examples/07_cube_manipulation/pet_fsle_med.py b/examples/07_cube_manipulation/pet_fsle_med.py index b128286a..ef777639 100644 --- a/examples/07_cube_manipulation/pet_fsle_med.py +++ b/examples/07_cube_manipulation/pet_fsle_med.py @@ -49,7 +49,7 @@ def check_p(x, y, flse, theta, m_set, m, dt, dist_init=0.02, dist_max=0.6): Check if distance between eastern or northern particle to center particle is bigger than `dist_max` """ nb_p = x.shape[0] // 3 - delta = dist_max ** 2 + delta = dist_max**2 for i in range(nb_p): i0 = i * 3 i_n = i0 + 1 @@ -59,10 +59,10 @@ def check_p(x, y, flse, theta, m_set, m, dt, dist_init=0.02, dist_max=0.6): continue # Distance with north dxn, dyn = x[i0] - x[i_n], y[i0] - y[i_n] - dn = dxn ** 2 + dyn ** 2 + dn = dxn**2 + dyn**2 # Distance with east dxe, dye = x[i0] - x[i_e], y[i0] - y[i_e] - de = dxe ** 2 + dye ** 2 + de = dxe**2 + dye**2 if dn >= delta or de >= delta: s1 = dn + de @@ -71,7 +71,7 @@ def check_p(x, y, flse, theta, m_set, m, dt, dist_init=0.02, dist_max=0.6): s2 = ((dxn + dye) ** 2 + (dxe - dyn) ** 2) * ( (dxn - dye) ** 2 + (dxe + dyn) ** 2 ) - flse[i] = 1 / (2 * dt) * log(1 / (2 * dist_init ** 2) * (s1 + s2 ** 0.5)) + flse[i] = 1 / (2 * dt) * log(1 / (2 * dist_init**2) * (s1 + s2**0.5)) theta[i] = arctan2(at1, at2 + s2) * 180 / pi # To know where value are set m_set[i] = False diff --git a/examples/08_tracking_manipulation/pet_display_field.py b/examples/08_tracking_manipulation/pet_display_field.py index 30ad75a6..b943a2ba 100644 --- a/examples/08_tracking_manipulation/pet_display_field.py +++ b/examples/08_tracking_manipulation/pet_display_field.py @@ -4,8 +4,8 @@ """ -import py_eddy_tracker_sample from matplotlib import pyplot as plt +import py_eddy_tracker_sample from 
py_eddy_tracker.observations.tracking import TrackEddiesObservations diff --git a/examples/08_tracking_manipulation/pet_display_track.py b/examples/08_tracking_manipulation/pet_display_track.py index 13a8d3ad..b15d51d7 100644 --- a/examples/08_tracking_manipulation/pet_display_track.py +++ b/examples/08_tracking_manipulation/pet_display_track.py @@ -4,8 +4,8 @@ """ -import py_eddy_tracker_sample from matplotlib import pyplot as plt +import py_eddy_tracker_sample from py_eddy_tracker.observations.tracking import TrackEddiesObservations diff --git a/examples/08_tracking_manipulation/pet_one_track.py b/examples/08_tracking_manipulation/pet_one_track.py index 9f930281..a2536c34 100644 --- a/examples/08_tracking_manipulation/pet_one_track.py +++ b/examples/08_tracking_manipulation/pet_one_track.py @@ -2,8 +2,8 @@ One Track =================== """ -import py_eddy_tracker_sample from matplotlib import pyplot as plt +import py_eddy_tracker_sample from py_eddy_tracker.observations.tracking import TrackEddiesObservations diff --git a/examples/08_tracking_manipulation/pet_select_track_across_area.py b/examples/08_tracking_manipulation/pet_select_track_across_area.py index b88f37e1..58184e1f 100644 --- a/examples/08_tracking_manipulation/pet_select_track_across_area.py +++ b/examples/08_tracking_manipulation/pet_select_track_across_area.py @@ -3,8 +3,8 @@ ============================ """ -import py_eddy_tracker_sample from matplotlib import pyplot as plt +import py_eddy_tracker_sample from py_eddy_tracker.observations.tracking import TrackEddiesObservations diff --git a/examples/08_tracking_manipulation/pet_track_anim_matplotlib_animation.py b/examples/08_tracking_manipulation/pet_track_anim_matplotlib_animation.py index 81e57e59..b686fd67 100644 --- a/examples/08_tracking_manipulation/pet_track_anim_matplotlib_animation.py +++ b/examples/08_tracking_manipulation/pet_track_anim_matplotlib_animation.py @@ -9,9 +9,9 @@ """ import re -import py_eddy_tracker_sample from matplotlib.animation import FuncAnimation from numpy import arange +import py_eddy_tracker_sample from py_eddy_tracker.appli.gui import Anim from py_eddy_tracker.observations.tracking import TrackEddiesObservations diff --git a/examples/10_tracking_diagnostics/pet_birth_and_death.py b/examples/10_tracking_diagnostics/pet_birth_and_death.py index d917efbd..b67993a2 100644 --- a/examples/10_tracking_diagnostics/pet_birth_and_death.py +++ b/examples/10_tracking_diagnostics/pet_birth_and_death.py @@ -5,8 +5,8 @@ Following figures are based on https://doi.org/10.1016/j.pocean.2011.01.002 """ -import py_eddy_tracker_sample from matplotlib import pyplot as plt +import py_eddy_tracker_sample from py_eddy_tracker.observations.tracking import TrackEddiesObservations diff --git a/examples/10_tracking_diagnostics/pet_center_count.py b/examples/10_tracking_diagnostics/pet_center_count.py index 6d9fa417..77a4dcda 100644 --- a/examples/10_tracking_diagnostics/pet_center_count.py +++ b/examples/10_tracking_diagnostics/pet_center_count.py @@ -5,9 +5,9 @@ Do Geo stat with center and compare with frequency method show: :ref:`sphx_glr_python_module_10_tracking_diagnostics_pet_pixel_used.py` """ -import py_eddy_tracker_sample from matplotlib import pyplot as plt from matplotlib.colors import LogNorm +import py_eddy_tracker_sample from py_eddy_tracker.observations.tracking import TrackEddiesObservations @@ -27,7 +27,7 @@ step = 0.125 bins = ((-10, 37, step), (30, 46, step)) kwargs_pcolormesh = dict( - cmap="terrain_r", vmin=0, vmax=2, factor=1 / (a.nb_days * 
step ** 2), name="count" + cmap="terrain_r", vmin=0, vmax=2, factor=1 / (a.nb_days * step**2), name="count" ) diff --git a/examples/10_tracking_diagnostics/pet_geographic_stats.py b/examples/10_tracking_diagnostics/pet_geographic_stats.py index d2a7e90d..a2e3f6b5 100644 --- a/examples/10_tracking_diagnostics/pet_geographic_stats.py +++ b/examples/10_tracking_diagnostics/pet_geographic_stats.py @@ -4,8 +4,8 @@ """ -import py_eddy_tracker_sample from matplotlib import pyplot as plt +import py_eddy_tracker_sample from py_eddy_tracker.observations.tracking import TrackEddiesObservations diff --git a/examples/10_tracking_diagnostics/pet_groups.py b/examples/10_tracking_diagnostics/pet_groups.py index f6e800ae..deedcc3f 100644 --- a/examples/10_tracking_diagnostics/pet_groups.py +++ b/examples/10_tracking_diagnostics/pet_groups.py @@ -3,9 +3,9 @@ =================== """ -import py_eddy_tracker_sample from matplotlib import pyplot as plt from numpy import arange, ones, percentile +import py_eddy_tracker_sample from py_eddy_tracker.observations.tracking import TrackEddiesObservations diff --git a/examples/10_tracking_diagnostics/pet_histo.py b/examples/10_tracking_diagnostics/pet_histo.py index b2eff842..abf97c38 100644 --- a/examples/10_tracking_diagnostics/pet_histo.py +++ b/examples/10_tracking_diagnostics/pet_histo.py @@ -3,9 +3,9 @@ =================== """ -import py_eddy_tracker_sample from matplotlib import pyplot as plt from numpy import arange +import py_eddy_tracker_sample from py_eddy_tracker.observations.tracking import TrackEddiesObservations diff --git a/examples/10_tracking_diagnostics/pet_lifetime.py b/examples/10_tracking_diagnostics/pet_lifetime.py index 9f84e790..4e2500fd 100644 --- a/examples/10_tracking_diagnostics/pet_lifetime.py +++ b/examples/10_tracking_diagnostics/pet_lifetime.py @@ -3,9 +3,9 @@ =================== """ -import py_eddy_tracker_sample from matplotlib import pyplot as plt from numpy import arange, ones +import py_eddy_tracker_sample from py_eddy_tracker.observations.tracking import TrackEddiesObservations diff --git a/examples/10_tracking_diagnostics/pet_pixel_used.py b/examples/10_tracking_diagnostics/pet_pixel_used.py index 3907ce19..75a826d6 100644 --- a/examples/10_tracking_diagnostics/pet_pixel_used.py +++ b/examples/10_tracking_diagnostics/pet_pixel_used.py @@ -5,9 +5,9 @@ Do Geo stat with frequency and compare with center count method: :ref:`sphx_glr_python_module_10_tracking_diagnostics_pet_center_count.py` """ -import py_eddy_tracker_sample from matplotlib import pyplot as plt from matplotlib.colors import LogNorm +import py_eddy_tracker_sample from py_eddy_tracker.observations.tracking import TrackEddiesObservations diff --git a/examples/10_tracking_diagnostics/pet_propagation.py b/examples/10_tracking_diagnostics/pet_propagation.py index 6a65a212..e6bc6c1b 100644 --- a/examples/10_tracking_diagnostics/pet_propagation.py +++ b/examples/10_tracking_diagnostics/pet_propagation.py @@ -3,9 +3,9 @@ ===================== """ -import py_eddy_tracker_sample from matplotlib import pyplot as plt from numpy import arange, ones +import py_eddy_tracker_sample from py_eddy_tracker.generic import cumsum_by_track from py_eddy_tracker.observations.tracking import TrackEddiesObservations diff --git a/examples/12_external_data/pet_drifter_loopers.py b/examples/12_external_data/pet_drifter_loopers.py index 92707906..5266db7b 100644 --- a/examples/12_external_data/pet_drifter_loopers.py +++ b/examples/12_external_data/pet_drifter_loopers.py @@ -8,10 +8,10 @@ import re 
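The recurring import hunks throughout this sweep all apply one rule: within each isort section, plain ``import`` statements and ``from ... import`` statements are sorted together alphabetically by module name, rather than grouped by statement form. The option name below is an assumption — the ``setup.cfg`` hunk in this series only shows ``combine_as_imports``, ``line_length`` and ``multi_line_output`` — but the effect is the pattern seen in every example file:

.. code-block:: python

    # Before the sweep: the two statement forms are grouped separately,
    # so the sample package sorts above matplotlib.
    import py_eddy_tracker_sample
    from matplotlib import pyplot as plt

    # After the sweep: both forms interleave alphabetically by module,
    # as isort does with force_sort_within_sections = True (assumed name).
    from matplotlib import pyplot as plt
    import py_eddy_tracker_sample

Nothing functional changes in these hunks; they only normalize import order across the examples and sources.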
-import numpy as np -import py_eddy_tracker_sample from matplotlib import pyplot as plt from matplotlib.animation import FuncAnimation +import numpy as np +import py_eddy_tracker_sample from py_eddy_tracker import data from py_eddy_tracker.appli.gui import Anim diff --git a/examples/14_generic_tools/pet_visvalingam.py b/examples/14_generic_tools/pet_visvalingam.py index f7b29c10..736e8852 100644 --- a/examples/14_generic_tools/pet_visvalingam.py +++ b/examples/14_generic_tools/pet_visvalingam.py @@ -2,8 +2,8 @@ Visvalingam algorithm ===================== """ -import matplotlib.animation as animation from matplotlib import pyplot as plt +import matplotlib.animation as animation from numba import njit from numpy import array, empty diff --git a/examples/16_network/pet_follow_particle.py b/examples/16_network/pet_follow_particle.py index dbe0753e..21592558 100644 --- a/examples/16_network/pet_follow_particle.py +++ b/examples/16_network/pet_follow_particle.py @@ -5,8 +5,7 @@ """ import re -from matplotlib import colors -from matplotlib import pyplot as plt +from matplotlib import colors, pyplot as plt from matplotlib.animation import FuncAnimation from numpy import arange, meshgrid, ones, unique, zeros diff --git a/examples/16_network/pet_group_anim.py b/examples/16_network/pet_group_anim.py index 047f5820..f2d439ed 100644 --- a/examples/16_network/pet_group_anim.py +++ b/examples/16_network/pet_group_anim.py @@ -2,9 +2,10 @@ Network group process ===================== """ +from datetime import datetime + # sphinx_gallery_thumbnail_number = 2 import re -from datetime import datetime from matplotlib import pyplot as plt from matplotlib.animation import FuncAnimation diff --git a/examples/16_network/pet_ioannou_2017_case.py b/examples/16_network/pet_ioannou_2017_case.py index b02b846a..56bec82e 100644 --- a/examples/16_network/pet_ioannou_2017_case.py +++ b/examples/16_network/pet_ioannou_2017_case.py @@ -6,12 +6,12 @@ We want to find the Ierapetra Eddy described above in a network demonstration run. 
""" +from datetime import datetime, timedelta + # %% import re -from datetime import datetime, timedelta -from matplotlib import colors -from matplotlib import pyplot as plt +from matplotlib import colors, pyplot as plt from matplotlib.animation import FuncAnimation from matplotlib.ticker import FuncFormatter from numpy import arange, array, pi, where diff --git a/examples/16_network/pet_replay_segmentation.py b/examples/16_network/pet_replay_segmentation.py index d6b4568b..ecb0970d 100644 --- a/examples/16_network/pet_replay_segmentation.py +++ b/examples/16_network/pet_replay_segmentation.py @@ -147,9 +147,9 @@ def get_obs(dataset): ax = timeline_axes() n_.median_filter(15, "time", "latitude") -kw["s"] = (n_.radius_e * 1e-3) ** 2 / 30 ** 2 * 20 +kw["s"] = (n_.radius_e * 1e-3) ** 2 / 30**2 * 20 m = n_.scatter_timeline( - ax, "shape_error_e", vmin=14, vmax=70, **kw, yfield="lon", method="all", + ax, "shape_error_e", vmin=14, vmax=70, **kw, yfield="lon", method="all" ) ax.set_ylabel("Longitude") cb = update_axes(ax, m["scatter"]) diff --git a/examples/16_network/pet_segmentation_anim.py b/examples/16_network/pet_segmentation_anim.py index 58f71188..1fcb9ae1 100644 --- a/examples/16_network/pet_segmentation_anim.py +++ b/examples/16_network/pet_segmentation_anim.py @@ -96,7 +96,7 @@ def update(i_frame): indices_frames = INDICES[i_frame] mappable_CONTOUR.set_data( - e.contour_lon_e[indices_frames], e.contour_lat_e[indices_frames], + e.contour_lon_e[indices_frames], e.contour_lat_e[indices_frames] ) mappable_CONTOUR.set_color(cmap.colors[tr[indices_frames] % len(cmap.colors)]) return (mappable_tracks,) diff --git a/setup.cfg b/setup.cfg index 66f3f495..eb88b6f9 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,8 +1,16 @@ + +[yapf] +column_limit = 100 + [flake8] max-line-length = 140 ignore = E203, # whitespace before ':' - W503, # line break before binary operator + W503, # line break before binary operator +exclude= + build + doc + versioneer.py [isort] combine_as_imports=True @@ -14,7 +22,7 @@ line_length=140 multi_line_output=3 skip= build - docs/source/conf.py + doc/conf.py [versioneer] @@ -27,4 +35,5 @@ parentdir_prefix = [tool:pytest] filterwarnings= - ignore:tostring.*is deprecated \ No newline at end of file + ignore:tostring.*is deprecated + diff --git a/setup.py b/setup.py index e0767c10..6b18bcbb 100644 --- a/setup.py +++ b/setup.py @@ -1,6 +1,7 @@ # -*- coding: utf-8 -*- +from setuptools import find_packages, setup + import versioneer -from setuptools import setup, find_packages with open("README.md", "r") as fh: long_description = fh.read() diff --git a/share/fig.py b/share/fig.py index 8640abcb..80c7f12b 100644 --- a/share/fig.py +++ b/share/fig.py @@ -1,8 +1,10 @@ -from matplotlib import pyplot as plt -from py_eddy_tracker.dataset.grid import RegularGridDataset from datetime import datetime import logging +from matplotlib import pyplot as plt + +from py_eddy_tracker.dataset.grid import RegularGridDataset + grid_name, lon_name, lat_name = ( "nrt_global_allsat_phy_l4_20190223_20190226.nc", "longitude", diff --git a/src/py_eddy_tracker/__init__.py b/src/py_eddy_tracker/__init__.py index f3ecec84..0a98892d 100644 --- a/src/py_eddy_tracker/__init__.py +++ b/src/py_eddy_tracker/__init__.py @@ -20,9 +20,9 @@ """ -import logging from argparse import ArgumentParser from datetime import datetime +import logging import zarr diff --git a/src/py_eddy_tracker/appli/eddies.py b/src/py_eddy_tracker/appli/eddies.py index df4e7d43..c1c7a90d 100644 --- a/src/py_eddy_tracker/appli/eddies.py +++ 
b/src/py_eddy_tracker/appli/eddies.py @@ -3,12 +3,11 @@ Applications on detection and tracking files """ import argparse -import logging from datetime import datetime from glob import glob +import logging from os import mkdir -from os.path import basename, dirname, exists -from os.path import join as join_path +from os.path import basename, dirname, exists, join as join_path from re import compile as re_compile from netCDF4 import Dataset diff --git a/src/py_eddy_tracker/appli/gui.py b/src/py_eddy_tracker/appli/gui.py index 427db24b..4a8cdeb0 100644 --- a/src/py_eddy_tracker/appli/gui.py +++ b/src/py_eddy_tracker/appli/gui.py @@ -3,9 +3,9 @@ Entry point of graphic user interface """ -import logging from datetime import datetime, timedelta from itertools import chain +import logging from matplotlib import pyplot from matplotlib.animation import FuncAnimation diff --git a/src/py_eddy_tracker/appli/network.py b/src/py_eddy_tracker/appli/network.py index bfe226cc..03c5eb35 100644 --- a/src/py_eddy_tracker/appli/network.py +++ b/src/py_eddy_tracker/appli/network.py @@ -5,10 +5,11 @@ import logging +from numpy import in1d, zeros + from .. import EddyParser from ..observations.network import Network, NetworkObservations from ..observations.tracking import TrackEddiesObservations -from numpy import in1d, zeros logger = logging.getLogger("pet") @@ -110,7 +111,9 @@ def subset_network(): help="Remove short dead end, first is for minimal obs number and second for minimal segment time to keep", ) parser.add_argument( - "--remove_trash", action="store_true", help="Remove trash (network id == 0)", + "--remove_trash", + action="store_true", + help="Remove trash (network id == 0)", ) parser.add_argument( "-p", @@ -138,7 +141,6 @@ def quick_compare(): - S : segment - Obs : observations """ - ) parser.add_argument("ref", help="Identification file of reference") parser.add_argument("others", nargs="+", help="Identifications files to compare") @@ -148,7 +150,15 @@ def quick_compare(): args = parser.parse_args() kw = dict( - include_vars=['longitude', 'latitude', 'time', 'track', 'segment', 'next_obs', 'previous_obs'] + include_vars=[ + "longitude", + "latitude", + "time", + "track", + "segment", + "next_obs", + "previous_obs", + ] ) if args.path_out is not None: @@ -159,24 +169,26 @@ def quick_compare(): f"[ref] {args.ref} -> {ref.nb_network} network / {ref.nb_segment} segment / {len(ref)} obs " f"-> {ref.network_size(0)} trash obs, " f"{len(ref.merging_event())} merging, {len(ref.splitting_event())} spliting" - ) - others = {other: NetworkObservations.load_file(other, **kw) for other in args.others} - - if args.path_out is not None: - groups_ref, groups_other = run_compare(ref, others, **kwargs) - if not exists(args.path_out): - mkdir(args.path_out) - for i, other_ in enumerate(args.others): - dirname_ = f"{args.path_out}/{other_.replace('/', '_')}/" - if not exists(dirname_): - mkdir(dirname_) - for k, v in groups_other[other_].items(): - basename_ = f"other_{k}.nc" - others[other_].index(v).write_file(filename=f"{dirname_}/{basename_}") - for k, v in groups_ref[other_].items(): - basename_ = f"ref_{k}.nc" - ref.index(v).write_file(filename=f"{dirname_}/{basename_}") - return + ) + others = { + other: NetworkObservations.load_file(other, **kw) for other in args.others + } + + # if args.path_out is not None: + # groups_ref, groups_other = run_compare(ref, others, **kwargs) + # if not exists(args.path_out): + # mkdir(args.path_out) + # for i, other_ in enumerate(args.others): + # dirname_ = 
f"{args.path_out}/{other_.replace('/', '_')}/" + # if not exists(dirname_): + # mkdir(dirname_) + # for k, v in groups_other[other_].items(): + # basename_ = f"other_{k}.nc" + # others[other_].index(v).write_file(filename=f"{dirname_}/{basename_}") + # for k, v in groups_ref[other_].items(): + # basename_ = f"ref_{k}.nc" + # ref.index(v).write_file(filename=f"{dirname_}/{basename_}") + # return display_compare(ref, others) @@ -188,33 +200,43 @@ def run_compare(ref, others): f"[{i}] {k} -> {other.nb_network} network / {other.nb_segment} segment / {len(other)} obs " f"-> {other.network_size(0)} trash obs, " f"{len(other.merging_event())} merging, {len(other.splitting_event())} spliting" - ) + ) ref_id, other_id = ref.identify_in(other, size_min=2) m = other_id != -1 ref_id, other_id = ref_id[m], other_id[m] - out['same N(N)'] = m.sum() - out['same N(Obs)'] = ref.network_size(ref_id).sum() + out["same N(N)"] = m.sum() + out["same N(Obs)"] = ref.network_size(ref_id).sum() # For network which have same obs ref_, other_ = ref.networks(ref_id), other.networks(other_id) ref_segu, other_segu = ref_.identify_in(other_, segment=True) - m = other_segu==-1 + m = other_segu == -1 ref_track_no_match, _ = ref_.unique_segment_to_id(ref_segu[m]) ref_segu, other_segu = ref_segu[~m], other_segu[~m] m = ~in1d(ref_id, ref_track_no_match) - out['same NS(N)'] = m.sum() - out['same NS(Obs)'] = ref.network_size(ref_id[m]).sum() + out["same NS(N)"] = m.sum() + out["same NS(Obs)"] = ref.network_size(ref_id[m]).sum() # Check merge/split def follow_obs(d, i_follow): m = i_follow != -1 i_follow = i_follow[m] - t, x, y = zeros(m.size, d.time.dtype), zeros(m.size, d.longitude.dtype), zeros(m.size, d.latitude.dtype) - t[m], x[m], y[m] = d.time[i_follow], d.longitude[i_follow], d.latitude[i_follow] + t, x, y = ( + zeros(m.size, d.time.dtype), + zeros(m.size, d.longitude.dtype), + zeros(m.size, d.latitude.dtype), + ) + t[m], x[m], y[m] = ( + d.time[i_follow], + d.longitude[i_follow], + d.latitude[i_follow], + ) return t, x, y + def next_obs(d, i_seg): last_i = d.index_segment_track[1][i_seg] - 1 return follow_obs(d, d.next_obs[last_i]) + def previous_obs(d, i_seg): first_i = d.index_segment_track[0][i_seg] return follow_obs(d, d.previous_obs[first_i]) @@ -222,25 +244,24 @@ def previous_obs(d, i_seg): tref, xref, yref = next_obs(ref_, ref_segu) tother, xother, yother = next_obs(other_, other_segu) - m = (tref == tother) & (xref == xother) & (yref == yother) + m = (tref == tother) & (xref == xother) & (yref == yother) print(m.sum(), m.size, ref_segu.size, ref_track_no_match.size) tref, xref, yref = previous_obs(ref_, ref_segu) tother, xother, yother = previous_obs(other_, other_segu) - m = (tref == tother) & (xref == xother) & (yref == yother) + m = (tref == tother) & (xref == xother) & (yref == yother) print(m.sum(), m.size, ref_segu.size, ref_track_no_match.size) - - ref_segu, other_segu = ref.identify_in(other, segment=True) m = other_segu != -1 - out['same S(S)'] = m.sum() - out['same S(Obs)'] = ref.segment_size()[ref_segu[m]].sum() + out["same S(S)"] = m.sum() + out["same S(Obs)"] = ref.segment_size()[ref_segu[m]].sum() outs[k] = out return outs + def display_compare(ref, others): def display(value, ref=None): if ref: @@ -248,16 +269,16 @@ def display(value, ref=None): else: outs = value return "".join([f"{v:^18}" for v in outs]) - + datas = run_compare(ref, others) ref_ = { - 'same N(N)' : ref.nb_network, + "same N(N)": ref.nb_network, "same N(Obs)": len(ref), - 'same NS(N)' : ref.nb_network, - 'same NS(Obs)' : len(ref), 
- 'same S(S)' : ref.nb_segment, - 'same S(Obs)' : len(ref), - } + "same NS(N)": ref.nb_network, + "same NS(Obs)": len(ref), + "same S(S)": ref.nb_segment, + "same S(Obs)": len(ref), + } print(" ", display(ref_.keys())) for i, (_, v) in enumerate(datas.items()): print(f"[{i:2}] ", display(v, ref=ref_)) diff --git a/src/py_eddy_tracker/data/__init__.py b/src/py_eddy_tracker/data/__init__.py index 4702af8f..f14fee87 100644 --- a/src/py_eddy_tracker/data/__init__.py +++ b/src/py_eddy_tracker/data/__init__.py @@ -10,8 +10,8 @@ """ import io import lzma -import tarfile from os import path +import tarfile import requests diff --git a/src/py_eddy_tracker/dataset/grid.py b/src/py_eddy_tracker/dataset/grid.py index 24b1e25b..1cf871a7 100644 --- a/src/py_eddy_tracker/dataset/grid.py +++ b/src/py_eddy_tracker/dataset/grid.py @@ -2,14 +2,13 @@ """ Class to load and manipulate RegularGrid and UnRegularGrid """ -import logging from datetime import datetime +import logging from cv2 import filter2D from matplotlib.path import Path as BasePath from netCDF4 import Dataset -from numba import njit, prange -from numba import types as numba_types +from numba import njit, prange, types as numba_types from numpy import ( arange, array, @@ -28,9 +27,7 @@ isnan, linspace, ma, -) -from numpy import mean as np_mean -from numpy import ( + mean as np_mean, meshgrid, nan, nanmean, @@ -2299,14 +2296,18 @@ def from_netcdf_cube(cls, filename, x_name, y_name, t_name, heigth=None, **kwarg new = cls() with Dataset(filename) as h: for i, t in enumerate(h.variables[t_name][:]): - d = RegularGridDataset(filename, x_name, y_name, indexs={t_name: i}, **kwargs) + d = RegularGridDataset( + filename, x_name, y_name, indexs={t_name: i}, **kwargs + ) if heigth is not None: d.add_uv(heigth) new.datasets.append((t, d)) return new @classmethod - def from_netcdf_list(cls, filenames, t, x_name, y_name, indexs=None, heigth=None, **kwargs): + def from_netcdf_list( + cls, filenames, t, x_name, y_name, indexs=None, heigth=None, **kwargs + ): new = cls() for i, _t in enumerate(t): filename = filenames[i] diff --git a/src/py_eddy_tracker/eddy_feature.py b/src/py_eddy_tracker/eddy_feature.py index 0f13eb2a..8bc139ab 100644 --- a/src/py_eddy_tracker/eddy_feature.py +++ b/src/py_eddy_tracker/eddy_feature.py @@ -8,8 +8,7 @@ from matplotlib.cm import get_cmap from matplotlib.colors import Normalize from matplotlib.figure import Figure -from numba import njit -from numba import types as numba_types +from numba import njit, types as numba_types from numpy import ( array, concatenate, diff --git a/src/py_eddy_tracker/generic.py b/src/py_eddy_tracker/generic.py index c2d7de8a..7dbbf3c3 100644 --- a/src/py_eddy_tracker/generic.py +++ b/src/py_eddy_tracker/generic.py @@ -3,8 +3,7 @@ Tool method which use mostly numba """ -from numba import njit, prange -from numba import types as numba_types +from numba import njit, prange, types as numba_types from numpy import ( absolute, arcsin, diff --git a/src/py_eddy_tracker/gui.py b/src/py_eddy_tracker/gui.py index 0f310467..a85e9c18 100644 --- a/src/py_eddy_tracker/gui.py +++ b/src/py_eddy_tracker/gui.py @@ -3,12 +3,12 @@ GUI class """ -import logging from datetime import datetime, timedelta +import logging +from matplotlib.projections import register_projection import matplotlib.pyplot as plt import numpy as np -from matplotlib.projections import register_projection from .generic import flatten_line_matrix, split_line diff --git a/src/py_eddy_tracker/misc.py b/src/py_eddy_tracker/misc.py index eb0dc5d1..647bfba3 
100644 --- a/src/py_eddy_tracker/misc.py +++ b/src/py_eddy_tracker/misc.py @@ -1,6 +1,8 @@ import re + from matplotlib.animation import FuncAnimation + class VideoAnimation(FuncAnimation): def _repr_html_(self, *args, **kwargs): """To get video in html and have a player""" @@ -16,4 +18,4 @@ def save(self, *args, **kwargs): with open(args[0], "w") as _: pass return - return super().save(*args, **kwargs) \ No newline at end of file + return super().save(*args, **kwargs) diff --git a/src/py_eddy_tracker/observations/groups.py b/src/py_eddy_tracker/observations/groups.py index 66574407..121ffa29 100644 --- a/src/py_eddy_tracker/observations/groups.py +++ b/src/py_eddy_tracker/observations/groups.py @@ -1,14 +1,12 @@ -import logging from abc import ABC, abstractmethod +import logging -from numba import njit -from numba import types as nb_types -from numpy import arange, int32, interp, median, where, zeros, full, isnan - -from .observation import EddiesObservations +from numba import njit, types as nb_types +from numpy import arange, full, int32, interp, isnan, median, where, zeros from ..generic import window_index from ..poly import create_meshed_particles, poly_indexs +from .observation import EddiesObservations logger = logging.getLogger("pet") @@ -69,7 +67,7 @@ def get_missing_indices( return indices -def advect(x, y, c, t0, n_days, u_name='u', v_name='v'): +def advect(x, y, c, t0, n_days, u_name="u", v_name="v"): """ Advect particles from t0 to t0 + n_days, with data cube. @@ -92,7 +90,9 @@ def advect(x, y, c, t0, n_days, u_name='u', v_name='v'): return t, x, y -def particle_candidate_step(t_start, contours_start, contours_end, space_step, dt, c, **kwargs): +def particle_candidate_step( + t_start, contours_start, contours_end, space_step, dt, c, **kwargs +): """Select particles within eddies, advect them, return target observation and associated percentages. For one time step. 
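A minimal sketch of what one such step does, assembled from the helper names visible in this patch (``create_meshed_particles`` and the ``advect`` generator of a ``GridCollection``). Signatures are taken from the surrounding hunks; the final matching of surviving particles to end contours is elided, so this is an illustration, not the function itself:

.. code-block:: python

    from numpy import isnan

    from py_eddy_tracker.poly import create_meshed_particles

    def one_step_sketch(t_start, contours_start, space_step, dt, cube, **kwargs):
        """Seed particles in start contours and advect them ``dt`` days."""
        # Regular mesh of particles inside every start contour
        x, y, i_start = create_meshed_particles(*contours_start, space_step)
        # Six sub-steps of 86400 / 6 s: each __next__() advances one day.
        # **kwargs must carry the velocity field names, e.g. u_name="u", v_name="v"
        p = cube.advect(x, y, t_init=t_start, nb_step=6, time_step=86400 / 6, **kwargs)
        for _ in range(abs(dt)):  # dt may be negative for backward advection
            _, x, y = p.__next__()
        # Particles advected out of the grid are flagged NaN: drop them
        m = ~(isnan(x) + isnan(y))
        return x[m], y[m], i_start[m]

Splitting each day into sub-steps keeps the integration stable while letting the caller reason in whole days; a later patch in this series (071) exposes the hard-coded ``6`` as the ``day_fraction`` argument.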
@@ -122,7 +122,7 @@ def particle_candidate_step(t_start, contours_start, contours_end, space_step, d i_target, pct_target = full(shape, -1, dtype="i4"), zeros(shape, dtype="f8") nb_end = contours_end[0].shape[0] get_targets(i_start, i_end, i_target, pct_target, nb_end) - return i_target, pct_target.astype('i1') + return i_target, pct_target.astype("i1") def particle_candidate( @@ -217,6 +217,7 @@ def get_targets(i_start, i_end, i_target, pct, nb_end): pct[i, 1] = pct_ i_target[i, 1] = j + @njit(cache=True) def get_matrix(i_start, i_end, translate_start, translate_end, i_target, pct): """Compute target observation and associated percentages @@ -346,7 +347,9 @@ def keep_tracks_by_date(self, date, nb_days): return self.extract_with_mask(mask) - def particle_candidate_atlas(self, cube, space_step, dt, start_intern=False, end_intern=False, **kwargs): + def particle_candidate_atlas( + self, cube, space_step, dt, start_intern=False, end_intern=False, **kwargs + ): """Select particles within eddies, advect them, return target observation and associated percentages :param `~py_eddy_tracker.dataset.grid.GridCollection` cube: GridCollection with speed for particles @@ -359,7 +362,9 @@ def particle_candidate_atlas(self, cube, space_step, dt, start_intern=False, end """ t_start, t_end = int(self.period[0]), int(self.period[1]) # Pre-compute to get time index - i_sort, i_start, i_end = window_index(self.time, arange(t_start, t_end + 1), .5) + i_sort, i_start, i_end = window_index( + self.time, arange(t_start, t_end + 1), 0.5 + ) # Out shape shape = (len(self), 2) i_target, pct = full(shape, -1, dtype="i4"), zeros(shape, dtype="i1") @@ -368,19 +373,20 @@ def particle_candidate_atlas(self, cube, space_step, dt, start_intern=False, end for t in times: # Get index for origin i = t - t_start - indexs0 = i_sort[i_start[i]:i_end[i]] + indexs0 = i_sort[i_start[i] : i_end[i]] # Get index for end i = t + dt - t_start - indexs1 = i_sort[i_start[i]:i_end[i]] + indexs1 = i_sort[i_start[i] : i_end[i]] # Get contour data contours0 = [self[label][indexs0] for label in self.intern(start_intern)] contours1 = [self[label][indexs1] for label in self.intern(end_intern)] # Get local result - i_target_, pct_ = particle_candidate_step(t, contours0, contours1, space_step, dt, cube, **kwargs) + i_target_, pct_ = particle_candidate_step( + t, contours0, contours1, space_step, dt, cube, **kwargs + ) # Merge result m = i_target_ != -1 i_target_[m] = indexs1[i_target_[m]] - i_target[indexs0] = i_target_ - pct[indexs0] = pct_ + i_target[indexs0] = i_target_ + pct[indexs0] = pct_ return i_target, pct - \ No newline at end of file diff --git a/src/py_eddy_tracker/observations/network.py b/src/py_eddy_tracker/observations/network.py index b633fc40..661144e7 100644 --- a/src/py_eddy_tracker/observations/network.py +++ b/src/py_eddy_tracker/observations/network.py @@ -2,14 +2,13 @@ """ Class to create network of observations """ +from glob import glob import logging import time -from glob import glob import netCDF4 -import zarr -from numba.typed import List from numba import njit +from numba.typed import List from numpy import ( arange, array, @@ -25,6 +24,7 @@ where, zeros, ) +import zarr from ..dataset.grid import GridCollection from ..generic import build_index, wrap_longitude @@ -188,7 +188,7 @@ def network_segment_size(self, id_networks=None): """ i0, i1, ref = build_index(self.track[self.index_segment_track[0]]) if id_networks is None: - return i1-i0 + return i1 - i0 else: i = id_networks - ref return i1[i] - i0[i] @@ -204,7 +204,7 
@@ def network_size(self, id_networks=None): else: i = id_networks - self.index_network[2] return self.index_network[1][i] - self.index_network[0][i] - + def unique_segment_to_id(self, id_unique): """Return id network and id segment for a unique id @@ -220,7 +220,7 @@ def segment_slice(self, id_network, id_segment): :param int id_network: id to identify network :param int id_segment: id to identify segment """ - raise Exception('need to be implemented') + raise Exception("need to be implemented") def network_slice(self, id_network): """ @@ -1221,12 +1221,18 @@ def network(self, id_network): def networks_mask(self, id_networks, segment=False): if segment: - return generate_mask_from_ids(id_networks, self.track.size, *self.index_segment_track) + return generate_mask_from_ids( + id_networks, self.track.size, *self.index_segment_track + ) else: - return generate_mask_from_ids(id_networks, self.track.size, *self.index_network) + return generate_mask_from_ids( + id_networks, self.track.size, *self.index_network + ) def networks(self, id_networks): - return self.extract_with_mask(generate_mask_from_ids(id_networks, self.track.size, *self.index_network)) + return self.extract_with_mask( + generate_mask_from_ids(id_networks, self.track.size, *self.index_network) + ) @property def nb_network(self): @@ -1234,7 +1240,7 @@ def nb_network(self): Count and return number of network """ return (self.network_size() != 0).sum() - + @property def nb_segment(self): """ @@ -1252,15 +1258,18 @@ def identify_in(self, other, size_min=1, segment=False): """ if segment: counts = self.segment_size(), other.segment_size() - i_self_ref, i_other_ref = self.ref_segment_track_index, other.ref_segment_track_index - var_id = 'segment' + i_self_ref, i_other_ref = ( + self.ref_segment_track_index, + other.ref_segment_track_index, + ) + var_id = "segment" else: counts = self.network_size(), other.network_size() i_self_ref, i_other_ref = self.ref_index, other.ref_index - var_id = 'track' + var_id = "track" # object to contain index of couple - in_self, in_other = list(), list() - # We iterate on item of same size + in_self, in_other = list(), list() + # We iterate on item of same size for i_self, i_other, i0, _ in self.align_on(other, counts, all_ref=True): if i0 < size_min: continue @@ -1277,16 +1286,18 @@ def identify_in(self, other, size_min=1, segment=False): # We get absolute id id_self, id_other = i_self + i_self_ref, i_other + i_other_ref # We compute mask to select data - m_self, m_other = self.networks_mask(id_self, segment), other.networks_mask(id_other, segment) + m_self, m_other = self.networks_mask(id_self, segment), other.networks_mask( + id_other, segment + ) # We extract obs obs_self, obs_other = self.obs[m_self], other.obs[m_other] - x1, y1, t1 = obs_self['lon'], obs_self['lat'], obs_self['time'] - x2, y2, t2 = obs_other['lon'], obs_other['lat'], obs_other['time'] + x1, y1, t1 = obs_self["lon"], obs_self["lat"], obs_self["time"] + x2, y2, t2 = obs_other["lon"], obs_other["lat"], obs_other["time"] if segment: - ids1 = build_unique_array(obs_self['segment'], obs_self['track']) - ids2 = build_unique_array(obs_other['segment'], obs_other['track']) + ids1 = build_unique_array(obs_self["segment"], obs_self["track"]) + ids2 = build_unique_array(obs_other["segment"], obs_other["track"]) label1 = self.segment_track_array[m_self] label2 = other.segment_track_array[m_other] else: @@ -1295,22 +1306,26 @@ def identify_in(self, other, size_min=1, segment=False): i01, indexs1, id1 = list(), List(), list() for sl_self, id_, _ 
in self.iter_on(ids1): i01.append(sl_self.start) - indexs1.append(obs_self[sl_self].argsort(order=['time', 'lon', 'lat'])) + indexs1.append(obs_self[sl_self].argsort(order=["time", "lon", "lat"])) id1.append(label1[sl_self.start]) i02, indexs2, id2 = list(), List(), list() for sl_other, _, _ in other.iter_on(ids2): i02.append(sl_other.start) - indexs2.append(obs_other[sl_other].argsort(order=['time', 'lon', 'lat'])) + indexs2.append( + obs_other[sl_other].argsort(order=["time", "lon", "lat"]) + ) id2.append(label2[sl_other.start]) id1, id2 = array(id1), array(id2) # We search item from self in item of others - i_local_target = same_position(x1, y1, t1, x2, y2, t2, array(i01), array(i02), indexs1, indexs2) + i_local_target = same_position( + x1, y1, t1, x2, y2, t2, array(i01), array(i02), indexs1, indexs2 + ) # -1 => no item found in other dataset m = i_local_target != -1 in_self.append(id1) - track2_ = -ones(id1.shape, dtype='i4') + track2_ = -ones(id1.shape, dtype="i4") track2_[m] = id2[i_local_target[m]] in_other.append(track2_) @@ -1804,7 +1819,7 @@ def mask_obs_close_event(self, merging=True, spliting=True, dt=3): :param int dt: delta of time max , defaults to 3 :return array: mask """ - m = zeros(len(self), dtype='bool') + m = zeros(len(self), dtype="bool") if merging: i_target, ip1, ip2 = self.merging_event(triplet=True, only_index=True) mask_follow_obs(m, self.previous_obs, self.time, ip1, dt) @@ -1817,6 +1832,7 @@ def mask_obs_close_event(self, merging=True, spliting=True, dt=3): mask_follow_obs(m, self.previous_obs, self.time, i_target, dt) return m + class Network: __slots__ = ( "window", @@ -2034,6 +2050,7 @@ def new_numbering(segs, start=0): def ptp(values): return values.max() - values.min() + @njit(cache=True) def generate_mask_from_ids(id_networks, nb, istart, iend, i0): """From list of id, we generate a mask @@ -2045,22 +2062,23 @@ def generate_mask_from_ids(id_networks, nb, istart, iend, i0): :param int i0: ref index from :py:meth:`~py_eddy_tracker.generic.build_index` :return array: return a mask """ - m = zeros(nb, dtype='bool') + m = zeros(nb, dtype="bool") for i in id_networks: - for j in range(istart[i-i0], iend[i-i0]): + for j in range(istart[i - i0], iend[i - i0]): m[j] = True return m + @njit(cache=True) def same_position(x0, y0, t0, x1, y1, t1, i00, i01, i0, i1): """Return index of track/segment found in other dataset - :param array x0: - :param array y0: - :param array t0: - :param array x1: - :param array y1: - :param array t1: + :param array x0: + :param array y0: + :param array t0: + :param array x1: + :param array y1: + :param array t1: :param array i00: First index of track/segment/network in dataset0 :param array i01: First index of track/segment/network in dataset1 :param List(array) i0: list of array which contain index to order dataset0 @@ -2068,9 +2086,9 @@ def same_position(x0, y0, t0, x1, y1, t1, i00, i01, i0, i1): :return array: index of dataset1 which match with dataset0, -1 => no match """ nb0, nb1 = i00.size, i01.size - i_target = -ones(nb0, dtype='i4') + i_target = -ones(nb0, dtype="i4") # To avoid to compare multiple time, if already match - used1 = zeros(nb1, dtype='bool') + used1 = zeros(nb1, dtype="bool") for j0 in range(nb0): for j1 in range(nb1): if used1[j1]: @@ -2085,9 +2103,10 @@ def same_position(x0, y0, t0, x1, y1, t1, i00, i01, i0, i1): if test: i_target[j0] = j1 used1[j1] = True - break + break return i_target + @njit(cache=True) def mask_follow_obs(m, next_obs, time, indexs, dt=3): """Generate a mask to select close obs in time from 
index diff --git a/src/py_eddy_tracker/observations/observation.py b/src/py_eddy_tracker/observations/observation.py index 384f537f..c2ff4fdb 100644 --- a/src/py_eddy_tracker/observations/observation.py +++ b/src/py_eddy_tracker/observations/observation.py @@ -2,21 +2,18 @@ """ Base class to manage eddy observation """ -import logging from datetime import datetime from io import BufferedReader, BytesIO +import logging from tarfile import ExFileObject from tokenize import TokenError -import packaging.version -import zarr +from Polygon import Polygon from matplotlib.cm import get_cmap -from matplotlib.collections import LineCollection -from matplotlib.collections import PolyCollection +from matplotlib.collections import LineCollection, PolyCollection from matplotlib.colors import Normalize from netCDF4 import Dataset -from numba import njit -from numba import types as numba_types +from numba import njit, types as numba_types from numpy import ( absolute, arange, @@ -45,9 +42,10 @@ where, zeros, ) +import packaging.version from pint import UnitRegistry from pint.errors import UndefinedUnitError -from Polygon import Polygon +import zarr from .. import VAR_DESCR, VAR_DESCR_inv, __version__ from ..generic import ( @@ -65,13 +63,13 @@ bbox_intersection, close_center, convexs, + create_meshed_particles, create_vertice, get_pixel_in_regular, insidepoly, poly_indexs, reduce_size, vertice_overlap, - create_meshed_particles, ) logger = logging.getLogger("pet") @@ -645,7 +643,9 @@ def align_on(self, other, var_name="time", all_ref=False, **kwargs): break if b0_self < b0_other: if all_ref: - yield indexs_self, empty(0, dtype=indexs_self.dtype), b0_self, b1_self + yield indexs_self, empty( + 0, dtype=indexs_self.dtype + ), b0_self, b1_self continue yield indexs_self, indexs_other, b0_self, b1_self @@ -2056,10 +2056,12 @@ def display_color(self, ax, field, intern=False, **kwargs): xname, yname = self.intern(intern) x, y = self[xname], self[yname] c = self.parse_varname(field) - cmap = get_cmap(kwargs.pop('cmap', 'Spectral_r')) - cmin, cmax = kwargs.pop('vmin', c.min()), kwargs.pop('vmax', c.max()) + cmap = get_cmap(kwargs.pop("cmap", "Spectral_r")) + cmin, cmax = kwargs.pop("vmin", c.min()), kwargs.pop("vmax", c.max()) colors = cmap((c - cmin) / (cmax - cmin)) - lines = LineCollection([create_vertice(i,j) for i,j in zip(x,y)], colors=colors, **kwargs) + lines = LineCollection( + [create_vertice(i, j) for i, j in zip(x, y)], colors=colors, **kwargs + ) ax.add_collection(lines) return lines diff --git a/src/py_eddy_tracker/observations/tracking.py b/src/py_eddy_tracker/observations/tracking.py index 7680961c..993e30f9 100644 --- a/src/py_eddy_tracker/observations/tracking.py +++ b/src/py_eddy_tracker/observations/tracking.py @@ -2,8 +2,8 @@ """ Class to manage observations gathered in trajectories """ -import logging from datetime import datetime, timedelta +import logging from numba import njit from numpy import ( diff --git a/src/py_eddy_tracker/poly.py b/src/py_eddy_tracker/poly.py index 6baf5ad8..6adb02c1 100644 --- a/src/py_eddy_tracker/poly.py +++ b/src/py_eddy_tracker/poly.py @@ -5,11 +5,10 @@ import heapq -from numba import njit, prange -from numba import types as numba_types +from Polygon import Polygon +from numba import njit, prange, types as numba_types from numpy import arctan, array, concatenate, empty, nan, ones, pi, where, zeros from numpy.linalg import lstsq -from Polygon import Polygon from .generic import build_index diff --git a/src/py_eddy_tracker/tracking.py 
b/src/py_eddy_tracker/tracking.py index 7543a4d3..02068962 100644 --- a/src/py_eddy_tracker/tracking.py +++ b/src/py_eddy_tracker/tracking.py @@ -3,14 +3,13 @@ Class to store link between observations """ +from datetime import datetime, timedelta import json import logging import platform -from datetime import datetime, timedelta from netCDF4 import Dataset, default_fillvals -from numba import njit -from numba import types as numba_types +from numba import njit, types as numba_types from numpy import ( arange, array, diff --git a/src/scripts/EddyTranslate b/src/scripts/EddyTranslate index 26ab3a7b..a0060e9b 100644 --- a/src/scripts/EddyTranslate +++ b/src/scripts/EddyTranslate @@ -3,8 +3,8 @@ """ Translate eddy Dataset """ -import zarr from netCDF4 import Dataset +import zarr from py_eddy_tracker import EddyParser from py_eddy_tracker.observations.observation import EddiesObservations diff --git a/tests/test_track.py b/tests/test_track.py index 4f362a26..f7e83786 100644 --- a/tests/test_track.py +++ b/tests/test_track.py @@ -1,5 +1,5 @@ -import zarr from netCDF4 import Dataset +import zarr from py_eddy_tracker.data import get_demo_path from py_eddy_tracker.featured_tracking.area_tracker import AreaTracker From e31d0a73b56438df9c7fd990db91c1e930f392ca Mon Sep 17 00:00:00 2001 From: "A. Delepoulle" <36040805+AntSimi@users.noreply.github.com> Date: Mon, 26 Sep 2022 17:42:29 +0200 Subject: [PATCH 071/115] add option to choose time step in particle candidate --- src/py_eddy_tracker/observations/groups.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/src/py_eddy_tracker/observations/groups.py b/src/py_eddy_tracker/observations/groups.py index 121ffa29..710557f7 100644 --- a/src/py_eddy_tracker/observations/groups.py +++ b/src/py_eddy_tracker/observations/groups.py @@ -91,7 +91,7 @@ def advect(x, y, c, t0, n_days, u_name="u", v_name="v"): def particle_candidate_step( - t_start, contours_start, contours_end, space_step, dt, c, **kwargs + t_start, contours_start, contours_end, space_step, dt, c, day_fraction=6, **kwargs ): """Select particles within eddies, advect them, return target observation and associated percentages. For one time step. @@ -102,13 +102,17 @@ def particle_candidate_step( :param float space_step: step between 2 particles :param int dt: duration of advection :param `~py_eddy_tracker.dataset.grid.GridCollection` c: GridCollection with speed for particles + :param int day_fraction: fraction of day :params dict kwargs: dict of params given to advection :return (np.array,np.array): return target index and percent associate """ + # In case of zarr array + contours_start = [i[:] for i in contours_start] + contours_end = [i[:] for i in contours_end] # Create particles in start contour x, y, i_start = create_meshed_particles(*contours_start, space_step) # Advect particles - kw = dict(nb_step=6, time_step=86400 / 6) + kw = dict(nb_step=day_fraction, time_step=86400 / day_fraction) p = c.advect(x, y, t_init=t_start, **kwargs, **kw) for _ in range(dt): _, x, y = p.__next__() From abd4433c895ec2aa712c8b41e996785bfc41ffa1 Mon Sep 17 00:00:00 2001 From: "A. 
Delepoulle" <36040805+AntSimi@users.noreply.github.com> Date: Tue, 27 Sep 2022 10:32:17 +0200 Subject: [PATCH 072/115] -lazy cube management -event statistics --- src/py_eddy_tracker/dataset/grid.py | 59 +++++++++++---- src/py_eddy_tracker/observations/groups.py | 71 +++++++++++++++---- src/py_eddy_tracker/observations/network.py | 68 +++++++++++++++++- .../observations/observation.py | 25 +++++++ 4 files changed, 194 insertions(+), 29 deletions(-) diff --git a/src/py_eddy_tracker/dataset/grid.py b/src/py_eddy_tracker/dataset/grid.py index 1cf871a7..c73f99d9 100644 --- a/src/py_eddy_tracker/dataset/grid.py +++ b/src/py_eddy_tracker/dataset/grid.py @@ -304,14 +304,26 @@ def __init__( "We assume pixel position of grid is centered for %s", filename ) if not unset: - self.load_general_features() - self.load() + self.populate() def populate(self): if self.dimensions is None: self.load_general_features() self.load() + def clean(self): + self.dimensions = None + self.variables_description = None + self.global_attrs = None + self.x_c = None + self.y_c = None + self.x_bounds = None + self.y_bounds = None + self.x_dim = None + self.y_dim = None + self.contours = None + self.vars = dict() + @property def is_centered(self): """Give True if pixel is described with its center's position or @@ -429,7 +441,7 @@ def c_to_bounds(c): def setup_coordinates(self): x_name, y_name = self.coordinates if self.is_centered: - logger.info("Grid center") + # logger.info("Grid center") self.x_c = self.vars[x_name].astype("float64") self.y_c = self.vars[y_name].astype("float64") @@ -1968,14 +1980,21 @@ def interp(self, grid_name, lons, lats, method="bilinear"): self.x_c, self.y_c, g, m, lons, lats, nearest=method == "nearest" ) - def uv_for_advection(self, u_name, v_name, time_step=600, backward=False, factor=1): + def uv_for_advection(self, u_name=None, v_name=None, time_step=600, h_name=None, backward=False, factor=1): """ Get U,V to be used in degrees with precomputed time step - :param str,array u_name: U field to advect obs - :param str,array v_name: V field to advect obs + :param None,str,array u_name: U field to advect obs, if h_name is None + :param None,str,array v_name: V field to advect obs, if h_name is None + :param None,str,array h_name: H field to compute UV to advect obs, if u_name and v_name are None :param int time_step: Number of second for each advection """ + if h_name is not None: + u_name, v_name = 'u', 'v' + if u_name not in self.vars: + self.add_uv(h_name) + self.vars.pop(h_name, None) + u = (self.grid(u_name) if isinstance(u_name, str) else u_name).copy() v = (self.grid(v_name) if isinstance(v_name, str) else v_name).copy() # N seconds / 1 degrees in m @@ -2318,6 +2337,14 @@ def from_netcdf_list( new.datasets.append((_t, d)) return new + @property + def are_loaded(self): + return ~array([d.dimensions is None for _, d in self.datasets]) + + def __repr__(self): + nb_dataset = len(self.datasets) + return f"{self.are_loaded.sum()}/{nb_dataset} datasets loaded" + def shift_files(self, t, filename, heigth=None, **rgd_kwargs): """Add next file to the list and remove the oldest""" @@ -2440,17 +2467,23 @@ def filament( t += dt yield t, f_x, f_y + def reset_grids(self, N=None): + if N is not None: + m = self.are_loaded + if m.sum() > N: + for i in where(m)[0]: + self.datasets[i][1].clean() + def advect( self, x, y, - u_name, - v_name, t_init, mask_particule=None, nb_step=10, time_step=600, rk4=True, + reset_grid=None, **kw, ): """ @@ -2458,18 +2491,18 @@ def advect( :param array x: Longitude of obs to 
move :param array y: Latitude of obs to move - :param str,array u_name: U field to advect obs - :param str,array v_name: V field to advect obs :param float t_init: time to start advection :param array,None mask_particule: advect only i mask is True :param int nb_step: Number of iteration before to release data :param int time_step: Number of second for each advection :param bool rk4: Use rk4 algorithm instead of finite difference + :param int reset_grid: Delete all loaded data in cube if there are more than N grid loaded :return: t,x,y position .. minigallery:: py_eddy_tracker.GridCollection.advect """ + self.reset_grids(reset_grid) backward = kw.get("backward", False) if backward: generator = self.get_previous_time_step(t_init) @@ -2480,9 +2513,9 @@ def advect( dt = nb_step * time_step t_step = time_step t0, d0 = generator.__next__() - u0, v0, m0 = d0.uv_for_advection(u_name, v_name, time_step, **kw) + u0, v0, m0 = d0.uv_for_advection(time_step=time_step, **kw) t1, d1 = generator.__next__() - u1, v1, m1 = d1.uv_for_advection(u_name, v_name, time_step, **kw) + u1, v1, m1 = d1.uv_for_advection(time_step=time_step, **kw) t0 = t0 * 86400 t1 = t1 * 86400 t = t_init * 86400 @@ -2497,7 +2530,7 @@ def advect( t0, u0, v0, m0 = t1, u1, v1, m1 t1, d1 = generator.__next__() t1 = t1 * 86400 - u1, v1, m1 = d1.uv_for_advection(u_name, v_name, time_step, **kw) + u1, v1, m1 = d1.uv_for_advection(time_step=time_step, **kw) w = 1 - (arange(t, t + dt, t_step) - t0) / (t1 - t0) half_w = t_step / 2.0 / (t1 - t0) advect_( diff --git a/src/py_eddy_tracker/observations/groups.py b/src/py_eddy_tracker/observations/groups.py index 710557f7..ace889f7 100644 --- a/src/py_eddy_tracker/observations/groups.py +++ b/src/py_eddy_tracker/observations/groups.py @@ -114,7 +114,7 @@ def particle_candidate_step( # Advect particles kw = dict(nb_step=day_fraction, time_step=86400 / day_fraction) p = c.advect(x, y, t_init=t_start, **kwargs, **kw) - for _ in range(dt): + for _ in range(abs(dt)): _, x, y = p.__next__() m = ~(isnan(x) + isnan(y)) i_end = full(x.shape, -1, dtype="i4") @@ -352,7 +352,7 @@ def keep_tracks_by_date(self, date, nb_days): return self.extract_with_mask(mask) def particle_candidate_atlas( - self, cube, space_step, dt, start_intern=False, end_intern=False, **kwargs + self, cube, space_step, dt, start_intern=False, end_intern=False, callback_coherence=None, finalize_coherence=None, **kwargs ): """Select particles within eddies, advect them, return target observation and associated percentages @@ -361,7 +361,9 @@ def particle_candidate_atlas( :param int dt: duration of advection :param bool start_intern: Use intern or extern contour at injection, defaults to False :param bool end_intern: Use intern or extern contour at end of advection, defaults to False - :params dict kwargs: dict of params given to advection + :param dict kwargs: dict of params given to advection + :param func callback_coherence: if None we will use cls.fill_coherence + :param func finalize_coherence: to apply on results of callback_coherence :return (np.array,np.array): return target index and percent associate """ t_start, t_end = int(self.period[0]), int(self.period[1]) @@ -374,23 +376,62 @@ def particle_candidate_atlas( i_target, pct = full(shape, -1, dtype="i4"), zeros(shape, dtype="i1") # Backward or forward times = arange(t_start, t_end - dt) if dt > 0 else arange(t_start + dt, t_end) + + if callback_coherence is None: + callback_coherence = self.fill_coherence + indexs = dict() + results = list() + kw_coherence = 
dict(space_step=space_step, dt=dt, c=cube) + kw_coherence.update(kwargs) for t in times: + logger.info("Coherence for time step : %s in [%s:%s]", t, times[0], times[-1]) # Get index for origin i = t - t_start indexs0 = i_sort[i_start[i] : i_end[i]] # Get index for end i = t + dt - t_start indexs1 = i_sort[i_start[i] : i_end[i]] - # Get contour data - contours0 = [self[label][indexs0] for label in self.intern(start_intern)] - contours1 = [self[label][indexs1] for label in self.intern(end_intern)] - # Get local result - i_target_, pct_ = particle_candidate_step( - t, contours0, contours1, space_step, dt, cube, **kwargs - ) - # Merge result - m = i_target_ != -1 - i_target_[m] = indexs1[i_target_[m]] - i_target[indexs0] = i_target_ - pct[indexs0] = pct_ + if indexs0.size == 0 or indexs1.size == 0: + continue + + results.append(callback_coherence(self, i_target, pct, indexs0, indexs1, start_intern, end_intern, t_start=t, **kw_coherence)) + indexs[results[-1]] = indexs0, indexs1 + + if finalize_coherence is not None: + finalize_coherence(results, indexs, i_target, pct) return i_target, pct + + @classmethod + def fill_coherence(cls, network, i_targets, percents, i_origin, i_end, start_intern, end_intern, **kwargs): + """_summary_ + + :param array i_targets: global target + :param array percents: + :param array i_origin: indices of origins + :param array i_end: indices of ends + :param bool start_intern: Use intern or extern contour at injection + :param bool end_intern: Use intern or extern contour at end of advection + """ + # Get contour data + contours_start = [network[label][i_origin] for label in cls.intern(start_intern)] + contours_end = [network[label][i_end] for label in cls.intern(end_intern)] + # Compute local coherence + i_local_targets, local_percents = particle_candidate_step(contours_start=contours_start, contours_end=contours_end,**kwargs) + # Store + cls.merge_particle_result(i_targets, percents, i_local_targets, local_percents, i_origin, i_end) + + @staticmethod + def merge_particle_result(i_targets, percents, i_local_targets, local_percents, i_origin, i_end): + """Copy local result in merged result with global indexation + + :param array i_targets: global target + :param array percents: + :param array i_local_targets: local index target + :param array local_percents: + :param array i_origin: indices of origins + :param array i_end: indices of ends + """ + m = i_local_targets != -1 + i_local_targets[m] = i_end[i_local_targets[m]] + i_targets[i_origin] = i_local_targets + percents[i_origin] = local_percents diff --git a/src/py_eddy_tracker/observations/network.py b/src/py_eddy_tracker/observations/network.py index 661144e7..146a87cc 100644 --- a/src/py_eddy_tracker/observations/network.py +++ b/src/py_eddy_tracker/observations/network.py @@ -7,7 +7,7 @@ import time import netCDF4 -from numba import njit +from numba import njit, types as nb_types from numba.typed import List from numpy import ( arange, @@ -23,6 +23,8 @@ unique, where, zeros, + percentile, + nan ) import zarr @@ -1743,6 +1745,7 @@ def segment_coherence_forward( step_mesh=1.0 / 50, contour_start="speed", contour_end="speed", + **kwargs, ): """ @@ -1801,6 +1804,7 @@ def date2file(julian_day): n_days=n_days, contour_start=contour_start, contour_end=contour_end, + **kwargs ) logger.info( ( @@ -1996,7 +2000,69 @@ def build_dataset(self, group, raw_data=True): print() eddies.track[new_i] = group return eddies + +@njit(cache=True) +def get_percentile_on_following_obs(i, indexs, percents, follow_obs, t, segment, i_target, 
window, q=50, nb_min=1): + """Get stat on a part of segment close of an event + + :param int i: index to follow + :param array indexs: indexs from coherence + :param array percents: percent from coherence + :param array[int] follow_obs: give index for the following observation + :param array t: time for each observation + :param array segment: segment for each observation + :param int i_target: index of target + :param int window: time window of search + :param int q: Percentile from 0 to 100, defaults to 50 + :param int nb_min: Number minimal of observation to provide statistics, defaults to 1 + :return float : return statistic + """ + last_t, segment_follow = t[i], segment[i] + segment_target = segment[i_target] + percent_target = empty(window, dtype=percents.dtype) + j = 0 + while abs(last_t - t[i]) < window and i != -1 and segment_follow == segment[i]: + # Iter on primary & secondary + for index, percent in zip(indexs[i], percents[i]): + if index != -1 and segment[index] == segment_target: + percent_target[j] = percent + j += 1 + i = follow_obs[i] + if j < nb_min: + return nan + return percentile(percent_target[:j], q) +@njit(cache=True) +def get_percentile_around_event(i, i1, i2, ind, pct, follow_obs, t, segment, window=10, follow_parent=False, q=50, nb_min=1): + """Get stat around event + + :param array[int] i: Indexs of target + :param array[int] i1: Indexs of primary origin + :param array[int] i2: Indexs of secondary origin + :param array ind: indexs from coherence + :param array pct: percent from coherence + :param array[int] follow_obs: give index for the following observation + :param array t: time for each observation + :param array segment: segment for each observation + :param int window: time window of search, defaults to 10 + :param bool follow_parent: Follow parent instead of child, defaults to False + :param int q: Percentile from 0 to 100, defaults to 50 + :param int nb_min: Number minimal of observation to provide statistics, defaults to 1 + :return (array,array) : statistic for each event + """ + stat1 = empty(i.size, dtype=nb_types.float32) + stat2 = empty(i.size, dtype=nb_types.float32) + # iter on event + for j, (i_, i1_, i2_) in enumerate(zip(i, i1, i2)): + if follow_parent: + # We follow parent + stat1[j] = get_percentile_on_following_obs(i_, ind, pct, follow_obs, t, segment, i1_, window, q, nb_min) + stat2[j] = get_percentile_on_following_obs(i_, ind, pct, follow_obs, t, segment, i2_, window, q, nb_min) + else: + # We follow child + stat1[j] = get_percentile_on_following_obs(i1_, ind, pct, follow_obs, t, segment, i_, window, q, nb_min) + stat2[j] = get_percentile_on_following_obs(i2_, ind, pct, follow_obs, t, segment, i_, window, q, nb_min) + return stat1, stat2 @njit(cache=True) def get_next_index(gr): diff --git a/src/py_eddy_tracker/observations/observation.py b/src/py_eddy_tracker/observations/observation.py index c2ff4fdb..7b1e0e45 100644 --- a/src/py_eddy_tracker/observations/observation.py +++ b/src/py_eddy_tracker/observations/observation.py @@ -1656,6 +1656,31 @@ def create_variable( except ValueError: logger.warning("Data is empty") + @staticmethod + def get_filters_zarr(name): + """Get filters to store in zarr for known variable + + :param str name: private variable name + :return list: filters list + """ + content = VAR_DESCR.get(name) + filters = list() + store_dtype = content["output_type"] + scale_factor, add_offset = content.get("scale_factor", None), content.get("add_offset", None) + if scale_factor is not None or add_offset is not None: + if 
add_offset is None: + add_offset = 0 + filters.append( + zarr.FixedScaleOffset( + offset=add_offset, + scale=1 / scale_factor, + dtype=content["nc_type"], + astype=store_dtype, + ) + ) + filters.extend(content.get("filters", [])) + return filters + def create_variable_zarr( self, handler_zarr, From 38b223a4b2dc07411c30dea71b6704f609fa3643 Mon Sep 17 00:00:00 2001 From: "A. Delepoulle" <36040805+AntSimi@users.noreply.github.com> Date: Tue, 27 Sep 2022 14:59:00 +0200 Subject: [PATCH 073/115] dissociate return table to transfer old indice in new --- src/py_eddy_tracker/observations/groups.py | 2 +- src/py_eddy_tracker/observations/network.py | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/src/py_eddy_tracker/observations/groups.py b/src/py_eddy_tracker/observations/groups.py index ace889f7..d363a5dd 100644 --- a/src/py_eddy_tracker/observations/groups.py +++ b/src/py_eddy_tracker/observations/groups.py @@ -375,7 +375,7 @@ def particle_candidate_atlas( shape = (len(self), 2) i_target, pct = full(shape, -1, dtype="i4"), zeros(shape, dtype="i1") # Backward or forward - times = arange(t_start, t_end - dt) if dt > 0 else arange(t_start + dt, t_end) + times = arange(t_start, t_end - dt) if dt > 0 else arange(t_start - dt, t_end) if callback_coherence is None: callback_coherence = self.fill_coherence diff --git a/src/py_eddy_tracker/observations/network.py b/src/py_eddy_tracker/observations/network.py index 146a87cc..0285936f 100644 --- a/src/py_eddy_tracker/observations/network.py +++ b/src/py_eddy_tracker/observations/network.py @@ -1214,6 +1214,7 @@ def dissociate_network(self): translate[:-1][i_sort] = arange(nb_obs) self.next_obs[:] = translate[n] self.previous_obs[:] = translate[p] + return translate def network_segment(self, id_network, id_segment): return self.extract_with_mask(self.segment_slice(id_network, id_segment)) From f1260a09692958479a165cae0a636704ff9df649 Mon Sep 17 00:00:00 2001 From: "A. 
Delepoulle" <36040805+AntSimi@users.noreply.github.com> Date: Wed, 28 Sep 2022 14:19:47 +0200 Subject: [PATCH 074/115] modify advect option --- examples/06_grid_manipulation/pet_advect.py | 20 +++++++++---------- examples/06_grid_manipulation/pet_lavd.py | 4 ++-- examples/07_cube_manipulation/pet_fsle_med.py | 4 ++-- .../pet_lavd_detection.py | 10 +++++----- examples/16_network/pet_follow_particle.py | 6 +++--- 5 files changed, 22 insertions(+), 22 deletions(-) diff --git a/examples/06_grid_manipulation/pet_advect.py b/examples/06_grid_manipulation/pet_advect.py index 1a98536a..ab2a0e14 100644 --- a/examples/06_grid_manipulation/pet_advect.py +++ b/examples/06_grid_manipulation/pet_advect.py @@ -73,7 +73,7 @@ def save(self, *args, **kwargs): # %% # Movie properties kwargs = dict(frames=arange(51), interval=100) -kw_p = dict(nb_step=2, time_step=21600) +kw_p = dict(u_name="u", v_name="v", nb_step=2, time_step=21600) frame_t = kw_p["nb_step"] * kw_p["time_step"] / 86400.0 @@ -102,7 +102,7 @@ def update(i_frame, t_step): # ^^^^^^^^^^^^^^^^ # Draw 3 last position in one path for each particles., # it could be run backward with `backward=True` option in filament method -p = g.filament(x, y, "u", "v", **kw_p, filament_size=3) +p = g.filament(x, y, **kw_p, filament_size=3) fig, txt, l, t = anim_ax(lw=0.5) _ = VideoAnimation(fig, update, **kwargs, fargs=(frame_t,)) @@ -110,13 +110,13 @@ def update(i_frame, t_step): # Particle forward # ^^^^^^^^^^^^^^^^^ # Forward advection of particles -p = g.advect(x, y, "u", "v", **kw_p) +p = g.advect(x, y, **kw_p) fig, txt, l, t = anim_ax(ls="", marker=".", markersize=1) _ = VideoAnimation(fig, update, **kwargs, fargs=(frame_t,)) # %% # We get last position and run backward until original position -p = g.advect(x, y, "u", "v", **kw_p, backward=True) +p = g.advect(x, y, **kw_p, backward=True) fig, txt, l, _ = anim_ax(ls="", marker=".", markersize=1) _ = VideoAnimation(fig, update, **kwargs, fargs=(-frame_t,)) @@ -139,9 +139,9 @@ def update(i_frame, t_step): ) for time_step in (10800, 21600, 43200, 86400): x, y = x0.copy(), y0.copy() - kw_advect = dict(nb_step=int(50 * 86400 / time_step), time_step=time_step) - g.advect(x, y, "u", "v", **kw_advect).__next__() - g.advect(x, y, "u", "v", **kw_advect, backward=True).__next__() + kw_advect = dict(nb_step=int(50 * 86400 / time_step), time_step=time_step, u_name="u", v_name="v") + g.advect(x, y, **kw_advect).__next__() + g.advect(x, y, **kw_advect, backward=True).__next__() d = ((x - x0) ** 2 + (y - y0) ** 2) ** 0.5 ax.hist(d, **kw, label=f"{86400. 
/ time_step:.0f} time step by day") ax.set_xlim(0, 0.25), ax.set_ylim(0, 100), ax.legend(loc="lower right"), ax.grid() @@ -158,9 +158,9 @@ def update(i_frame, t_step): time_step = 10800 for duration in (5, 50, 100): x, y = x0.copy(), y0.copy() - kw_advect = dict(nb_step=int(duration * 86400 / time_step), time_step=time_step) - g.advect(x, y, "u", "v", **kw_advect).__next__() - g.advect(x, y, "u", "v", **kw_advect, backward=True).__next__() + kw_advect = dict(nb_step=int(duration * 86400 / time_step), time_step=time_step, u_name="u", v_name="v") + g.advect(x, y, **kw_advect).__next__() + g.advect(x, y, **kw_advect, backward=True).__next__() d = ((x - x0) ** 2 + (y - y0) ** 2) ** 0.5 ax.hist(d, **kw, label=f"Time duration {duration} days") ax.set_xlim(0, 0.25), ax.set_ylim(0, 100), ax.legend(loc="lower right"), ax.grid() diff --git a/examples/06_grid_manipulation/pet_lavd.py b/examples/06_grid_manipulation/pet_lavd.py index 331ace8a..639db99e 100644 --- a/examples/06_grid_manipulation/pet_lavd.py +++ b/examples/06_grid_manipulation/pet_lavd.py @@ -110,9 +110,9 @@ def save(self, *args, **kwargs): step_by_day = 3 # Compute step of advection every 4h nb_step = 2 -kw_p = dict(nb_step=nb_step, time_step=86400 / step_by_day / nb_step) +kw_p = dict(nb_step=nb_step, time_step=86400 / step_by_day / nb_step, u_name="u", v_name="v") # Start a generator which at each iteration return new position at next time step -particule = g.advect(x, y, "u", "v", **kw_p, rk4=True) +particule = g.advect(x, y, **kw_p, rk4=True) # %% # LAVD diff --git a/examples/07_cube_manipulation/pet_fsle_med.py b/examples/07_cube_manipulation/pet_fsle_med.py index ef777639..a949ec77 100644 --- a/examples/07_cube_manipulation/pet_fsle_med.py +++ b/examples/07_cube_manipulation/pet_fsle_med.py @@ -142,8 +142,8 @@ def build_triplet(x, y, step=0.02): used = zeros(x.shape[0], dtype="bool") # advection generator -kw = dict(t_init=t0, nb_step=1, backward=backward, mask_particule=used) -p = c.advect(x, y, "u", "v", time_step=86400 / time_step_by_days, **kw) +kw = dict(t_init=t0, nb_step=1, backward=backward, mask_particule=used, u_name="u", v_name="v") +p = c.advect(x, y, time_step=86400 / time_step_by_days, **kw) # We check at each step of advection if particle distance is over `dist_max` for i in range(time_step_by_days * nb_days): diff --git a/examples/07_cube_manipulation/pet_lavd_detection.py b/examples/07_cube_manipulation/pet_lavd_detection.py index 1fa4d60b..4dace120 100644 --- a/examples/07_cube_manipulation/pet_lavd_detection.py +++ b/examples/07_cube_manipulation/pet_lavd_detection.py @@ -93,7 +93,7 @@ def update_axes(ax, mappable=None): # Time properties, for example with advection only 25 days nb_days, step_by_day = 25, 6 nb_time = step_by_day * nb_days -kw_p = dict(nb_step=1, time_step=86400 / step_by_day) +kw_p = dict(nb_step=1, time_step=86400 / step_by_day, u_name="u", v_name="v") t0 = 20236 t0_grid = c[t0] # Geographic properties, we use a coarser resolution for time consuming reasons @@ -114,7 +114,7 @@ def update_axes(ax, mappable=None): # ---------------------------- lavd = zeros(original_shape) lavd_ = lavd[m] -p = c.advect(x0.copy(), y0.copy(), "u", "v", t_init=t0, **kw_p) +p = c.advect(x0.copy(), y0.copy(), t_init=t0, **kw_p) for _ in range(nb_time): t, x, y = p.__next__() lavd_ += abs(c.interp("vort", t / 86400.0, x, y)) @@ -131,7 +131,7 @@ def update_axes(ax, mappable=None): # ----------------------------- lavd = zeros(original_shape) lavd_ = lavd[m] -p = c.advect(x0.copy(), y0.copy(), "u", "v", t_init=t0, 
backward=True, **kw_p) +p = c.advect(x0.copy(), y0.copy(), t_init=t0, backward=True, **kw_p) for i in range(nb_time): t, x, y = p.__next__() lavd_ += abs(c.interp("vort", t / 86400.0, x, y)) @@ -148,7 +148,7 @@ def update_axes(ax, mappable=None): # --------------------------- lavd = zeros(original_shape) lavd_ = lavd[m] -p = t0_grid.advect(x0.copy(), y0.copy(), "u", "v", **kw_p) +p = t0_grid.advect(x0.copy(), y0.copy(), **kw_p) for _ in range(nb_time): x, y = p.__next__() lavd_ += abs(t0_grid.interp("vort", x, y)) @@ -165,7 +165,7 @@ def update_axes(ax, mappable=None): # ---------------------------- lavd = zeros(original_shape) lavd_ = lavd[m] -p = t0_grid.advect(x0.copy(), y0.copy(), "u", "v", backward=True, **kw_p) +p = t0_grid.advect(x0.copy(), y0.copy(), backward=True, **kw_p) for i in range(nb_time): x, y = p.__next__() lavd_ += abs(t0_grid.interp("vort", x, y)) diff --git a/examples/16_network/pet_follow_particle.py b/examples/16_network/pet_follow_particle.py index 21592558..356c7da4 100644 --- a/examples/16_network/pet_follow_particle.py +++ b/examples/16_network/pet_follow_particle.py @@ -95,11 +95,11 @@ def save(self, *args, **kwargs): a.txt.set_position((25, 31)) step = 0.25 -kw_p = dict(nb_step=2, time_step=86400 * step * 0.5, t_init=t_snapshot - 2 * step) +kw_p = dict(nb_step=2, time_step=86400 * step * 0.5, t_init=t_snapshot - 2 * step, u_name="u", v_name="v") mappables = dict() -particules = c.advect(x, y, "u", "v", **kw_p) -filament = c.filament(x_f, y_f, "u", "v", **kw_p, filament_size=3) +particules = c.advect(x, y, **kw_p) +filament = c.filament(x_f, y_f, **kw_p, filament_size=3) kw = dict(ls="", marker=".", markersize=0.25) for k in index_: m = k == index From d54a743e61891ddbe913da7f2a25f5a389b09540 Mon Sep 17 00:00:00 2001 From: "A. Delepoulle" <36040805+AntSimi@users.noreply.github.com> Date: Wed, 28 Sep 2022 14:24:03 +0200 Subject: [PATCH 075/115] return sorting argument --- src/py_eddy_tracker/observations/network.py | 53 ++++++++----------- .../observations/observation.py | 20 ++++--- src/py_eddy_tracker/observations/tracking.py | 6 +-- src/py_eddy_tracker/tracking.py | 4 +- 4 files changed, 38 insertions(+), 45 deletions(-) diff --git a/src/py_eddy_tracker/observations/network.py b/src/py_eddy_tracker/observations/network.py index 0285936f..604035e4 100644 --- a/src/py_eddy_tracker/observations/network.py +++ b/src/py_eddy_tracker/observations/network.py @@ -106,12 +106,14 @@ def fix_next_previous_obs(next_obs, previous_obs, flag_virtual): class NetworkObservations(GroupEddiesObservations): - __slots__ = ("_index_network",) - + __slots__ = ("_index_network", "_index_segment_track", "_segment_track_array") NOGROUP = 0 def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) + self.reset_index() + + def reset_index(self): self._index_network = None self._index_segment_track = None self._segment_track_array = None @@ -251,9 +253,8 @@ def elements(self): def astype(self, cls): new = cls.new_like(self, self.shape) - print() - for k in new.obs.dtype.names: - if k in self.obs.dtype.names: + for k in new.fields: + if k in self.fields: new[k][:] = self[k][:] new.sign_type = self.sign_type return new @@ -371,8 +372,7 @@ def correct_close_events(self, nb_days_max=20): self.segment[:] = segment_copy self.previous_obs[:] = previous_obs - - self.sort() + return self.sort() def sort(self, order=("track", "segment", "time")): """ @@ -380,14 +380,19 @@ def sort(self, order=("track", "segment", "time")): :param tuple order: order or sorting. 
Given to :func:`numpy.argsort` """ - index_order = self.obs.argsort(order=order) - for field in self.elements: + index_order = self.obs.argsort(order=order, kind="mergesort") + self.reset_index() + for field in self.fields: self[field][:] = self[field][index_order] - translate = -ones(index_order.max() + 2, dtype="i4") - translate[index_order] = arange(index_order.shape[0]) + nb_obs = len(self) + # we add 1 for -1 index return index -1 + translate = -ones(nb_obs + 1, dtype="i4") + translate[index_order] = arange(nb_obs) + # next & previous must be re-indexed self.next_obs[:] = translate[self.next_obs] self.previous_obs[:] = translate[self.previous_obs] + return index_order, translate def obs_relative_order(self, i_obs): self.only_one_network() @@ -654,16 +659,16 @@ def normalize_longitude(self): lon0 = (self.lon[i_start] - 180).repeat(i_stop - i_start) logger.debug("Normalize longitude") self.lon[:] = (self.lon - lon0) % 360 + lon0 - if "lon_max" in self.obs.dtype.names: + if "lon_max" in self.fields: logger.debug("Normalize longitude_max") self.lon_max[:] = (self.lon_max - self.lon + 180) % 360 + self.lon - 180 if not self.raw_data: - if "contour_lon_e" in self.obs.dtype.names: + if "contour_lon_e" in self.fields: logger.debug("Normalize effective contour longitude") self.contour_lon_e[:] = ( (self.contour_lon_e.T - self.lon + 180) % 360 + self.lon - 180 ).T - if "contour_lon_s" in self.obs.dtype.names: + if "contour_lon_s" in self.fields: logger.debug("Normalize speed contour longitude") self.contour_lon_s[:] = ( (self.contour_lon_s.T - self.lon + 180) % 360 + self.lon - 180 @@ -1071,7 +1076,7 @@ def extract_event(self, indices): raw_data=self.raw_data, ) - for k in new.obs.dtype.names: + for k in new.fields: new[k][:] = self[k][indices] new.sign_type = self.sign_type return new @@ -1194,27 +1199,11 @@ def dissociate_network(self): """ Dissociate networks with no known interaction (splitting/merging) """ - tags = self.tag_segment(multi_network=True) if self.track[0] == 0: tags -= 1 - self.track[:] = tags[self.segment_track_array] - - i_sort = self.obs.argsort(order=("track", "segment", "time"), kind="mergesort") - # Sort directly obs, with hope to save memory - self.obs.sort(order=("track", "segment", "time"), kind="mergesort") - self._index_network = None - - # n & p must be re-indexed - n, p = self.next_obs, self.previous_obs - # we add 2 for -1 index return index -1 - nb_obs = len(self) - translate = -ones(nb_obs + 1, dtype="i4") - translate[:-1][i_sort] = arange(nb_obs) - self.next_obs[:] = translate[n] - self.previous_obs[:] = translate[p] - return translate + return self.sort() def network_segment(self, id_network, id_segment): return self.extract_with_mask(self.segment_slice(id_network, id_segment)) diff --git a/src/py_eddy_tracker/observations/observation.py b/src/py_eddy_tracker/observations/observation.py index 7b1e0e45..ae95315e 100644 --- a/src/py_eddy_tracker/observations/observation.py +++ b/src/py_eddy_tracker/observations/observation.py @@ -306,12 +306,16 @@ def box_display(value): """Return values evenly spaced with few numbers""" return "".join([f"{v_:10.2f}" for v_ in value]) + @property + def fields(self): + return list(self.obs.dtype.names) + def field_table(self): """ Produce description table of the fields available in this object """ rows = [("Name (Unit)", "Long name", "Scale factor", "Offset")] - names = list(self.obs.dtype.names) + names = self.fields names.sort() for field in names: infos = VAR_DESCR[field] @@ -414,7 +418,7 @@ def remove_fields(self, *fields): 
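[Editor's note] The `sort()` rework in patch 075 above does not re-sort the `next_obs`/`previous_obs` linked lists directly; it reorders every field once, then remaps the stored indices through a translate table. A minimal sketch of that pattern on hypothetical toy arrays (the extra table slot is what keeps the `-1` "no neighbour" sentinel stable):

```python
from numpy import arange, argsort, array, ones

time = array([3, 1, 2])
next_obs = array([-1, 2, 0])  # chain in original order, -1 = no neighbour

index_order = argsort(time, kind="mergesort")  # stable sort, as in the patch
nb_obs = index_order.size
# one extra slot so that looking up index -1 still returns -1
translate = -ones(nb_obs + 1, dtype="i4")
translate[index_order] = arange(nb_obs)

# reorder the field, then remap the indices it contains
next_obs_sorted = translate[next_obs[index_order]]
print(next_obs_sorted)  # [ 1  2 -1]
```

Returning `(index_order, translate)` is what lets callers such as `dissociate_network` and `correct_close_events` remap any index arrays they hold with the same table.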
""" nb_obs = self.obs.shape[0] fields = set(fields) - only_variables = set(self.obs.dtype.names) - fields + only_variables = set(self.fields) - fields track_extra_variables = set(self.track_extra_variables) - fields array_variables = set(self.array_variables) - fields new = self.__class__( @@ -426,7 +430,7 @@ def remove_fields(self, *fields): raw_data=self.raw_data, ) new.sign_type = self.sign_type - for name in new.obs.dtype.names: + for name in new.fields: logger.debug("Copy of field %s ...", name) new.obs[name] = self.obs[name] return new @@ -444,12 +448,12 @@ def add_fields(self, fields=list(), array_fields=list()): track_array_variables=self.track_array_variables, array_variables=list(concatenate((self.array_variables, array_fields))), only_variables=list( - concatenate((self.obs.dtype.names, fields, array_fields)) + concatenate((self.fields, fields, array_fields)) ), raw_data=self.raw_data, ) new.sign_type = self.sign_type - for name in self.obs.dtype.names: + for name in self.fields: logger.debug("Copy of field %s ...", name) new.obs[name] = self.obs[name] return new @@ -467,8 +471,8 @@ def circle_contour(self, only_virtual=False, factor=1): """ angle = radians(linspace(0, 360, self.track_array_variables)) x_norm, y_norm = cos(angle), sin(angle) - radius_s = "contour_lon_s" in self.obs.dtype.names - radius_e = "contour_lon_e" in self.obs.dtype.names + radius_s = "contour_lon_s" in self.fields + radius_e = "contour_lon_e" in self.fields for i, obs in enumerate(self): if only_virtual and not obs["virtual"]: continue @@ -684,7 +688,7 @@ def distance(self, other): def __copy__(self): eddies = self.new_like(self, len(self)) - for k in self.obs.dtype.names: + for k in self.fields: eddies[k][:] = self[k][:] eddies.sign_type = self.sign_type return eddies diff --git a/src/py_eddy_tracker/observations/tracking.py b/src/py_eddy_tracker/observations/tracking.py index 993e30f9..f1d2399b 100644 --- a/src/py_eddy_tracker/observations/tracking.py +++ b/src/py_eddy_tracker/observations/tracking.py @@ -183,16 +183,16 @@ def normalize_longitude(self): lon0 = (self.lon[self.index_from_track] - 180).repeat(self.nb_obs_by_track) logger.debug("Normalize longitude") self.lon[:] = (self.lon - lon0) % 360 + lon0 - if "lon_max" in self.obs.dtype.names: + if "lon_max" in self.fields: logger.debug("Normalize longitude_max") self.lon_max[:] = (self.lon_max - self.lon + 180) % 360 + self.lon - 180 if not self.raw_data: - if "contour_lon_e" in self.obs.dtype.names: + if "contour_lon_e" in self.fields: logger.debug("Normalize effective contour longitude") self.contour_lon_e[:] = ( (self.contour_lon_e.T - self.lon + 180) % 360 + self.lon - 180 ).T - if "contour_lon_s" in self.obs.dtype.names: + if "contour_lon_s" in self.fields: logger.debug("Normalize speed contour longitude") self.contour_lon_s[:] = ( (self.contour_lon_s.T - self.lon + 180) % 360 + self.lon - 180 diff --git a/src/py_eddy_tracker/tracking.py b/src/py_eddy_tracker/tracking.py index 02068962..16616d5a 100644 --- a/src/py_eddy_tracker/tracking.py +++ b/src/py_eddy_tracker/tracking.py @@ -658,7 +658,7 @@ def merge(self, until=-1, raw_data=True): # Set type of eddy with first file eddies.sign_type = self.current_obs.sign_type # Fields to copy - fields = self.current_obs.obs.dtype.names + fields = self.current_obs.fields # To know if the track start first_obs_save_in_tracks = zeros(self.i_current_by_tracks.shape, dtype=bool_) @@ -707,7 +707,7 @@ def merge(self, until=-1, raw_data=True): # Index in the current file index_current = self[i]["out"] - if 
"cost_association" in eddies.obs.dtype.names: + if "cost_association" in eddies.fields: eddies["cost_association"][index_final - 1] = self[i]["cost_value"] # Copy all variable for field in fields: From 2ba0d2af02d59dff8c6ed811ffcd5c5ff04e8189 Mon Sep 17 00:00:00 2001 From: "A. Delepoulle" <36040805+AntSimi@users.noreply.github.com> Date: Wed, 28 Sep 2022 15:08:37 +0200 Subject: [PATCH 076/115] update example --- .../pet_particles_drift.py | 6 ++-- .../06_grid_manipulation/pet_advect.ipynb | 28 +++++++++---------- .../06_grid_manipulation/pet_lavd.ipynb | 18 ++++++------ .../07_cube_manipulation/pet_fsle_med.ipynb | 22 +++++++-------- .../pet_lavd_detection.ipynb | 12 ++++---- .../pet_particles_drift.ipynb | 6 ++-- src/py_eddy_tracker/observations/groups.py | 2 +- 7 files changed, 47 insertions(+), 47 deletions(-) diff --git a/examples/07_cube_manipulation/pet_particles_drift.py b/examples/07_cube_manipulation/pet_particles_drift.py index f73216fc..c61ced5b 100644 --- a/examples/07_cube_manipulation/pet_particles_drift.py +++ b/examples/07_cube_manipulation/pet_particles_drift.py @@ -20,7 +20,7 @@ "longitude", "latitude", "time", - heigth="adt", + unset=True ) # %% @@ -34,7 +34,7 @@ # Get paths x0, y0 = meshgrid(arange(32, 35, 0.5), arange(32.5, 34.5, 0.5)) x0, y0 = x0.reshape(-1), y0.reshape(-1) -t, x, y = c.path(x0, y0, "u", "v", t_init=t0, **kw_p, nb_time=nb_time) +t, x, y = c.path(x0, y0, h_name="adt", t_init=t0, **kw_p, nb_time=nb_time) # %% # Plot paths @@ -43,4 +43,4 @@ ax.plot(x, y, lw=3) ax.set_title("10 days particle paths") ax.set_xlim(31, 35), ax.set_ylim(32, 34.5) -ax.grid() +ax.grid() \ No newline at end of file diff --git a/notebooks/python_module/06_grid_manipulation/pet_advect.ipynb b/notebooks/python_module/06_grid_manipulation/pet_advect.ipynb index 79d69b0d..90ee1722 100644 --- a/notebooks/python_module/06_grid_manipulation/pet_advect.ipynb +++ b/notebooks/python_module/06_grid_manipulation/pet_advect.ipynb @@ -15,7 +15,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\nGrid advection\n==============\n\nDummy advection which use only static geostrophic current, which didn't solve the complex circulation of the ocean.\n" + "\n# Grid advection\n\nDummy advection which use only static geostrophic current, which didn't solve the complex circulation of the ocean.\n" ] }, { @@ -98,7 +98,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "Anim\n----\nParticles setup\n\n" + "## Anim\nParticles setup\n\n" ] }, { @@ -127,7 +127,7 @@ }, "outputs": [], "source": [ - "kwargs = dict(frames=arange(51), interval=100)\nkw_p = dict(nb_step=2, time_step=21600)\nframe_t = kw_p[\"nb_step\"] * kw_p[\"time_step\"] / 86400.0" + "kwargs = dict(frames=arange(51), interval=100)\nkw_p = dict(u_name=\"u\", v_name=\"v\", nb_step=2, time_step=21600)\nframe_t = kw_p[\"nb_step\"] * kw_p[\"time_step\"] / 86400.0" ] }, { @@ -152,7 +152,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "Filament forward\n^^^^^^^^^^^^^^^^\nDraw 3 last position in one path for each particles.,\nit could be run backward with `backward=True` option in filament method\n\n" + "### Filament forward\nDraw 3 last position in one path for each particles.,\nit could be run backward with `backward=True` option in filament method\n\n" ] }, { @@ -163,14 +163,14 @@ }, "outputs": [], "source": [ - "p = g.filament(x, y, \"u\", \"v\", **kw_p, filament_size=3)\nfig, txt, l, t = anim_ax(lw=0.5)\n_ = VideoAnimation(fig, update, **kwargs, fargs=(frame_t,))" + "p = g.filament(x, y, **kw_p, filament_size=3)\nfig, 
txt, l, t = anim_ax(lw=0.5)\n_ = VideoAnimation(fig, update, **kwargs, fargs=(frame_t,))" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "Particle forward\n^^^^^^^^^^^^^^^^^\nForward advection of particles\n\n" + "### Particle forward\nForward advection of particles\n\n" ] }, { @@ -181,7 +181,7 @@ }, "outputs": [], "source": [ - "p = g.advect(x, y, \"u\", \"v\", **kw_p)\nfig, txt, l, t = anim_ax(ls=\"\", marker=\".\", markersize=1)\n_ = VideoAnimation(fig, update, **kwargs, fargs=(frame_t,))" + "p = g.advect(x, y, **kw_p)\nfig, txt, l, t = anim_ax(ls=\"\", marker=\".\", markersize=1)\n_ = VideoAnimation(fig, update, **kwargs, fargs=(frame_t,))" ] }, { @@ -199,21 +199,21 @@ }, "outputs": [], "source": [ - "p = g.advect(x, y, \"u\", \"v\", **kw_p, backward=True)\nfig, txt, l, _ = anim_ax(ls=\"\", marker=\".\", markersize=1)\n_ = VideoAnimation(fig, update, **kwargs, fargs=(-frame_t,))" + "p = g.advect(x, y, **kw_p, backward=True)\nfig, txt, l, _ = anim_ax(ls=\"\", marker=\".\", markersize=1)\n_ = VideoAnimation(fig, update, **kwargs, fargs=(-frame_t,))" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "Particles stat\n--------------\n\n" + "## Particles stat\n\n" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "Time_step settings\n^^^^^^^^^^^^^^^^^^\nDummy experiment to test advection precision, we run particles 50 days forward and backward with different time step\nand we measure distance between new positions and original positions.\n\n" + "### Time_step settings\nDummy experiment to test advection precision, we run particles 50 days forward and backward with different time step\nand we measure distance between new positions and original positions.\n\n" ] }, { @@ -224,14 +224,14 @@ }, "outputs": [], "source": [ - "fig = plt.figure()\nax = fig.add_subplot(111)\nkw = dict(\n bins=arange(0, 50, 0.001),\n cumulative=True,\n weights=ones(x0.shape) / x0.shape[0] * 100.0,\n histtype=\"step\",\n)\nfor time_step in (10800, 21600, 43200, 86400):\n x, y = x0.copy(), y0.copy()\n kw_advect = dict(nb_step=int(50 * 86400 / time_step), time_step=time_step)\n g.advect(x, y, \"u\", \"v\", **kw_advect).__next__()\n g.advect(x, y, \"u\", \"v\", **kw_advect, backward=True).__next__()\n d = ((x - x0) ** 2 + (y - y0) ** 2) ** 0.5\n ax.hist(d, **kw, label=f\"{86400. / time_step:.0f} time step by day\")\nax.set_xlim(0, 0.25), ax.set_ylim(0, 100), ax.legend(loc=\"lower right\"), ax.grid()\nax.set_title(\"Distance after 50 days forward and 50 days backward\")\nax.set_xlabel(\"Distance between original position and final position (in degrees)\")\n_ = ax.set_ylabel(\"Percent of particles with distance lesser than\")" + "fig = plt.figure()\nax = fig.add_subplot(111)\nkw = dict(\n bins=arange(0, 50, 0.001),\n cumulative=True,\n weights=ones(x0.shape) / x0.shape[0] * 100.0,\n histtype=\"step\",\n)\nfor time_step in (10800, 21600, 43200, 86400):\n x, y = x0.copy(), y0.copy()\n kw_advect = dict(nb_step=int(50 * 86400 / time_step), time_step=time_step, u_name=\"u\", v_name=\"v\")\n g.advect(x, y, **kw_advect).__next__()\n g.advect(x, y, **kw_advect, backward=True).__next__()\n d = ((x - x0) ** 2 + (y - y0) ** 2) ** 0.5\n ax.hist(d, **kw, label=f\"{86400. 
/ time_step:.0f} time step by day\")\nax.set_xlim(0, 0.25), ax.set_ylim(0, 100), ax.legend(loc=\"lower right\"), ax.grid()\nax.set_title(\"Distance after 50 days forward and 50 days backward\")\nax.set_xlabel(\"Distance between original position and final position (in degrees)\")\n_ = ax.set_ylabel(\"Percent of particles with distance lesser than\")" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "Time duration\n^^^^^^^^^^^^^\nWe keep same time_step but change time duration\n\n" + "### Time duration\nWe keep same time_step but change time duration\n\n" ] }, { @@ -242,7 +242,7 @@ }, "outputs": [], "source": [ - "fig = plt.figure()\nax = fig.add_subplot(111)\ntime_step = 10800\nfor duration in (5, 50, 100):\n x, y = x0.copy(), y0.copy()\n kw_advect = dict(nb_step=int(duration * 86400 / time_step), time_step=time_step)\n g.advect(x, y, \"u\", \"v\", **kw_advect).__next__()\n g.advect(x, y, \"u\", \"v\", **kw_advect, backward=True).__next__()\n d = ((x - x0) ** 2 + (y - y0) ** 2) ** 0.5\n ax.hist(d, **kw, label=f\"Time duration {duration} days\")\nax.set_xlim(0, 0.25), ax.set_ylim(0, 100), ax.legend(loc=\"lower right\"), ax.grid()\nax.set_title(\n \"Distance after N days forward and N days backward\\nwith a time step of 1/8 days\"\n)\nax.set_xlabel(\"Distance between original position and final position (in degrees)\")\n_ = ax.set_ylabel(\"Percent of particles with distance lesser than \")" + "fig = plt.figure()\nax = fig.add_subplot(111)\ntime_step = 10800\nfor duration in (5, 50, 100):\n x, y = x0.copy(), y0.copy()\n kw_advect = dict(nb_step=int(duration * 86400 / time_step), time_step=time_step, u_name=\"u\", v_name=\"v\")\n g.advect(x, y, **kw_advect).__next__()\n g.advect(x, y, **kw_advect, backward=True).__next__()\n d = ((x - x0) ** 2 + (y - y0) ** 2) ** 0.5\n ax.hist(d, **kw, label=f\"Time duration {duration} days\")\nax.set_xlim(0, 0.25), ax.set_ylim(0, 100), ax.legend(loc=\"lower right\"), ax.grid()\nax.set_title(\n \"Distance after N days forward and N days backward\\nwith a time step of 1/8 days\"\n)\nax.set_xlabel(\"Distance between original position and final position (in degrees)\")\n_ = ax.set_ylabel(\"Percent of particles with distance lesser than \")" ] } ], @@ -262,7 +262,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.10.6" } }, "nbformat": 4, diff --git a/notebooks/python_module/06_grid_manipulation/pet_lavd.ipynb b/notebooks/python_module/06_grid_manipulation/pet_lavd.ipynb index c4a4da84..cbe6de64 100644 --- a/notebooks/python_module/06_grid_manipulation/pet_lavd.ipynb +++ b/notebooks/python_module/06_grid_manipulation/pet_lavd.ipynb @@ -15,7 +15,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\nLAVD experiment\n===============\n\nNaive method to reproduce LAVD(Lagrangian-Averaged Vorticity deviation) method with a static velocity field.\nIn the current example we didn't remove a mean vorticity.\n\nMethod are described here:\n\n - Abernathey, Ryan, and George Haller. \"Transport by Lagrangian Vortices in the Eastern Pacific\",\n Journal of Physical Oceanography 48, 3 (2018): 667-685, accessed Feb 16, 2021,\n https://doi.org/10.1175/JPO-D-17-0102.1\n - `Transport by Coherent Lagrangian Vortices`_,\n R. 
Abernathey, Sinha A., Tarshish N., Liu T., Zhang C., Haller G., 2019,\n Talk a t the Sources and Sinks of Ocean Mesoscale Eddy Energy CLIVAR Workshop\n\n https://usclivar.org/sites/default/files/meetings/2019/presentations/Aberernathey_CLIVAR.pdf\n" + "\n# LAVD experiment\n\nNaive method to reproduce LAVD(Lagrangian-Averaged Vorticity deviation) method with a static velocity field.\nIn the current example we didn't remove a mean vorticity.\n\nMethod are described here:\n\n - Abernathey, Ryan, and George Haller. \"Transport by Lagrangian Vortices in the Eastern Pacific\",\n Journal of Physical Oceanography 48, 3 (2018): 667-685, accessed Feb 16, 2021,\n https://doi.org/10.1175/JPO-D-17-0102.1\n - `Transport by Coherent Lagrangian Vortices`_,\n R. Abernathey, Sinha A., Tarshish N., Liu T., Zhang C., Haller G., 2019,\n Talk a t the Sources and Sinks of Ocean Mesoscale Eddy Energy CLIVAR Workshop\n\n https://usclivar.org/sites/default/files/meetings/2019/presentations/Aberernathey_CLIVAR.pdf\n" ] }, { @@ -55,7 +55,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "Data\n----\nTo compute vorticity ($\\omega$) we compute u/v field with a stencil and apply the following equation with stencil\nmethod :\n\n\\begin{align}\\omega = \\frac{\\partial v}{\\partial x} - \\frac{\\partial u}{\\partial y}\\end{align}\n\n" + "## Data\nTo compute vorticity ($\\omega$) we compute u/v field with a stencil and apply the following equation with stencil\nmethod :\n\n\\begin{align}\\omega = \\frac{\\partial v}{\\partial x} - \\frac{\\partial u}{\\partial y}\\end{align}\n\n" ] }, { @@ -91,7 +91,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "Particles\n---------\nParticles specification\n\n" + "## Particles\nParticles specification\n\n" ] }, { @@ -102,14 +102,14 @@ }, "outputs": [], "source": [ - "step = 1 / 32\nx_g, y_g = arange(0, 36, step), arange(28, 46, step)\nx, y = meshgrid(x_g, y_g)\noriginal_shape = x.shape\nx, y = x.reshape(-1), y.reshape(-1)\nprint(f\"{len(x)} particles advected\")\n# A frame every 8h\nstep_by_day = 3\n# Compute step of advection every 4h\nnb_step = 2\nkw_p = dict(nb_step=nb_step, time_step=86400 / step_by_day / nb_step)\n# Start a generator which at each iteration return new position at next time step\nparticule = g.advect(x, y, \"u\", \"v\", **kw_p, rk4=True)" + "step = 1 / 32\nx_g, y_g = arange(0, 36, step), arange(28, 46, step)\nx, y = meshgrid(x_g, y_g)\noriginal_shape = x.shape\nx, y = x.reshape(-1), y.reshape(-1)\nprint(f\"{len(x)} particles advected\")\n# A frame every 8h\nstep_by_day = 3\n# Compute step of advection every 4h\nnb_step = 2\nkw_p = dict(nb_step=nb_step, time_step=86400 / step_by_day / nb_step, u_name=\"u\", v_name=\"v\")\n# Start a generator which at each iteration return new position at next time step\nparticule = g.advect(x, y, **kw_p, rk4=True)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "LAVD\n----\n\n" + "## LAVD\n\n" ] }, { @@ -127,7 +127,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "Anim\n^^^^\nMovie of LAVD integration at each integration time step.\n\n" + "### Anim\nMovie of LAVD integration at each integration time step.\n\n" ] }, { @@ -145,7 +145,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "Final LAVD\n^^^^^^^^^^\n\n" + "### Final LAVD\n\n" ] }, { @@ -163,7 +163,7 @@ }, "outputs": [], "source": [ - "lavd = RegularGridDataset.with_array(\n coordinates=(\"lon\", \"lat\"),\n datas=dict(lavd=lavd.T, lon=x_g, lat=y_g,),\n centered=True,\n)" + "lavd = RegularGridDataset.with_array(\n 
coordinates=(\"lon\", \"lat\"), datas=dict(lavd=lavd.T, lon=x_g, lat=y_g), centered=True\n)" ] }, { @@ -201,7 +201,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.10.6" } }, "nbformat": 4, diff --git a/notebooks/python_module/07_cube_manipulation/pet_fsle_med.ipynb b/notebooks/python_module/07_cube_manipulation/pet_fsle_med.ipynb index 8ee136b3..6f52e750 100644 --- a/notebooks/python_module/07_cube_manipulation/pet_fsle_med.ipynb +++ b/notebooks/python_module/07_cube_manipulation/pet_fsle_med.ipynb @@ -15,7 +15,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "\nFSLE experiment in med\n======================\n\nExample to build Finite Size Lyapunov Exponents, parameter values must be adapted for your case.\n\nExample use a method similar to `AVISO flse`_\n\n https://www.aviso.altimetry.fr/en/data/products/value-added-products/\n fsle-finite-size-lyapunov-exponents/fsle-description.html\n" + "\n# FSLE experiment in med\n\nExample to build Finite Size Lyapunov Exponents, parameter values must be adapted for your case.\n\nExample use a method similar to `AVISO flse`_\n\n https://www.aviso.altimetry.fr/en/data/products/value-added-products/\n fsle-finite-size-lyapunov-exponents/fsle-description.html\n" ] }, { @@ -33,7 +33,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "ADT in med\n----------\n:py:meth:`~py_eddy_tracker.dataset.grid.GridCollection.from_netcdf_cube` method is\nmade for data stores in time cube, you could use also \n:py:meth:`~py_eddy_tracker.dataset.grid.GridCollection.from_netcdf_list` method to\nload data-cube from multiple file.\n\n" + "## ADT in med\n:py:meth:`~py_eddy_tracker.dataset.grid.GridCollection.from_netcdf_cube` method is\nmade for data stores in time cube, you could use also\n:py:meth:`~py_eddy_tracker.dataset.grid.GridCollection.from_netcdf_list` method to\nload data-cube from multiple file.\n\n" ] }, { @@ -51,7 +51,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "Methods to compute FSLE\n-----------------------\n\n" + "## Methods to compute FSLE\n\n" ] }, { @@ -62,14 +62,14 @@ }, "outputs": [], "source": [ - "@njit(cache=True, fastmath=True)\ndef check_p(x, y, flse, theta, m_set, m, dt, dist_init=0.02, dist_max=0.6):\n \"\"\"\n Check if distance between eastern or northern particle to center particle is bigger than `dist_max`\n \"\"\"\n nb_p = x.shape[0] // 3\n delta = dist_max ** 2\n for i in range(nb_p):\n i0 = i * 3\n i_n = i0 + 1\n i_e = i0 + 2\n # If particle already set, we skip\n if m[i0] or m[i_n] or m[i_e]:\n continue\n # Distance with north\n dxn, dyn = x[i0] - x[i_n], y[i0] - y[i_n]\n dn = dxn ** 2 + dyn ** 2\n # Distance with east\n dxe, dye = x[i0] - x[i_e], y[i0] - y[i_e]\n de = dxe ** 2 + dye ** 2\n\n if dn >= delta or de >= delta:\n s1 = dn + de\n at1 = 2 * (dxe * dxn + dye * dyn)\n at2 = de - dn\n s2 = ((dxn + dye) ** 2 + (dxe - dyn) ** 2) * (\n (dxn - dye) ** 2 + (dxe + dyn) ** 2\n )\n flse[i] = 1 / (2 * dt) * log(1 / (2 * dist_init ** 2) * (s1 + s2 ** 0.5))\n theta[i] = arctan2(at1, at2 + s2) * 180 / pi\n # To know where value are set\n m_set[i] = False\n # To stop particle advection\n m[i0], m[i_n], m[i_e] = True, True, True\n\n\n@njit(cache=True)\ndef build_triplet(x, y, step=0.02):\n \"\"\"\n Triplet building for each position we add east and north point with defined step\n \"\"\"\n nb_x = x.shape[0]\n x_ = empty(nb_x * 3, dtype=x.dtype)\n y_ = empty(nb_x * 3, dtype=y.dtype)\n for i in range(nb_x):\n i0 = i * 3\n i_n, i_e = i0 + 1, 
i0 + 2\n x__, y__ = x[i], y[i]\n x_[i0], y_[i0] = x__, y__\n x_[i_n], y_[i_n] = x__, y__ + step\n x_[i_e], y_[i_e] = x__ + step, y__\n return x_, y_" + "@njit(cache=True, fastmath=True)\ndef check_p(x, y, flse, theta, m_set, m, dt, dist_init=0.02, dist_max=0.6):\n \"\"\"\n Check if distance between eastern or northern particle to center particle is bigger than `dist_max`\n \"\"\"\n nb_p = x.shape[0] // 3\n delta = dist_max**2\n for i in range(nb_p):\n i0 = i * 3\n i_n = i0 + 1\n i_e = i0 + 2\n # If particle already set, we skip\n if m[i0] or m[i_n] or m[i_e]:\n continue\n # Distance with north\n dxn, dyn = x[i0] - x[i_n], y[i0] - y[i_n]\n dn = dxn**2 + dyn**2\n # Distance with east\n dxe, dye = x[i0] - x[i_e], y[i0] - y[i_e]\n de = dxe**2 + dye**2\n\n if dn >= delta or de >= delta:\n s1 = dn + de\n at1 = 2 * (dxe * dxn + dye * dyn)\n at2 = de - dn\n s2 = ((dxn + dye) ** 2 + (dxe - dyn) ** 2) * (\n (dxn - dye) ** 2 + (dxe + dyn) ** 2\n )\n flse[i] = 1 / (2 * dt) * log(1 / (2 * dist_init**2) * (s1 + s2**0.5))\n theta[i] = arctan2(at1, at2 + s2) * 180 / pi\n # To know where value are set\n m_set[i] = False\n # To stop particle advection\n m[i0], m[i_n], m[i_e] = True, True, True\n\n\n@njit(cache=True)\ndef build_triplet(x, y, step=0.02):\n \"\"\"\n Triplet building for each position we add east and north point with defined step\n \"\"\"\n nb_x = x.shape[0]\n x_ = empty(nb_x * 3, dtype=x.dtype)\n y_ = empty(nb_x * 3, dtype=y.dtype)\n for i in range(nb_x):\n i0 = i * 3\n i_n, i_e = i0 + 1, i0 + 2\n x__, y__ = x[i], y[i]\n x_[i0], y_[i0] = x__, y__\n x_[i_n], y_[i_n] = x__, y__ + step\n x_[i_e], y_[i_e] = x__ + step, y__\n return x_, y_" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "Settings\n--------\n\n" + "## Settings\n\n" ] }, { @@ -87,7 +87,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "Particles\n---------\n\n" + "## Particles\n\n" ] }, { @@ -105,7 +105,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "FSLE\n----\n\n" + "## FSLE\n\n" ] }, { @@ -116,14 +116,14 @@ }, "outputs": [], "source": [ - "# Array to compute fsle\nfsle = zeros(x0.shape[0], dtype=\"f4\")\ntheta = zeros(x0.shape[0], dtype=\"f4\")\nmask = ones(x0.shape[0], dtype=\"f4\")\nx, y = build_triplet(x0, y0, dist_init)\nused = zeros(x.shape[0], dtype=\"bool\")\n\n# advection generator\nkw = dict(t_init=t0, nb_step=1, backward=backward, mask_particule=used)\np = c.advect(x, y, \"u\", \"v\", time_step=86400 / time_step_by_days, **kw)\n\n# We check at each step of advection if particle distance is over `dist_max`\nfor i in range(time_step_by_days * nb_days):\n t, xt, yt = p.__next__()\n dt = t / 86400.0 - t0\n check_p(xt, yt, fsle, theta, mask, used, dt, dist_max=dist_max, dist_init=dist_init)\n\n# Get index with original_position\ni = ((x0 - x0_) / step_grid_out).astype(\"i4\")\nj = ((y0 - y0_) / step_grid_out).astype(\"i4\")\nfsle_ = empty(grid_shape, dtype=\"f4\")\ntheta_ = empty(grid_shape, dtype=\"f4\")\nmask_ = ones(grid_shape, dtype=\"bool\")\nfsle_[i, j] = fsle\ntheta_[i, j] = theta\nmask_[i, j] = mask\n# Create a grid object\nfsle_custom = RegularGridDataset.with_array(\n coordinates=(\"lon\", \"lat\"),\n datas=dict(\n fsle=ma.array(fsle_, mask=mask_),\n theta=ma.array(theta_, mask=mask_),\n lon=lon_p,\n lat=lat_p,\n ),\n centered=True,\n)" + "# Array to compute fsle\nfsle = zeros(x0.shape[0], dtype=\"f4\")\ntheta = zeros(x0.shape[0], dtype=\"f4\")\nmask = ones(x0.shape[0], dtype=\"f4\")\nx, y = build_triplet(x0, y0, dist_init)\nused = zeros(x.shape[0], dtype=\"bool\")\n\n# 
advection generator\nkw = dict(t_init=t0, nb_step=1, backward=backward, mask_particule=used, u_name=\"u\", v_name=\"v\")\np = c.advect(x, y, time_step=86400 / time_step_by_days, **kw)\n\n# We check at each step of advection if particle distance is over `dist_max`\nfor i in range(time_step_by_days * nb_days):\n t, xt, yt = p.__next__()\n dt = t / 86400.0 - t0\n check_p(xt, yt, fsle, theta, mask, used, dt, dist_max=dist_max, dist_init=dist_init)\n\n# Get index with original_position\ni = ((x0 - x0_) / step_grid_out).astype(\"i4\")\nj = ((y0 - y0_) / step_grid_out).astype(\"i4\")\nfsle_ = empty(grid_shape, dtype=\"f4\")\ntheta_ = empty(grid_shape, dtype=\"f4\")\nmask_ = ones(grid_shape, dtype=\"bool\")\nfsle_[i, j] = fsle\ntheta_[i, j] = theta\nmask_[i, j] = mask\n# Create a grid object\nfsle_custom = RegularGridDataset.with_array(\n coordinates=(\"lon\", \"lat\"),\n datas=dict(\n fsle=ma.array(fsle_, mask=mask_),\n theta=ma.array(theta_, mask=mask_),\n lon=lon_p,\n lat=lat_p,\n ),\n centered=True,\n)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "Display FSLE\n------------\n\n" + "## Display FSLE\n\n" ] }, { @@ -141,7 +141,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "Display Theta\n-------------\n\n" + "## Display Theta\n\n" ] }, { @@ -172,7 +172,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.10.6" } }, "nbformat": 4, diff --git a/notebooks/python_module/07_cube_manipulation/pet_lavd_detection.ipynb b/notebooks/python_module/07_cube_manipulation/pet_lavd_detection.ipynb index bd197c57..708d7024 100644 --- a/notebooks/python_module/07_cube_manipulation/pet_lavd_detection.ipynb +++ b/notebooks/python_module/07_cube_manipulation/pet_lavd_detection.ipynb @@ -84,7 +84,7 @@ }, "outputs": [], "source": [ - "# Time properties, for example with advection only 25 days\nnb_days, step_by_day = 25, 6\nnb_time = step_by_day * nb_days\nkw_p = dict(nb_step=1, time_step=86400 / step_by_day)\nt0 = 20236\nt0_grid = c[t0]\n# Geographic properties, we use a coarser resolution for time consuming reasons\nstep = 1 / 32.0\nx_g, y_g = arange(-6, 36, step), arange(30, 46, step)\nx0, y0 = meshgrid(x_g, y_g)\noriginal_shape = x0.shape\nx0, y0 = x0.reshape(-1), y0.reshape(-1)\n# Get all particles in defined area\nm = ~isnan(t0_grid.interp(\"vort\", x0, y0))\nx0, y0 = x0[m], y0[m]\nprint(f\"{x0.size} particles advected\")\n# Gridded mask\nm = m.reshape(original_shape)" + "# Time properties, for example with advection only 25 days\nnb_days, step_by_day = 25, 6\nnb_time = step_by_day * nb_days\nkw_p = dict(nb_step=1, time_step=86400 / step_by_day, u_name=\"u\", v_name=\"v\")\nt0 = 20236\nt0_grid = c[t0]\n# Geographic properties, we use a coarser resolution for time consuming reasons\nstep = 1 / 32.0\nx_g, y_g = arange(-6, 36, step), arange(30, 46, step)\nx0, y0 = meshgrid(x_g, y_g)\noriginal_shape = x0.shape\nx0, y0 = x0.reshape(-1), y0.reshape(-1)\n# Get all particles in defined area\nm = ~isnan(t0_grid.interp(\"vort\", x0, y0))\nx0, y0 = x0[m], y0[m]\nprint(f\"{x0.size} particles advected\")\n# Gridded mask\nm = m.reshape(original_shape)" ] }, { @@ -102,7 +102,7 @@ }, "outputs": [], "source": [ - "lavd = zeros(original_shape)\nlavd_ = lavd[m]\np = c.advect(x0.copy(), y0.copy(), \"u\", \"v\", t_init=t0, **kw_p)\nfor _ in range(nb_time):\n t, x, y = p.__next__()\n lavd_ += abs(c.interp(\"vort\", t / 86400.0, x, y))\nlavd[m] = lavd_ / nb_time\n# Put LAVD result in a standard py eddy tracker grid\nlavd_forward = 
LAVDGrid.from_(x_g, y_g, ma.array(lavd, mask=~m).T)\n# Display\nfig, ax, _ = start_ax(\"LAVD with a forward advection\")\nmappable = lavd_forward.display(ax, \"lavd\", **kw_lavd)\n_ = update_axes(ax, mappable)" + "lavd = zeros(original_shape)\nlavd_ = lavd[m]\np = c.advect(x0.copy(), y0.copy(), t_init=t0, **kw_p)\nfor _ in range(nb_time):\n t, x, y = p.__next__()\n lavd_ += abs(c.interp(\"vort\", t / 86400.0, x, y))\nlavd[m] = lavd_ / nb_time\n# Put LAVD result in a standard py eddy tracker grid\nlavd_forward = LAVDGrid.from_(x_g, y_g, ma.array(lavd, mask=~m).T)\n# Display\nfig, ax, _ = start_ax(\"LAVD with a forward advection\")\nmappable = lavd_forward.display(ax, \"lavd\", **kw_lavd)\n_ = update_axes(ax, mappable)" ] }, { @@ -120,7 +120,7 @@ }, "outputs": [], "source": [ - "lavd = zeros(original_shape)\nlavd_ = lavd[m]\np = c.advect(x0.copy(), y0.copy(), \"u\", \"v\", t_init=t0, backward=True, **kw_p)\nfor i in range(nb_time):\n t, x, y = p.__next__()\n lavd_ += abs(c.interp(\"vort\", t / 86400.0, x, y))\nlavd[m] = lavd_ / nb_time\n# Put LAVD result in a standard py eddy tracker grid\nlavd_backward = LAVDGrid.from_(x_g, y_g, ma.array(lavd, mask=~m).T)\n# Display\nfig, ax, _ = start_ax(\"LAVD with a backward advection\")\nmappable = lavd_backward.display(ax, \"lavd\", **kw_lavd)\n_ = update_axes(ax, mappable)" + "lavd = zeros(original_shape)\nlavd_ = lavd[m]\np = c.advect(x0.copy(), y0.copy(), t_init=t0, backward=True, **kw_p)\nfor i in range(nb_time):\n t, x, y = p.__next__()\n lavd_ += abs(c.interp(\"vort\", t / 86400.0, x, y))\nlavd[m] = lavd_ / nb_time\n# Put LAVD result in a standard py eddy tracker grid\nlavd_backward = LAVDGrid.from_(x_g, y_g, ma.array(lavd, mask=~m).T)\n# Display\nfig, ax, _ = start_ax(\"LAVD with a backward advection\")\nmappable = lavd_backward.display(ax, \"lavd\", **kw_lavd)\n_ = update_axes(ax, mappable)" ] }, { @@ -138,7 +138,7 @@ }, "outputs": [], "source": [ - "lavd = zeros(original_shape)\nlavd_ = lavd[m]\np = t0_grid.advect(x0.copy(), y0.copy(), \"u\", \"v\", **kw_p)\nfor _ in range(nb_time):\n x, y = p.__next__()\n lavd_ += abs(t0_grid.interp(\"vort\", x, y))\nlavd[m] = lavd_ / nb_time\n# Put LAVD result in a standard py eddy tracker grid\nlavd_forward_static = LAVDGrid.from_(x_g, y_g, ma.array(lavd, mask=~m).T)\n# Display\nfig, ax, _ = start_ax(\"LAVD with a forward advection on a static velocity field\")\nmappable = lavd_forward_static.display(ax, \"lavd\", **kw_lavd)\n_ = update_axes(ax, mappable)" + "lavd = zeros(original_shape)\nlavd_ = lavd[m]\np = t0_grid.advect(x0.copy(), y0.copy(), **kw_p)\nfor _ in range(nb_time):\n x, y = p.__next__()\n lavd_ += abs(t0_grid.interp(\"vort\", x, y))\nlavd[m] = lavd_ / nb_time\n# Put LAVD result in a standard py eddy tracker grid\nlavd_forward_static = LAVDGrid.from_(x_g, y_g, ma.array(lavd, mask=~m).T)\n# Display\nfig, ax, _ = start_ax(\"LAVD with a forward advection on a static velocity field\")\nmappable = lavd_forward_static.display(ax, \"lavd\", **kw_lavd)\n_ = update_axes(ax, mappable)" ] }, { @@ -156,7 +156,7 @@ }, "outputs": [], "source": [ - "lavd = zeros(original_shape)\nlavd_ = lavd[m]\np = t0_grid.advect(x0.copy(), y0.copy(), \"u\", \"v\", backward=True, **kw_p)\nfor i in range(nb_time):\n x, y = p.__next__()\n lavd_ += abs(t0_grid.interp(\"vort\", x, y))\nlavd[m] = lavd_ / nb_time\n# Put LAVD result in a standard py eddy tracker grid\nlavd_backward_static = LAVDGrid.from_(x_g, y_g, ma.array(lavd, mask=~m).T)\n# Display\nfig, ax, _ = start_ax(\"LAVD with a backward advection on a static velocity 
field\")\nmappable = lavd_backward_static.display(ax, \"lavd\", **kw_lavd)\n_ = update_axes(ax, mappable)" + "lavd = zeros(original_shape)\nlavd_ = lavd[m]\np = t0_grid.advect(x0.copy(), y0.copy(), backward=True, **kw_p)\nfor i in range(nb_time):\n x, y = p.__next__()\n lavd_ += abs(t0_grid.interp(\"vort\", x, y))\nlavd[m] = lavd_ / nb_time\n# Put LAVD result in a standard py eddy tracker grid\nlavd_backward_static = LAVDGrid.from_(x_g, y_g, ma.array(lavd, mask=~m).T)\n# Display\nfig, ax, _ = start_ax(\"LAVD with a backward advection on a static velocity field\")\nmappable = lavd_backward_static.display(ax, \"lavd\", **kw_lavd)\n_ = update_axes(ax, mappable)" ] }, { @@ -194,7 +194,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.9" + "version": "3.10.6" } }, "nbformat": 4, diff --git a/notebooks/python_module/07_cube_manipulation/pet_particles_drift.ipynb b/notebooks/python_module/07_cube_manipulation/pet_particles_drift.ipynb index 53365ac7..b92c4d21 100644 --- a/notebooks/python_module/07_cube_manipulation/pet_particles_drift.ipynb +++ b/notebooks/python_module/07_cube_manipulation/pet_particles_drift.ipynb @@ -44,7 +44,7 @@ }, "outputs": [], "source": [ - "c = GridCollection.from_netcdf_cube(\n get_demo_path(\"dt_med_allsat_phy_l4_2005T2.nc\"),\n \"longitude\",\n \"latitude\",\n \"time\",\n heigth=\"adt\",\n)" + "c = GridCollection.from_netcdf_cube(\n get_demo_path(\"dt_med_allsat_phy_l4_2005T2.nc\"),\n \"longitude\",\n \"latitude\",\n \"time\",\n unset=True\n)" ] }, { @@ -80,7 +80,7 @@ }, "outputs": [], "source": [ - "x0, y0 = meshgrid(arange(32, 35, 0.5), arange(32.5, 34.5, 0.5))\nx0, y0 = x0.reshape(-1), y0.reshape(-1)\nt, x, y = c.path(x0, y0, \"u\", \"v\", t_init=t0, **kw_p, nb_time=nb_time)" + "x0, y0 = meshgrid(arange(32, 35, 0.5), arange(32.5, 34.5, 0.5))\nx0, y0 = x0.reshape(-1), y0.reshape(-1)\nt, x, y = c.path(x0, y0, h_name=\"adt\", t_init=t0, **kw_p, nb_time=nb_time)" ] }, { @@ -118,7 +118,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.7" + "version": "3.10.6" } }, "nbformat": 4, diff --git a/src/py_eddy_tracker/observations/groups.py b/src/py_eddy_tracker/observations/groups.py index d363a5dd..b0bb7bbf 100644 --- a/src/py_eddy_tracker/observations/groups.py +++ b/src/py_eddy_tracker/observations/groups.py @@ -84,7 +84,7 @@ def advect(x, y, c, t0, n_days, u_name="u", v_name="v"): if n_days < 0: kw["backward"] = True n_days = -n_days - p = c.advect(x, y, u_name, v_name, t_init=t0, **kw) + p = c.advect(x, y, u_name=u_name, v_name=v_name, t_init=t0, **kw) for _ in range(n_days): t, x, y = p.__next__() return t, x, y From 1b2c4c78c572384042a16ddd86791e331727a8e2 Mon Sep 17 00:00:00 2001 From: "A. 
Delepoulle" <36040805+AntSimi@users.noreply.github.com> Date: Thu, 13 Oct 2022 10:47:53 +0200 Subject: [PATCH 077/115] Add wrapping longitude test --- tests/test_generic.py | 21 ++++++++++++++++++++- 1 file changed, 20 insertions(+), 1 deletion(-) diff --git a/tests/test_generic.py b/tests/test_generic.py index ab3832cc..29cb64b7 100644 --- a/tests/test_generic.py +++ b/tests/test_generic.py @@ -1,6 +1,6 @@ from numpy import arange, array, nan, ones, zeros -from py_eddy_tracker.generic import cumsum_by_track, simplify +from py_eddy_tracker.generic import cumsum_by_track, simplify, wrap_longitude def test_simplify(): @@ -30,3 +30,22 @@ def test_cumsum_by_track(): a = ones(10, dtype="i4") * 2 track = array([1, 1, 2, 2, 2, 2, 44, 44, 44, 48]) assert (cumsum_by_track(a, track) == [2, 4, 2, 4, 6, 8, 2, 4, 6, 2]).all() + + +def test_wrapping(): + y = x = arange(-5,5, dtype='f4') + x_, _ = wrap_longitude(x, y, ref=-10) + assert (x_ == x).all() + x_, _ = wrap_longitude(x, y, ref=1) + assert x.size == x_.size + assert (x_[6:] == x[6:]).all() + assert (x_[:6] == x[:6] + 360).all() + x_, _ = wrap_longitude(x, y, ref=1, cut=True) + assert x.size + 3 == x_.size + assert (x_[6 + 3:] == x[6:]).all() + assert (x_[:7] == x[:7] + 360).all() + + # FIXME Need evolution in wrap_longitude + # x %= 360 + # x_, _ = wrap_longitude(x, y, ref=-10, cut=True) + # assert x.size == x_.size From c7fbbd7636650fcc1dc57077c8b9e988a34e9f69 Mon Sep 17 00:00:00 2001 From: "A. Delepoulle" <36040805+AntSimi@users.noreply.github.com> Date: Thu, 13 Oct 2022 10:48:25 +0200 Subject: [PATCH 078/115] Add hybrid method and speed up union method --- src/py_eddy_tracker/poly.py | 22 ++++++++++++++++++---- 1 file changed, 18 insertions(+), 4 deletions(-) diff --git a/src/py_eddy_tracker/poly.py b/src/py_eddy_tracker/poly.py index 6adb02c1..deabd3ea 100644 --- a/src/py_eddy_tracker/poly.py +++ b/src/py_eddy_tracker/poly.py @@ -431,7 +431,7 @@ def merge(x, y): return concatenate(x), concatenate(y) -def vertice_overlap(x0, y0, x1, y1, minimal_area=False, p1_area=False): +def vertice_overlap(x0, y0, x1, y1, minimal_area=False, p1_area=False, hybrid_area=False, min_overlap=0): r""" Return percent of overlap for each item. 
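[Editor's note] The hunk that follows replaces `(p0 + p1).area()` with `p0_area_ + p1_area_ - intersection`, i.e. the inclusion-exclusion identity, so the union polygon is never actually built; that is the "speed up union method" of this commit. A quick cross-check of the identity, using shapely purely for illustration (the `p0`/`p1` objects in the diff are the library's own polygon type, not shapely):

```python
from shapely.geometry import box

p0, p1 = box(0, 0, 2, 2), box(1, 1, 3, 3)
i = p0.intersection(p1).area  # 1.0
# union area obtained without constructing the union polygon
assert p0.union(p1).area == p0.area + p1.area - i  # 7.0 both ways
print(i / (p0.area + p1.area - i))  # overlap cost, ~0.143
```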
@@ -441,6 +441,9 @@ def vertice_overlap(x0, y0, x1, y1, minimal_area=False, p1_area=False): :param array y1: y for polygon list 1 :param bool minimal_area: If True, function will compute intersection/little polygon, else intersection/union :param bool p1_area: If True, function will compute intersection/p1 polygon, else intersection/union + :param bool hybrid_area: If True, function will compute like union, + but if cost is under min_overlap, obs is kept in case of fully included + :param float min_overlap: under this value cost is set to zero :return: Result of cost function :rtype: array @@ -466,14 +469,25 @@ def vertice_overlap(x0, y0, x1, y1, minimal_area=False, p1_area=False): # Area of intersection intersection = (p0 & p1).area() # we divide intersection with the little one result from 0 to 1 + if intersection == 0: + cost[i] = 0 + continue + p0_area_, p1_area_ = p0.area(), p1.area() if minimal_area: - cost[i] = intersection / min(p0.area(), p1.area()) + cost_ = intersection / min(p0_area_, p1_area_) # we divide intersection with p1 elif p1_area: - cost[i] = intersection / p1.area() + cost_ = intersection / p1_area_ # we divide intersection with polygon merging result from 0 to 1 else: - cost[i] = intersection / (p0 + p1).area() + cost_ = intersection / (p0_area_ + p1_area_ - intersection) + if cost_ >= min_overlap: + cost[i] = cost_ + else: + if hybrid_area and cost_ != 0 and (intersection / min(p0_area_, p1_area_)) > .99: + cost[i] = cost_ + else: + cost[i] = 0 return cost From 66f9905f2894b191a1bb9e05e3071c1adc508db3 Mon Sep 17 00:00:00 2001 From: "A. Delepoulle" <36040805+AntSimi@users.noreply.github.com> Date: Thu, 13 Oct 2022 10:51:43 +0200 Subject: [PATCH 079/115] Remove reference to obs or observation to be easily replace by store later --- src/py_eddy_tracker/observations/groups.py | 11 +++---- .../observations/observation.py | 31 +++++++++---------- src/py_eddy_tracker/observations/tracking.py | 18 +++++------ 3 files changed, 27 insertions(+), 33 deletions(-) diff --git a/src/py_eddy_tracker/observations/groups.py b/src/py_eddy_tracker/observations/groups.py index b0bb7bbf..54ae013c 100644 --- a/src/py_eddy_tracker/observations/groups.py +++ b/src/py_eddy_tracker/observations/groups.py @@ -292,15 +292,14 @@ def filled_by_interpolation(self, mask): nb_obs = len(self) index = arange(nb_obs) - for field in self.obs.dtype.descr: - var = field[0] + for field in self.fields: if ( - var in ["n", "virtual", "track", "cost_association"] - or var in self.array_variables + field in ["n", "virtual", "track", "cost_association"] + or field in self.array_variables ): continue - self.obs[var][mask] = interp( - index[mask], index[~mask], self.obs[var][~mask] + self.obs[field][mask] = interp( + index[mask], index[~mask], self.obs[field][~mask] ) def insert_virtual(self): diff --git a/src/py_eddy_tracker/observations/observation.py b/src/py_eddy_tracker/observations/observation.py index ae95315e..29fcf434 100644 --- a/src/py_eddy_tracker/observations/observation.py +++ b/src/py_eddy_tracker/observations/observation.py @@ -264,7 +264,7 @@ def get_infos(self): bins_lat=(-90, -60, -15, 15, 60, 90), bins_amplitude=array((0, 1, 2, 3, 4, 5, 10, 500)), bins_radius=array((0, 15, 30, 45, 60, 75, 100, 200, 2000)), - nb_obs=self.observations.shape[0], + nb_obs=len(self), ) t0, t1 = self.period infos["t0"], infos["t1"] = t0, t1 @@ -341,7 +341,7 @@ def __repr__(self): bins_lat = (-90, -60, -15, 15, 60, 90) bins_amplitude = array((0, 1, 2, 3, 4, 5, 10, 500)) bins_radius = array((0, 15, 30, 45, 60, 75, 
100, 200, 2000)) - nb_obs = self.observations.shape[0] + nb_obs = len(self) return f""" | {nb_obs} observations from {t0} to {t1} ({period} days, ~{nb_obs / period:.0f} obs/day) | Speed area : {self.speed_area.sum() / period / 1e12:.2f} Mkm²/day @@ -416,7 +416,7 @@ def remove_fields(self, *fields): """ Copy with fields listed remove """ - nb_obs = self.obs.shape[0] + nb_obs = len(self) fields = set(fields) only_variables = set(self.fields) - fields track_extra_variables = set(self.track_extra_variables) - fields @@ -439,7 +439,7 @@ def add_fields(self, fields=list(), array_fields=list()): """ Add a new field. """ - nb_obs = self.obs.shape[0] + nb_obs = len(self) new = self.__class__( size=nb_obs, track_extra_variables=list( @@ -547,9 +547,9 @@ def merge(self, other): nb_obs_self = len(self) nb_obs = nb_obs_self + len(other) eddies = self.new_like(self, nb_obs) - other_keys = other.obs.dtype.fields.keys() - self_keys = self.obs.dtype.fields.keys() - for key in eddies.obs.dtype.fields.keys(): + other_keys = other.fields + self_keys = self.fields + for key in eddies.fields: eddies.obs[key][:nb_obs_self] = self.obs[key][:] if key in other_keys: eddies.obs[key][nb_obs_self:] = other.obs[key][:] @@ -657,8 +657,8 @@ def insert_observations(self, other, index): """Insert other obs in self at the given index.""" if not self.coherence(other): raise Exception("Observations with no coherence") - insert_size = len(other.obs) - self_size = len(self.obs) + insert_size = len(other) + self_size = len(self) new_size = self_size + insert_size if self_size == 0: self.observations = other.obs @@ -1542,8 +1542,7 @@ def to_zarr(self, handler, **kwargs): handler.attrs["track_array_variables"] = self.track_array_variables handler.attrs["array_variables"] = ",".join(self.array_variables) # Iter on variables to create: - fields = [field[0] for field in self.observations.dtype.descr] - for ori_name in fields: + for ori_name in self.fields: # Patch for a transition name = ori_name # @@ -1588,12 +1587,11 @@ def to_netcdf(self, handler, **kwargs): handler.track_array_variables = self.track_array_variables handler.array_variables = ",".join(self.array_variables) # Iter on variables to create: - fields = [field[0] for field in self.observations.dtype.descr] fields_ = array( - [VAR_DESCR[field[0]]["nc_name"] for field in self.observations.dtype.descr] + [VAR_DESCR[field]["nc_name"] for field in self.fields] ) i = fields_.argsort() - for ori_name in array(fields)[i]: + for ori_name in array(self.fields)[i]: # Patch for a transition name = ori_name # @@ -1865,10 +1863,9 @@ def extract_with_mask(self, mask): if nb_obs == 0: logger.warning("Empty dataset will be created") else: - for field in self.obs.dtype.descr: + for field in self.fields: logger.debug("Copy of field %s ...", field) - var = field[0] - new.obs[var] = self.obs[var][mask] + new.obs[field] = self.obs[field][mask] return new def scatter(self, ax, name=None, ref=None, factor=1, **kwargs): diff --git a/src/py_eddy_tracker/observations/tracking.py b/src/py_eddy_tracker/observations/tracking.py index f1d2399b..4e0f9bcd 100644 --- a/src/py_eddy_tracker/observations/tracking.py +++ b/src/py_eddy_tracker/observations/tracking.py @@ -118,7 +118,7 @@ def __repr__(self): t0, t1 = self.period period = t1 - t0 + 1 nb = self.nb_obs_by_track - nb_obs = self.observations.shape[0] + nb_obs = len(self) m = self.virtual.astype("bool") nb_m = m.sum() bins_t = (1, 30, 90, 180, 270, 365, 1000, 10000) @@ -147,7 +147,7 @@ def __repr__(self): def add_distance(self): """Add a field of 
distance (m) between two consecutive observations, 0 for the last observation of each track""" - if "distance_next" in self.observations.dtype.descr: + if "distance_next" in self.fields: return self new = self.add_fields(("distance_next",)) new["distance_next"][:1] = self.distance_to_next() @@ -205,10 +205,9 @@ def extract_longer_eddies(self, nb_min, nb_obs, compress_id=True): logger.info("Selection of %d observations", nb_obs_select) eddies = self.__class__.new_like(self, nb_obs_select) eddies.sign_type = self.sign_type - for field in self.obs.dtype.descr: + for field in self.fields: logger.debug("Copy of field %s ...", field) - var = field[0] - eddies.obs[var] = self.obs[var][mask] + eddies.obs[field] = self.obs[field][mask] if compress_id: list_id = unique(eddies.obs.track) list_id.sort() @@ -387,13 +386,13 @@ def extract_toward_direction(self, west=True, delta_lon=None): def extract_first_obs_in_box(self, res): data = empty( - self.obs.shape, dtype=[("lon", "f4"), ("lat", "f4"), ("track", "i4")] + len(self), dtype=[("lon", "f4"), ("lat", "f4"), ("track", "i4")] ) data["lon"] = self.longitude - self.longitude % res data["lat"] = self.latitude - self.latitude % res data["track"] = self.track _, indexs = unique(data, return_index=True) - mask = zeros(self.obs.shape, dtype="bool") + mask = zeros(len(self), dtype="bool") mask[indexs] = True return self.extract_with_mask(mask) @@ -508,10 +507,9 @@ def extract_with_mask( if nb_obs == 0: logger.info("Empty dataset will be created") else: - for field in self.obs.dtype.descr: + for field in self.fields: logger.debug("Copy of field %s ...", field) - var = field[0] - new.obs[var] = self.obs[var][mask] + new.obs[field] = self.obs[field][mask] if compress_id: list_id = unique(new.track) list_id.sort() From 3e73e63adc6c71e3f040bfbb3d9f4e7e234eb05e Mon Sep 17 00:00:00 2001 From: "A. 
Delepoulle" <36040805+AntSimi@users.noreply.github.com> Date: Thu, 13 Oct 2022 10:56:18 +0200 Subject: [PATCH 080/115] Add hybrid method in appli --- src/py_eddy_tracker/appli/network.py | 13 +++++++++++- src/py_eddy_tracker/observations/network.py | 21 +++++++------------- src/py_eddy_tracker/observations/tracking.py | 12 ++++------- 3 files changed, 23 insertions(+), 23 deletions(-) diff --git a/src/py_eddy_tracker/appli/network.py b/src/py_eddy_tracker/appli/network.py index 03c5eb35..f488168e 100644 --- a/src/py_eddy_tracker/appli/network.py +++ b/src/py_eddy_tracker/appli/network.py @@ -36,6 +36,11 @@ def build_network(): action="store_true", help="If True, use intersection/little polygon, else intersection/union", ) + parser.add_argument( + "--hybrid-area", + action="store_true", + help="If True, use minimal-area method if overlap is under min overlap, else intersection/union", + ) parser.contour_intern_arg() @@ -49,7 +54,7 @@ def build_network(): memory=args.memory, ) group = n.group_observations( - min_overlap=args.min_overlap, minimal_area=args.minimal_area + min_overlap=args.min_overlap, minimal_area=args.minimal_area, hybrid_area=args.hybrid_area ) n.build_dataset(group).write_file(filename=args.out) @@ -74,6 +79,11 @@ def divide_network(): action="store_true", help="If True, use intersection/little polygon, else intersection/union", ) + parser.add_argument( + "--hybrid-area", + action="store_true", + help="If True, use minimal-area method if overlap is under min overlap, else intersection/union", + ) args = parser.parse_args() contour_name = TrackEddiesObservations.intern(args.intern, public_label=True) e = TrackEddiesObservations.load_file( @@ -87,6 +97,7 @@ def divide_network(): window=args.window, min_overlap=args.min_overlap, minimal_area=args.minimal_area, + hybrid_area=args.hybrid_area ), ) n.write_file(filename=args.out) diff --git a/src/py_eddy_tracker/observations/network.py b/src/py_eddy_tracker/observations/network.py index 604035e4..c395bd8d 100644 --- a/src/py_eddy_tracker/observations/network.py +++ b/src/py_eddy_tracker/observations/network.py @@ -1913,21 +1913,14 @@ def group_translator(nb, duos): apply_replace(translate, gr_i, gr_j) return translate - def group_observations(self, min_overlap=0.2, minimal_area=False): + def group_observations(self, min_overlap=0.2, minimal_area=False, **kwargs): """Store every interaction between identifications - Parameters - ---------- - minimal_area : bool, optional - If True, function will compute intersection/little polygon, else intersection/union, by default False + :param bool minimal_area: If True, function will compute intersection/little polygon, else intersection/union, by default False + :param float min_overlap: minimum overlap area to associate observations, by default 0.2 - min_overlap : float, optional - minimum overlap area to associate observations, by default 0.2 - - Returns - ------- - TrackEddiesObservations - netcdf with interactions + :return: + :rtype: TrackEddiesObservations """ results, nb_obs = list(), list() @@ -1945,9 +1938,9 @@ def group_observations(self, min_overlap=0.2, minimal_area=False): ii, ij = bbox_intersection(xi, yi, xj, yj) m = ( vertice_overlap( - xi[ii], yi[ii], xj[ij], yj[ij], minimal_area=minimal_area + xi[ii], yi[ii], xj[ij], yj[ij], minimal_area=minimal_area, min_overlap=min_overlap, **kwargs ) - > min_overlap + != 0 ) results.append((i, j, ii[m], ij[m])) if display_iteration: diff --git a/src/py_eddy_tracker/observations/tracking.py b/src/py_eddy_tracker/observations/tracking.py 
index 4e0f9bcd..4d155605 100644 --- a/src/py_eddy_tracker/observations/tracking.py +++ b/src/py_eddy_tracker/observations/tracking.py @@ -720,7 +720,7 @@ def get_previous_obs( time_ref, window, min_overlap=0.2, - minimal_area=False, + **kwargs, ): """Backward association of observations to the segments""" time_cur = int_(ids["time"][i_current]) @@ -737,10 +737,8 @@ def get_previous_obs( continue c = zeros(len(xj)) c[ij] = vertice_overlap( - xi[ii], yi[ii], xj[ij], yj[ij], minimal_area=minimal_area + xi[ii], yi[ii], xj[ij], yj[ij], min_overlap=min_overlap, **kwargs ) - # We remove low overlap - c[c < min_overlap] = 0 # We get index of maximal overlap i = c.argmax() c_i = c[i] @@ -762,7 +760,7 @@ def get_next_obs( time_ref, window, min_overlap=0.2, - minimal_area=False, + **kwargs ): """Forward association of observations to the segments""" time_max = time_e.shape[0] - 1 @@ -782,10 +780,8 @@ def get_next_obs( continue c = zeros(len(xj)) c[ij] = vertice_overlap( - xi[ii], yi[ii], xj[ij], yj[ij], minimal_area=minimal_area + xi[ii], yi[ii], xj[ij], yj[ij], min_overlap=min_overlap, **kwargs ) - # We remove low overlap - c[c < min_overlap] = 0 # We get index of maximal overlap i = c.argmax() c_i = c[i] From 943bbf3730b6192a3aa3c3f9106e5c94adc94269 Mon Sep 17 00:00:00 2001 From: "A. Delepoulle" <36040805+AntSimi@users.noreply.github.com> Date: Thu, 13 Oct 2022 11:01:44 +0200 Subject: [PATCH 081/115] Rewrite method to extract event for speed up --- src/py_eddy_tracker/observations/network.py | 236 +++++++++++--------- 1 file changed, 133 insertions(+), 103 deletions(-) diff --git a/src/py_eddy_tracker/observations/network.py b/src/py_eddy_tracker/observations/network.py index c395bd8d..65b9e636 100644 --- a/src/py_eddy_tracker/observations/network.py +++ b/src/py_eddy_tracker/observations/network.py @@ -113,6 +113,20 @@ def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.reset_index() + def __repr__(self): + m_event, s_event = self.merging_event(only_index=True, triplet=True)[0], self.splitting_event(only_index=True, triplet=True)[0] + period = (self.period[1] - self.period[0]) / 365.25 + nb_by_network = self.network_size() + big = 50_000 + infos = [ + f"Atlas with {self.nb_network} networks ({self.nb_network / period:0.0f} networks/year)," + f" {self.nb_segment} segments ({self.nb_segment / period:0.0f} segments/year), {len(self)} observations ({len(self) / period:0.0f} observations/year)", + f" {m_event.size} merging ({m_event.size / period:0.0f} merging/year), {s_event.size} splitting ({s_event.size / period:0.0f} splitting/year)", + f" with {(nb_by_network > big).sum()} network with more than {big} obs and the biggest have {nb_by_network.max()} observations ({nb_by_network[nb_by_network> big].sum()} observations cumulate)", + f" {nb_by_network[0]} observations in trash" + ] + return "\n".join(infos) + def reset_index(self): self._index_network = None self._index_segment_track = None @@ -313,13 +327,19 @@ def correct_close_events(self, nb_days_max=20): """ Transform event where segment A splits from segment B, then x days after segment B merges with A - to - segment A splits from segment B then x days after segment A merges with B (B will be longer) - These events have to last less than `nb_days_max` to be changed. 
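In short: when a segment splits from B and then merges back into B within nb_days_max days, the labels are swapped so that the long branch keeps a single segment id and only the short excursion remains a separate segment. A minimal sketch of the call, assuming a loaded NetworkObservations n:

n.correct_close_events(nb_days_max=20)  # relabel short split/re-merge excursions
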
+ + ------------------- A + / / + B -------------------- + to + --A-- + / \ + B ----------------------------------- + :param float nb_days_max: maximum time to search for splitting-merging event """ @@ -342,7 +362,7 @@ def correct_close_events(self, nb_days_max=20): segments_connexion[seg] = [i, i_p, i_n] for seg in sorted(segments_connexion.keys()): - seg_slice, i_seg_p, i_seg_n = segments_connexion[seg] + seg_slice, _, i_seg_n = segments_connexion[seg] # the segment ID has to be corrected, because we may have changed it since seg_corrected = segment[seg_slice.stop - 1] @@ -370,8 +390,6 @@ def correct_close_events(self, nb_days_max=20): segments_connexion[seg_corrected][0] = my_slice - self.segment[:] = segment_copy - self.previous_obs[:] = previous_obs return self.sort() def sort(self, order=("track", "segment", "time")): @@ -495,8 +513,10 @@ def func_backward(seg, indice): return self.extract_with_mask(mask) def connexions(self, multi_network=False): - """ - Create dictionnary for each segment, gives the segments in interaction with + """Create dictionnary for each segment, gives the segments in interaction with + + :param bool multi_network: use segment_track_array instead of segment, defaults to False + :return dict: Return dict of set, for each seg id we get set of segment which have event with him """ if multi_network: segment = self.segment_track_array @@ -504,26 +524,27 @@ def connexions(self, multi_network=False): self.only_one_network() segment = self.segment segments_connexion = dict() - - def add_seg(father, child): - if father not in segments_connexion: - segments_connexion[father] = set() - segments_connexion[father].add(child) - - previous_obs, next_obs = self.previous_obs, self.next_obs - for i, seg, _ in self.iter_on(segment): - if i.start == i.stop: - continue - i_p, i_n = previous_obs[i.start], next_obs[i.stop - 1] - # segment in interaction - p_seg, n_seg = segment[i_p], segment[i_n] - # Where segment are called - if i_p != -1: - add_seg(p_seg, seg) - add_seg(seg, p_seg) - if i_n != -1: - add_seg(n_seg, seg) - add_seg(seg, n_seg) + def add_seg(s1, s2): + if s1 not in segments_connexion: + segments_connexion[s1] = set() + if s2 not in segments_connexion: + segments_connexion[s2] = set() + segments_connexion[s1].add(s2), segments_connexion[s2].add(s1) + # Get index for each segment + i0, i1, _ = self.index_segment_track + i1 = i1 - 1 + # Check if segment merge + i_next = self.next_obs[i1] + m_n = i_next != -1 + # Check if segment come from splitting + i_previous = self.previous_obs[i0] + m_p = i_previous != -1 + # For each split + for s1, s2 in zip(segment[i_previous[m_p]], segment[i0[m_p]]): + add_seg(s1, s2) + # For each merge + for s1, s2 in zip(segment[i_next[m_n]], segment[i1[m_n]]): + add_seg(s1, s2) return segments_connexion @classmethod @@ -1089,34 +1110,22 @@ def segment_track_array(self): return self._segment_track_array def birth_event(self): - """Extract birth events. 
- Advice : individual eddies (self.track == 0) should be removed before -> apply remove_trash.""" - # FIXME how to manage group 0 - indices = list() - previous_obs = self.previous_obs - for i, _, _ in self.iter_on(self.segment_track_array): - nb = i.stop - i.start - if nb == 0: - continue - i_p = previous_obs[i.start] - if i_p == -1: - indices.append(i.start) - return self.extract_event(list(set(indices))) + """Extract birth events.""" + i_start, _, _ = self.index_segment_track + indices = i_start[self.previous_obs[i_start] == -1] + if self.first_is_trash(): + indices = indices[1:] + return self.extract_event(indices) + generation_event = birth_event def death_event(self): - """Extract death events. - Advice : individual eddies (self.track == 0) should be removed before -> apply remove_trash.""" - # FIXME how to manage group 0 - indices = list() - next_obs = self.next_obs - for i, _, _ in self.iter_on(self.segment_track_array): - nb = i.stop - i.start - if nb == 0: - continue - i_n = next_obs[i.stop - 1] - if i_n == -1: - indices.append(i.stop - 1) - return self.extract_event(list(set(indices))) + """Extract death events.""" + _, i_stop, _ = self.index_segment_track + indices = i_stop[self.next_obs[i_stop - 1] == -1] - 1 + if self.first_is_trash(): + indices = indices[1:] + return self.extract_event(indices) + dissipation_event = death_event def merging_event(self, triplet=False, only_index=False): """Return observation after a merging event. @@ -1124,25 +1133,26 @@ def merging_event(self, triplet=False, only_index=False): If `triplet=True` return the eddy after a merging event, the eddy before the merging event, and the eddy stopped due to merging. """ - idx_m1 = list() + # Get start and stop for each segment, there is no empty segment + _, i1, _ = self.index_segment_track + # Get last index for each segment + i_stop = i1 - 1 + # Get target index + idx_m1 = self.next_obs[i_stop] + # Get mask and valid target + m = idx_m1 != -1 + idx_m1 = idx_m1[m] + # Sort by time event + i = self.time[idx_m1].argsort() + idx_m1 = idx_m1[i] if triplet: - idx_m0_stop = list() - idx_m0 = list() - next_obs, previous_obs = self.next_obs, self.previous_obs - for i, _, _ in self.iter_on(self.segment_track_array): - nb = i.stop - i.start - if nb == 0: - continue - i_n = next_obs[i.stop - 1] - if i_n != -1: - if triplet: - idx_m0_stop.append(i.stop - 1) - idx_m0.append(previous_obs[i_n]) - idx_m1.append(i_n) + # Get obs before target + idx_m0_stop = i_stop[m][i] + idx_m0 = self.previous_obs[idx_m1].copy() if triplet: if only_index: - return array(idx_m1), array(idx_m0), array(idx_m0_stop) + return idx_m1, idx_m0, idx_m0_stop else: return ( self.extract_event(idx_m1), @@ -1150,7 +1160,7 @@ def merging_event(self, triplet=False, only_index=False): self.extract_event(idx_m0_stop), ) else: - idx_m1 = list(set(idx_m1)) + idx_m1 = unique(idx_m1) if only_index: return idx_m1 else: @@ -1162,25 +1172,24 @@ def splitting_event(self, triplet=False, only_index=False): If `triplet=True` return the eddy before a splitting event, the eddy after the splitting event, and the eddy starting due to splitting. 
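The event accessors share one calling pattern; a short sketch, assuming a loaded NetworkObservations n:

i_after, i_before, i_stopped = n.merging_event(triplet=True, only_index=True)
births = n.birth_event()  # first obs of segments with no previous_obs
deaths = n.death_event()  # last obs of segments with no next_obs
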
""" - idx_s0 = list() + # Get start and stop for each segment, there is no empty segment + i_start, _, _ = self.index_segment_track + # Get target index + idx_s0 = self.previous_obs[i_start] + # Get mask and valid target + m = idx_s0 != -1 + idx_s0 = idx_s0[m] + # Sort by time event + i = self.time[idx_s0].argsort() + idx_s0 = idx_s0[i] if triplet: - idx_s1_start = list() - idx_s1 = list() - next_obs, previous_obs = self.next_obs, self.previous_obs - for i, _, _ in self.iter_on(self.segment_track_array): - nb = i.stop - i.start - if nb == 0: - continue - i_p = previous_obs[i.start] - if i_p != -1: - if triplet: - idx_s1_start.append(i.start) - idx_s1.append(next_obs[i_p]) - idx_s0.append(i_p) + # Get obs after target + idx_s1_start = i_start[m][i] + idx_s1 = self.next_obs[idx_s0].copy() if triplet: if only_index: - return array(idx_s0), array(idx_s1), array(idx_s1_start) + return idx_s0, idx_s1, idx_s1_start else: return ( self.extract_event(idx_s0), @@ -1189,7 +1198,7 @@ def splitting_event(self, triplet=False, only_index=False): ) else: - idx_s0 = list(set(idx_s0)) + idx_s0 = unique(idx_s0) if only_index: return idx_s0 else: @@ -1199,7 +1208,7 @@ def dissociate_network(self): """ Dissociate networks with no known interaction (splitting/merging) """ - tags = self.tag_segment(multi_network=True) + tags = self.tag_segment() if self.track[0] == 0: tags -= 1 self.track[:] = tags[self.segment_track_array] @@ -1345,16 +1354,22 @@ def __tag_segment(cls, seg, tag, groups, connexions): # For each connexion we apply same function cls.__tag_segment(seg, tag, groups, connexions) - def tag_segment(self, multi_network=False): - if multi_network: - nb = self.segment_track_array[-1] + 1 - else: - nb = self.segment.max() + 1 + def tag_segment(self): + """For each segment, method give a new network id, and all segment are connected + + :return array: for each unique seg id, it return new network id + """ + nb = self.segment_track_array[-1] + 1 sub_group = zeros(nb, dtype="u4") - c = self.connexions(multi_network=multi_network) + c = self.connexions(multi_network=True) j = 1 # for each available id for i in range(nb): + # No connexions, no need to explore + if i not in c: + sub_group[i] = j + j+= 1 + continue # Skip if already set if sub_group[i] != 0: continue @@ -1363,15 +1378,31 @@ def tag_segment(self, multi_network=False): j += 1 return sub_group + def fully_connected(self): + """Suspicious + """ + raise Exception("Must be check") self.only_one_network() return self.tag_segment().shape[0] == 1 + def first_is_trash(self): + """Check if first network is Trash + + :return bool: True if first network is trash + """ + i_start, i_stop, _ = self.index_segment_track + sl = slice(i_start[0], i_stop[0]) + return (self.previous_obs[sl] == -1).all() and (self.next_obs[sl] == -1).all() + def remove_trash(self): """ Remove the lonely eddies (only 1 obs in segment, associated network number is 0) """ - return self.extract_with_mask(self.track != 0) + if self.first_is_trash(): + return self.extract_with_mask(self.track != 0) + else: + return self def plot(self, ax, ref=None, color_cycle=None, **kwargs): """ @@ -1551,12 +1582,11 @@ def extract_with_mask(self, mask): logger.debug( f"{nb_obs} observations will be extracted ({nb_obs / self.shape[0]:.3%})" ) - for field in self.obs.dtype.descr: + for field in self.fields: if field in ("next_obs", "previous_obs"): continue logger.debug("Copy of field %s ...", field) - var = field[0] - new.obs[var] = self.obs[var][mask] + new.obs[field] = self.obs[field][mask] # n & p must 
be re-index n, p = self.next_obs[mask], self.previous_obs[mask] # we add 2 for -1 index return index -1 @@ -1682,9 +1712,9 @@ def date2file(julian_day): return f"/tmp/dt_global_{date.strftime('%Y%m%d')}.nc" """ - - itb_final = -ones((self.obs.size, 2), dtype="i4") - ptb_final = zeros((self.obs.size, 2), dtype="i1") + shape = len(self), 2 + itb_final = -ones(shape, dtype="i4") + ptb_final = zeros(shape, dtype="i1") t_start, t_end = int(self.period[0]), int(self.period[1]) @@ -1760,9 +1790,9 @@ def date2file(julian_day): return f"/tmp/dt_global_{date.strftime('%Y%m%d')}.nc" """ - - itf_final = -ones((self.obs.size, 2), dtype="i4") - ptf_final = zeros((self.obs.size, 2), dtype="i1") + shape = len(self), 2 + itf_final = -ones(shape, dtype="i4") + ptf_final = zeros(shape, dtype="i1") t_start, t_end = int(self.period[0]), int(self.period[1]) From 3359edaf1b2df21ab61d16297560cd9e42406629 Mon Sep 17 00:00:00 2001 From: "A. Delepoulle" <36040805+AntSimi@users.noreply.github.com> Date: Thu, 13 Oct 2022 11:02:25 +0200 Subject: [PATCH 082/115] Add ref in display_color --- src/py_eddy_tracker/generic.py | 10 ++++++---- src/py_eddy_tracker/observations/observation.py | 12 +++++++++++- 2 files changed, 17 insertions(+), 5 deletions(-) diff --git a/src/py_eddy_tracker/generic.py b/src/py_eddy_tracker/generic.py index 7dbbf3c3..fbc17d07 100644 --- a/src/py_eddy_tracker/generic.py +++ b/src/py_eddy_tracker/generic.py @@ -456,17 +456,18 @@ def wrap_longitude(x, y, ref, cut=False): if cut: indexs = list() nb = x.shape[0] - new_previous = (x[0] - ref) % 360 + + new_x_previous = (x[0] - ref) % 360 + ref x_previous = x[0] for i in range(1, nb): x_ = x[i] - new_x = (x_ - ref) % 360 + new_x = (x_ - ref) % 360 + ref if not isnan(x_) and not isnan(x_previous): - d_new = new_x - new_previous + d_new = new_x - new_x_previous d = x_ - x_previous if abs(d - d_new) > 1e-5: indexs.append(i) - x_previous, new_previous = x_, new_x + x_previous, new_x_previous = x_, new_x nb_indexs = len(indexs) new_size = nb + nb_indexs * 3 @@ -477,6 +478,7 @@ def wrap_longitude(x, y, ref, cut=False): for i in range(nb): if j < nb_indexs and i == indexs[j]: j += 1 + # FIXME need check cor = 360 if x[i - 1] > x[i] else -360 out_x[i + i_] = (x[i] - ref) % 360 + ref - cor out_y[i + i_] = y[i] diff --git a/src/py_eddy_tracker/observations/observation.py b/src/py_eddy_tracker/observations/observation.py index 29fcf434..2e4abef3 100644 --- a/src/py_eddy_tracker/observations/observation.py +++ b/src/py_eddy_tracker/observations/observation.py @@ -2069,11 +2069,12 @@ def format_label(self, label): nb_obs=len(self), ) - def display_color(self, ax, field, intern=False, **kwargs): + def display_color(self, ax, field, ref=None, intern=False, **kwargs): """Plot colored contour of eddies :param matplotlib.axes.Axes ax: matplotlib axe used to draw :param str,array field: color field + :param float,None ref: if defined, all coordinates are wrapped with ref as western boundary :param bool intern: if True, draw the speed contour :param dict kwargs: look at :py:meth:`matplotlib.collections.LineCollection` @@ -2081,6 +2082,13 @@ def display_color(self, ax, field, intern=False, **kwargs): """ xname, yname = self.intern(intern) x, y = self[xname], self[yname] + + if ref is not None: + # TODO : maybe buggy with global display + shape_out = x.shape + x, y = wrap_longitude(x.reshape(-1), y.reshape(-1), ref) + x, y = x.reshape(shape_out), y.reshape(shape_out) + c = self.parse_varname(field) cmap = get_cmap(kwargs.pop("cmap", "Spectral_r")) cmin, cmax = 
kwargs.pop("vmin", c.min()), kwargs.pop("vmax", c.max()) @@ -2089,6 +2097,8 @@ def display_color(self, ax, field, intern=False, **kwargs): [create_vertice(i, j) for i, j in zip(x, y)], colors=colors, **kwargs ) ax.add_collection(lines) + lines.cmap = cmap + lines.norm = Normalize(vmin=cmin, vmax=cmax) return lines def display(self, ax, ref=None, extern_only=False, intern_only=False, **kwargs): From 9d408e55d5e44a874cff8774d8f26c2b98163664 Mon Sep 17 00:00:00 2001 From: "A. Delepoulle" <36040805+AntSimi@users.noreply.github.com> Date: Fri, 14 Oct 2022 09:36:40 +0200 Subject: [PATCH 083/115] Add moving window in iter_on, prepare tag 3.6.1 --- CHANGELOG.rst | 13 +++ src/py_eddy_tracker/observations/network.py | 4 +- .../observations/observation.py | 79 +++++++++++-------- src/py_eddy_tracker/observations/tracking.py | 3 - 4 files changed, 60 insertions(+), 39 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index b8cad2f4..76ec911d 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -11,12 +11,25 @@ and this project adheres to `Semantic Versioning Date: Tue, 18 Oct 2022 14:57:26 +0200 Subject: [PATCH 084/115] Modify remove dead end speed up extract_segment --- CHANGELOG.rst | 2 + src/py_eddy_tracker/observations/network.py | 123 ++++++++++++-------- 2 files changed, 76 insertions(+), 49 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 76ec911d..f8eee72f 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -11,6 +11,8 @@ and this project adheres to `Semantic Versioning big).sum()} network with more than {big} obs and the biggest have {nb_by_network.max()} observations ({nb_by_network[nb_by_network> big].sum()} observations cumulate)", - f" {nb_by_network[0]} observations in trash" + f" {nb_trash} observations in trash" ] return "\n".join(infos) @@ -369,26 +370,29 @@ def correct_close_events(self, nb_days_max=20): # we keep the real segment number seg_corrected_copy = segment_copy[seg_slice.stop - 1] + if i_seg_n == -1: + continue + # if segment is split n_seg = segment[i_seg_n] - # if segment is split - if i_seg_n != -1: - seg2_slice, i2_seg_p, i2_seg_n = segments_connexion[n_seg] - p2_seg = segment[i2_seg_p] - - # if it merges on the first in a certain time - if (p2_seg == seg_corrected) and ( - _time[i_seg_n] - _time[i2_seg_p] < nb_days_max - ): - my_slice = slice(i_seg_n, seg2_slice.stop) - # correct the factice segment - segment[my_slice] = seg_corrected - # correct the good segment - segment_copy[my_slice] = seg_corrected_copy - previous_obs[i_seg_n] = seg_slice.stop - 1 - - segments_connexion[seg_corrected][0] = my_slice + seg2_slice, i2_seg_p, _ = segments_connexion[n_seg] + if i2_seg_p == -1: + continue + p2_seg = segment[i2_seg_p] + + # if it merges on the first in a certain time + if (p2_seg == seg_corrected) and ( + _time[i_seg_n] - _time[i2_seg_p] < nb_days_max + ): + my_slice = slice(i_seg_n, seg2_slice.stop) + # correct the factice segment + segment[my_slice] = seg_corrected + # correct the good segment + segment_copy[my_slice] = seg_corrected_copy + previous_obs[i_seg_n] = seg_slice.stop - 1 + + segments_connexion[seg_corrected][0] = my_slice return self.sort() @@ -789,6 +793,8 @@ def display_timeline( colors_mode=colors_mode, ) ) + if field is not None: + field = self.parse_varname(field) for i, b0, b1 in self.iter_on("segment"): x = self.time[i] if x.shape[0] == 0: @@ -797,9 +803,9 @@ def display_timeline( y = b0 * ones(x.shape) else: if method == "all": - y = self[field][i] * factor + y = field[i] * factor else: - y = self[field][i].mean() * ones(x.shape) * 
factor + y = field[i].mean() * ones(x.shape) * factor if colors_mode == "roll": _color = self.get_color(j) @@ -825,7 +831,7 @@ def event_timeline(self, ax, field=None, method=None, factor=1, colors_mode="rol if field is not None and method != "all": for i, b0, _ in self.iter_on("segment"): - y = self[field][i] + y = self.parse_varname(field)[i] if y.shape[0] != 0: y_seg[b0] = y.mean() * factor mappables = dict() @@ -851,7 +857,7 @@ def event_timeline(self, ax, field=None, method=None, factor=1, colors_mode="rol y0 = b0 else: if method == "all": - y0 = self[field][i.stop - 1] * factor + y0 = self.parse_varname(field)[i.stop - 1] * factor else: y0 = y_seg[b0] if i_n != -1: @@ -860,7 +866,7 @@ def event_timeline(self, ax, field=None, method=None, factor=1, colors_mode="rol seg_next if field is None else ( - self[field][i_n] * factor + self.parse_varname(field)[i_n] * factor if method == "all" else y_seg[seg_next] ) @@ -876,7 +882,7 @@ def event_timeline(self, ax, field=None, method=None, factor=1, colors_mode="rol seg_previous if field is None else ( - self[field][i_p] * factor + self.parse_varname(field)[i_p] * factor if method == "all" else y_seg[seg_previous] ) @@ -1446,35 +1452,54 @@ def remove_dead_end(self, nobs=3, ndays=0, recursive=0, mask=None): .. warning:: It will remove short segment that splits from then merges with the same segment """ - segments_keep = list() connexions = self.connexions(multi_network=True) - t = self.time - for i, b0, _ in self.iter_on(self.segment_track_array): - if mask and mask[i].any(): - segments_keep.append(b0) - continue - nb = i.stop - i.start - dt = t[i.stop - 1] - t[i.start] - if (nb < nobs or dt < ndays) and len(connexions.get(b0, tuple())) < 2: - continue - segments_keep.append(b0) + i0, i1, _ = self.index_segment_track + dt = self.time[i1 -1] - self.time[i0] + 1 + nb = i1 - i0 + m = (dt >= ndays) * (nb >= nobs) + nb_connexions = array([len(connexions.get(i, tuple())) for i in where(~m)[0]]) + m[~m] = nb_connexions >= 2 + segments_keep = where(m)[0] + if mask is not None: + segments_keep = unique(concatenate((segments_keep, self.segment_track_array[mask]))) + # get mask for selected obs + m = ~self.segment_mask(segments_keep) + self.track[m] = 0 + self.segment[m] = 0 + self.previous_obs[m] = -1 + self.previous_cost[m] = 0 + self.next_obs[m] = -1 + self.next_cost[m] = 0 + + m_previous = m[self.previous_obs] + self.previous_obs[m_previous] = -1 + self.previous_cost[m_previous] = 0 + m_next = m[self.next_obs] + self.next_obs[m_next] = -1 + self.next_cost[m_next] = 0 + + self.sort() if recursive > 0: - return self.extract_segment(segments_keep, absolute=True).remove_dead_end( - nobs, ndays, recursive - 1 - ) - return self.extract_segment(segments_keep, absolute=True) + self.remove_dead_end(nobs, ndays, recursive - 1) def extract_segment(self, segments, absolute=False): - mask = ones(self.shape, dtype="bool") - segments = array(segments) - values = self.segment_track_array if absolute else "segment" - keep = ones(values.max() + 1, dtype="bool") - v = unique(values) - keep[v] = in1d(v, segments) - for i, b0, b1 in self.iter_on(values): - if not keep[b0]: - mask[i] = False - return self.extract_with_mask(mask) + """Extract given segments + + :param array,tuple,list segments: list of segment to extract + :param bool absolute: keep for compatibility, defaults to False + :return NetworkObservations: Return observations from selected segment + """ + if not absolute: + raise Exception("Not implemented") + return 
self.extract_with_mask(self.segment_mask(segments)) + + def segment_mask(self, segments): + """Get mask from list of segment + + :param list,array segments: absolute id of segment + """ + return generate_mask_from_ids(array(segments), len(self), *self.index_segment_track) + def get_mask_with_period(self, period): """ From c455bc6e68a73cb4745eac157481bcb77760b012 Mon Sep 17 00:00:00 2001 From: "A. Delepoulle" <36040805+AntSimi@users.noreply.github.com> Date: Tue, 18 Oct 2022 15:04:34 +0200 Subject: [PATCH 085/115] black & isort --- doc/conf.py | 14 +-- examples/06_grid_manipulation/pet_advect.py | 11 ++- examples/06_grid_manipulation/pet_lavd.py | 4 +- examples/07_cube_manipulation/pet_fsle_med.py | 4 +- .../pet_particles_drift.py | 4 +- examples/16_network/pet_follow_particle.py | 8 +- src/py_eddy_tracker/appli/network.py | 6 +- src/py_eddy_tracker/dataset/grid.py | 12 ++- src/py_eddy_tracker/observations/groups.py | 62 ++++++++++--- src/py_eddy_tracker/observations/network.py | 93 +++++++++++++------ .../observations/observation.py | 16 ++-- src/py_eddy_tracker/observations/tracking.py | 6 +- src/py_eddy_tracker/poly.py | 10 +- tests/test_generic.py | 4 +- 14 files changed, 181 insertions(+), 73 deletions(-) diff --git a/doc/conf.py b/doc/conf.py index ccf26e4e..0844d585 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -96,9 +96,9 @@ master_doc = "index" # General information about the project. -project = u"py-eddy-tracker" -copyright = u"2019, A. Delepoulle & E. Mason" -author = u"A. Delepoulle & E. Mason" +project = "py-eddy-tracker" +copyright = "2019, A. Delepoulle & E. Mason" +author = "A. Delepoulle & E. Mason" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the @@ -272,8 +272,8 @@ ( master_doc, "py-eddy-tracker.tex", - u"py-eddy-tracker Documentation", - u"A. Delepoulle \\& E. Mason", + "py-eddy-tracker Documentation", + "A. Delepoulle \\& E. Mason", "manual", ), ] @@ -304,7 +304,7 @@ # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ - (master_doc, "py-eddy-tracker", u"py-eddy-tracker Documentation", [author], 1) + (master_doc, "py-eddy-tracker", "py-eddy-tracker Documentation", [author], 1) ] # If true, show URL addresses after external links. 
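Back in PATCH 084, note that remove_dead_end now prunes in place (pruned observations are moved to the trash network, id 0) instead of returning an extraction, and extract_segment now only accepts absolute segment ids. A minimal sketch, with a placeholder file name:

from py_eddy_tracker.observations.network import NetworkObservations

n = NetworkObservations.load_file("network.nc")
n.remove_dead_end(nobs=3, ndays=10)                 # in place, returns None
m = n.segment_mask([4, 8, 15])                      # boolean mask from absolute ids
sub = n.extract_segment([4, 8, 15], absolute=True)  # absolute=False now raises
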
@@ -320,7 +320,7 @@ ( master_doc, "py-eddy-tracker", - u"py-eddy-tracker Documentation", + "py-eddy-tracker Documentation", author, "py-eddy-tracker", "One line description of project.", diff --git a/examples/06_grid_manipulation/pet_advect.py b/examples/06_grid_manipulation/pet_advect.py index ab2a0e14..d7cc67e9 100644 --- a/examples/06_grid_manipulation/pet_advect.py +++ b/examples/06_grid_manipulation/pet_advect.py @@ -139,7 +139,9 @@ def update(i_frame, t_step): ) for time_step in (10800, 21600, 43200, 86400): x, y = x0.copy(), y0.copy() - kw_advect = dict(nb_step=int(50 * 86400 / time_step), time_step=time_step, u_name="u", v_name="v") + kw_advect = dict( + nb_step=int(50 * 86400 / time_step), time_step=time_step, u_name="u", v_name="v" + ) g.advect(x, y, **kw_advect).__next__() g.advect(x, y, **kw_advect, backward=True).__next__() d = ((x - x0) ** 2 + (y - y0) ** 2) ** 0.5 @@ -158,7 +160,12 @@ def update(i_frame, t_step): time_step = 10800 for duration in (5, 50, 100): x, y = x0.copy(), y0.copy() - kw_advect = dict(nb_step=int(duration * 86400 / time_step), time_step=time_step, u_name="u", v_name="v") + kw_advect = dict( + nb_step=int(duration * 86400 / time_step), + time_step=time_step, + u_name="u", + v_name="v", + ) g.advect(x, y, **kw_advect).__next__() g.advect(x, y, **kw_advect, backward=True).__next__() d = ((x - x0) ** 2 + (y - y0) ** 2) ** 0.5 diff --git a/examples/06_grid_manipulation/pet_lavd.py b/examples/06_grid_manipulation/pet_lavd.py index 639db99e..a3ea846e 100644 --- a/examples/06_grid_manipulation/pet_lavd.py +++ b/examples/06_grid_manipulation/pet_lavd.py @@ -110,7 +110,9 @@ def save(self, *args, **kwargs): step_by_day = 3 # Compute step of advection every 4h nb_step = 2 -kw_p = dict(nb_step=nb_step, time_step=86400 / step_by_day / nb_step, u_name="u", v_name="v") +kw_p = dict( + nb_step=nb_step, time_step=86400 / step_by_day / nb_step, u_name="u", v_name="v" +) # Start a generator which at each iteration return new position at next time step particule = g.advect(x, y, **kw_p, rk4=True) diff --git a/examples/07_cube_manipulation/pet_fsle_med.py b/examples/07_cube_manipulation/pet_fsle_med.py index a949ec77..9d78ea02 100644 --- a/examples/07_cube_manipulation/pet_fsle_med.py +++ b/examples/07_cube_manipulation/pet_fsle_med.py @@ -142,7 +142,9 @@ def build_triplet(x, y, step=0.02): used = zeros(x.shape[0], dtype="bool") # advection generator -kw = dict(t_init=t0, nb_step=1, backward=backward, mask_particule=used, u_name="u", v_name="v") +kw = dict( + t_init=t0, nb_step=1, backward=backward, mask_particule=used, u_name="u", v_name="v" +) p = c.advect(x, y, time_step=86400 / time_step_by_days, **kw) # We check at each step of advection if particle distance is over `dist_max` diff --git a/examples/07_cube_manipulation/pet_particles_drift.py b/examples/07_cube_manipulation/pet_particles_drift.py index c61ced5b..3d7aa1a4 100644 --- a/examples/07_cube_manipulation/pet_particles_drift.py +++ b/examples/07_cube_manipulation/pet_particles_drift.py @@ -20,7 +20,7 @@ "longitude", "latitude", "time", - unset=True + unset=True, ) # %% @@ -43,4 +43,4 @@ ax.plot(x, y, lw=3) ax.set_title("10 days particle paths") ax.set_xlim(31, 35), ax.set_ylim(32, 34.5) -ax.grid() \ No newline at end of file +ax.grid() diff --git a/examples/16_network/pet_follow_particle.py b/examples/16_network/pet_follow_particle.py index 356c7da4..9f5458eb 100644 --- a/examples/16_network/pet_follow_particle.py +++ b/examples/16_network/pet_follow_particle.py @@ -95,7 +95,13 @@ def save(self, *args, **kwargs): 
a.txt.set_position((25, 31)) step = 0.25 -kw_p = dict(nb_step=2, time_step=86400 * step * 0.5, t_init=t_snapshot - 2 * step, u_name="u", v_name="v") +kw_p = dict( + nb_step=2, + time_step=86400 * step * 0.5, + t_init=t_snapshot - 2 * step, + u_name="u", + v_name="v", +) mappables = dict() particules = c.advect(x, y, **kw_p) diff --git a/src/py_eddy_tracker/appli/network.py b/src/py_eddy_tracker/appli/network.py index f488168e..33d50b2a 100644 --- a/src/py_eddy_tracker/appli/network.py +++ b/src/py_eddy_tracker/appli/network.py @@ -54,7 +54,9 @@ def build_network(): memory=args.memory, ) group = n.group_observations( - min_overlap=args.min_overlap, minimal_area=args.minimal_area, hybrid_area=args.hybrid_area + min_overlap=args.min_overlap, + minimal_area=args.minimal_area, + hybrid_area=args.hybrid_area, ) n.build_dataset(group).write_file(filename=args.out) @@ -97,7 +99,7 @@ def divide_network(): window=args.window, min_overlap=args.min_overlap, minimal_area=args.minimal_area, - hybrid_area=args.hybrid_area + hybrid_area=args.hybrid_area, ), ) n.write_file(filename=args.out) diff --git a/src/py_eddy_tracker/dataset/grid.py b/src/py_eddy_tracker/dataset/grid.py index c73f99d9..9345bf45 100644 --- a/src/py_eddy_tracker/dataset/grid.py +++ b/src/py_eddy_tracker/dataset/grid.py @@ -1980,7 +1980,15 @@ def interp(self, grid_name, lons, lats, method="bilinear"): self.x_c, self.y_c, g, m, lons, lats, nearest=method == "nearest" ) - def uv_for_advection(self, u_name=None, v_name=None, time_step=600, h_name=None, backward=False, factor=1): + def uv_for_advection( + self, + u_name=None, + v_name=None, + time_step=600, + h_name=None, + backward=False, + factor=1, + ): """ Get U,V to be used in degrees with precomputed time step @@ -1990,7 +1998,7 @@ def uv_for_advection(self, u_name=None, v_name=None, time_step=600, h_name=None, :param int time_step: Number of second for each advection """ if h_name is not None: - u_name, v_name = 'u', 'v' + u_name, v_name = "u", "v" if u_name not in self.vars: self.add_uv(h_name) self.vars.pop(h_name, None) diff --git a/src/py_eddy_tracker/observations/groups.py b/src/py_eddy_tracker/observations/groups.py index 54ae013c..81929e1e 100644 --- a/src/py_eddy_tracker/observations/groups.py +++ b/src/py_eddy_tracker/observations/groups.py @@ -351,7 +351,15 @@ def keep_tracks_by_date(self, date, nb_days): return self.extract_with_mask(mask) def particle_candidate_atlas( - self, cube, space_step, dt, start_intern=False, end_intern=False, callback_coherence=None, finalize_coherence=None, **kwargs + self, + cube, + space_step, + dt, + start_intern=False, + end_intern=False, + callback_coherence=None, + finalize_coherence=None, + **kwargs ): """Select particles within eddies, advect them, return target observation and associated percentages @@ -383,7 +391,9 @@ def particle_candidate_atlas( kw_coherence = dict(space_step=space_step, dt=dt, c=cube) kw_coherence.update(kwargs) for t in times: - logger.info("Coherence for time step : %s in [%s:%s]", t, times[0], times[-1]) + logger.info( + "Coherence for time step : %s in [%s:%s]", t, times[0], times[-1] + ) # Get index for origin i = t - t_start indexs0 = i_sort[i_start[i] : i_end[i]] @@ -393,7 +403,19 @@ def particle_candidate_atlas( if indexs0.size == 0 or indexs1.size == 0: continue - results.append(callback_coherence(self, i_target, pct, indexs0, indexs1, start_intern, end_intern, t_start=t, **kw_coherence)) + results.append( + callback_coherence( + self, + i_target, + pct, + indexs0, + indexs1, + start_intern, + end_intern, 
+ t_start=t, + **kw_coherence + ) + ) indexs[results[-1]] = indexs0, indexs1 if finalize_coherence is not None: @@ -401,7 +423,17 @@ def particle_candidate_atlas( return i_target, pct @classmethod - def fill_coherence(cls, network, i_targets, percents, i_origin, i_end, start_intern, end_intern, **kwargs): + def fill_coherence( + cls, + network, + i_targets, + percents, + i_origin, + i_end, + start_intern, + end_intern, + **kwargs + ): """_summary_ :param array i_targets: global target @@ -412,21 +444,29 @@ def fill_coherence(cls, network, i_targets, percents, i_origin, i_end, start_int :param bool end_intern: Use intern or extern contour at end of advection """ # Get contour data - contours_start = [network[label][i_origin] for label in cls.intern(start_intern)] + contours_start = [ + network[label][i_origin] for label in cls.intern(start_intern) + ] contours_end = [network[label][i_end] for label in cls.intern(end_intern)] # Compute local coherence - i_local_targets, local_percents = particle_candidate_step(contours_start=contours_start, contours_end=contours_end,**kwargs) + i_local_targets, local_percents = particle_candidate_step( + contours_start=contours_start, contours_end=contours_end, **kwargs + ) # Store - cls.merge_particle_result(i_targets, percents, i_local_targets, local_percents, i_origin, i_end) - + cls.merge_particle_result( + i_targets, percents, i_local_targets, local_percents, i_origin, i_end + ) + @staticmethod - def merge_particle_result(i_targets, percents, i_local_targets, local_percents, i_origin, i_end): + def merge_particle_result( + i_targets, percents, i_local_targets, local_percents, i_origin, i_end + ): """Copy local result in merged result with global indexation :param array i_targets: global target - :param array percents: + :param array percents: :param array i_local_targets: local index target - :param array local_percents: + :param array local_percents: :param array i_origin: indices of origins :param array i_end: indices of ends """ diff --git a/src/py_eddy_tracker/observations/network.py b/src/py_eddy_tracker/observations/network.py index f11a180b..6b3102ed 100644 --- a/src/py_eddy_tracker/observations/network.py +++ b/src/py_eddy_tracker/observations/network.py @@ -16,15 +16,14 @@ bool_, concatenate, empty, - in1d, + nan, ones, + percentile, uint16, uint32, unique, where, zeros, - percentile, - nan ) import zarr @@ -112,9 +111,12 @@ class NetworkObservations(GroupEddiesObservations): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.reset_index() - + def __repr__(self): - m_event, s_event = self.merging_event(only_index=True, triplet=True)[0], self.splitting_event(only_index=True, triplet=True)[0] + m_event, s_event = ( + self.merging_event(only_index=True, triplet=True)[0], + self.splitting_event(only_index=True, triplet=True)[0], + ) period = (self.period[1] - self.period[0]) / 365.25 nb_by_network = self.network_size() nb_trash = 0 if self.ref_index != 0 else nb_by_network[0] @@ -124,7 +126,7 @@ def __repr__(self): f" {self.nb_segment} segments ({self.nb_segment / period:0.0f} segments/year), {len(self)} observations ({len(self) / period:0.0f} observations/year)", f" {m_event.size} merging ({m_event.size / period:0.0f} merging/year), {s_event.size} splitting ({s_event.size / period:0.0f} splitting/year)", f" with {(nb_by_network > big).sum()} network with more than {big} obs and the biggest have {nb_by_network.max()} observations ({nb_by_network[nb_by_network> big].sum()} observations cumulate)", - f" {nb_trash} 
observations in trash" + f" {nb_trash} observations in trash", ] return "\n".join(infos) @@ -332,7 +334,7 @@ def correct_close_events(self, nb_days_max=20): segment A splits from segment B then x days after segment A merges with B (B will be longer) These events have to last less than `nb_days_max` to be changed. - + ------------------- A / / B -------------------- @@ -528,12 +530,14 @@ def connexions(self, multi_network=False): self.only_one_network() segment = self.segment segments_connexion = dict() + def add_seg(s1, s2): if s1 not in segments_connexion: segments_connexion[s1] = set() if s2 not in segments_connexion: segments_connexion[s2] = set() segments_connexion[s1].add(s2), segments_connexion[s2].add(s1) + # Get index for each segment i0, i1, _ = self.index_segment_track i1 = i1 - 1 @@ -1122,6 +1126,7 @@ def birth_event(self): if self.first_is_trash(): indices = indices[1:] return self.extract_event(indices) + generation_event = birth_event def death_event(self): @@ -1131,6 +1136,7 @@ def death_event(self): if self.first_is_trash(): indices = indices[1:] return self.extract_event(indices) + dissipation_event = death_event def merging_event(self, triplet=False, only_index=False): @@ -1374,7 +1380,7 @@ def tag_segment(self): # No connexions, no need to explore if i not in c: sub_group[i] = j - j+= 1 + j += 1 continue # Skip if already set if sub_group[i] != 0: @@ -1384,10 +1390,8 @@ def tag_segment(self): j += 1 return sub_group - def fully_connected(self): - """Suspicious - """ + """Suspicious""" raise Exception("Must be check") self.only_one_network() return self.tag_segment().shape[0] == 1 @@ -1454,14 +1458,16 @@ def remove_dead_end(self, nobs=3, ndays=0, recursive=0, mask=None): """ connexions = self.connexions(multi_network=True) i0, i1, _ = self.index_segment_track - dt = self.time[i1 -1] - self.time[i0] + 1 + dt = self.time[i1 - 1] - self.time[i0] + 1 nb = i1 - i0 m = (dt >= ndays) * (nb >= nobs) nb_connexions = array([len(connexions.get(i, tuple())) for i in where(~m)[0]]) m[~m] = nb_connexions >= 2 segments_keep = where(m)[0] if mask is not None: - segments_keep = unique(concatenate((segments_keep, self.segment_track_array[mask]))) + segments_keep = unique( + concatenate((segments_keep, self.segment_track_array[mask])) + ) # get mask for selected obs m = ~self.segment_mask(segments_keep) self.track[m] = 0 @@ -1470,14 +1476,14 @@ def remove_dead_end(self, nobs=3, ndays=0, recursive=0, mask=None): self.previous_cost[m] = 0 self.next_obs[m] = -1 self.next_cost[m] = 0 - + m_previous = m[self.previous_obs] self.previous_obs[m_previous] = -1 self.previous_cost[m_previous] = 0 m_next = m[self.next_obs] self.next_obs[m_next] = -1 self.next_cost[m_next] = 0 - + self.sort() if recursive > 0: self.remove_dead_end(nobs, ndays, recursive - 1) @@ -1498,8 +1504,9 @@ def segment_mask(self, segments): :param list,array segments: absolute id of segment """ - return generate_mask_from_ids(array(segments), len(self), *self.index_segment_track) - + return generate_mask_from_ids( + array(segments), len(self), *self.index_segment_track + ) def get_mask_with_period(self, period): """ @@ -1849,7 +1856,7 @@ def date2file(julian_day): n_days=n_days, contour_start=contour_start, contour_end=contour_end, - **kwargs + **kwargs, ) logger.info( ( @@ -1974,7 +1981,7 @@ def group_observations(self, min_overlap=0.2, minimal_area=False, **kwargs): :param bool minimal_area: If True, function will compute intersection/little polygon, else intersection/union, by default False :param float min_overlap: minimum 
overlap area to associate observations, by default 0.2 - :return: + :return: :rtype: TrackEddiesObservations """ @@ -1993,7 +2000,13 @@ def group_observations(self, min_overlap=0.2, minimal_area=False, **kwargs): ii, ij = bbox_intersection(xi, yi, xj, yj) m = ( vertice_overlap( - xi[ii], yi[ii], xj[ij], yj[ij], minimal_area=minimal_area, min_overlap=min_overlap, **kwargs + xi[ii], + yi[ii], + xj[ij], + yj[ij], + minimal_area=minimal_area, + min_overlap=min_overlap, + **kwargs, ) != 0 ) @@ -2038,9 +2051,12 @@ def build_dataset(self, group, raw_data=True): print() eddies.track[new_i] = group return eddies - + + @njit(cache=True) -def get_percentile_on_following_obs(i, indexs, percents, follow_obs, t, segment, i_target, window, q=50, nb_min=1): +def get_percentile_on_following_obs( + i, indexs, percents, follow_obs, t, segment, i_target, window, q=50, nb_min=1 +): """Get stat on a part of segment close of an event :param int i: index to follow @@ -2070,8 +2086,22 @@ def get_percentile_on_following_obs(i, indexs, percents, follow_obs, t, segment, return nan return percentile(percent_target[:j], q) + @njit(cache=True) -def get_percentile_around_event(i, i1, i2, ind, pct, follow_obs, t, segment, window=10, follow_parent=False, q=50, nb_min=1): +def get_percentile_around_event( + i, + i1, + i2, + ind, + pct, + follow_obs, + t, + segment, + window=10, + follow_parent=False, + q=50, + nb_min=1, +): """Get stat around event :param array[int] i: Indexs of target @@ -2094,14 +2124,23 @@ def get_percentile_around_event(i, i1, i2, ind, pct, follow_obs, t, segment, win for j, (i_, i1_, i2_) in enumerate(zip(i, i1, i2)): if follow_parent: # We follow parent - stat1[j] = get_percentile_on_following_obs(i_, ind, pct, follow_obs, t, segment, i1_, window, q, nb_min) - stat2[j] = get_percentile_on_following_obs(i_, ind, pct, follow_obs, t, segment, i2_, window, q, nb_min) + stat1[j] = get_percentile_on_following_obs( + i_, ind, pct, follow_obs, t, segment, i1_, window, q, nb_min + ) + stat2[j] = get_percentile_on_following_obs( + i_, ind, pct, follow_obs, t, segment, i2_, window, q, nb_min + ) else: # We follow child - stat1[j] = get_percentile_on_following_obs(i1_, ind, pct, follow_obs, t, segment, i_, window, q, nb_min) - stat2[j] = get_percentile_on_following_obs(i2_, ind, pct, follow_obs, t, segment, i_, window, q, nb_min) + stat1[j] = get_percentile_on_following_obs( + i1_, ind, pct, follow_obs, t, segment, i_, window, q, nb_min + ) + stat2[j] = get_percentile_on_following_obs( + i2_, ind, pct, follow_obs, t, segment, i_, window, q, nb_min + ) return stat1, stat2 + @njit(cache=True) def get_next_index(gr): """Return for each obs index the new position to join all groups""" diff --git a/src/py_eddy_tracker/observations/observation.py b/src/py_eddy_tracker/observations/observation.py index a7663345..df60474c 100644 --- a/src/py_eddy_tracker/observations/observation.py +++ b/src/py_eddy_tracker/observations/observation.py @@ -57,8 +57,8 @@ hist_numba, local_to_coordinates, reverse_index, - wrap_longitude, window_index, + wrap_longitude, ) from ..poly import ( bbox_intersection, @@ -448,9 +448,7 @@ def add_fields(self, fields=list(), array_fields=list()): ), track_array_variables=self.track_array_variables, array_variables=list(concatenate((self.array_variables, array_fields))), - only_variables=list( - concatenate((self.fields, fields, array_fields)) - ), + only_variables=list(concatenate((self.fields, fields, array_fields))), raw_data=self.raw_data, ) new.sign_type = self.sign_type @@ -591,7 +589,7 @@ def 
iter_on(self, xname, window=None, bins=None): x0 = arange(x.min(), x.max()) if bins is None else array(bins) i_ordered, first_index, last_index = window_index(x, x0, window) for x_, i0, i1 in zip(x0, first_index, last_index): - yield i_ordered[i0: i1], x_ - window, x_ + window + yield i_ordered[i0:i1], x_ - window, x_ + window else: d = x[1:] - x[:-1] if bins is None: @@ -1595,9 +1593,7 @@ def to_netcdf(self, handler, **kwargs): handler.track_array_variables = self.track_array_variables handler.array_variables = ",".join(self.array_variables) # Iter on variables to create: - fields_ = array( - [VAR_DESCR[field]["nc_name"] for field in self.fields] - ) + fields_ = array([VAR_DESCR[field]["nc_name"] for field in self.fields]) i = fields_.argsort() for ori_name in array(self.fields)[i]: # Patch for a transition @@ -1676,7 +1672,9 @@ def get_filters_zarr(name): content = VAR_DESCR.get(name) filters = list() store_dtype = content["output_type"] - scale_factor, add_offset = content.get("scale_factor", None), content.get("add_offset", None) + scale_factor, add_offset = content.get("scale_factor", None), content.get( + "add_offset", None + ) if scale_factor is not None or add_offset is not None: if add_offset is None: add_offset = 0 diff --git a/src/py_eddy_tracker/observations/tracking.py b/src/py_eddy_tracker/observations/tracking.py index 152239cf..164f9724 100644 --- a/src/py_eddy_tracker/observations/tracking.py +++ b/src/py_eddy_tracker/observations/tracking.py @@ -385,9 +385,7 @@ def extract_toward_direction(self, west=True, delta_lon=None): return self.extract_with_mask(m) def extract_first_obs_in_box(self, res): - data = empty( - len(self), dtype=[("lon", "f4"), ("lat", "f4"), ("track", "i4")] - ) + data = empty(len(self), dtype=[("lon", "f4"), ("lat", "f4"), ("track", "i4")]) data["lon"] = self.longitude - self.longitude % res data["lat"] = self.latitude - self.latitude % res data["track"] = self.track @@ -757,7 +755,7 @@ def get_next_obs( time_ref, window, min_overlap=0.2, - **kwargs + **kwargs, ): """Forward association of observations to the segments""" time_max = time_e.shape[0] - 1 diff --git a/src/py_eddy_tracker/poly.py b/src/py_eddy_tracker/poly.py index deabd3ea..99d701db 100644 --- a/src/py_eddy_tracker/poly.py +++ b/src/py_eddy_tracker/poly.py @@ -431,7 +431,9 @@ def merge(x, y): return concatenate(x), concatenate(y) -def vertice_overlap(x0, y0, x1, y1, minimal_area=False, p1_area=False, hybrid_area=False, min_overlap=0): +def vertice_overlap( + x0, y0, x1, y1, minimal_area=False, p1_area=False, hybrid_area=False, min_overlap=0 +): r""" Return percent of overlap for each item. 
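A worked example of the thresholding in the next hunk, assuming vertice_overlap accepts stacked contours of shape (n, nb_points) as in the calls shown earlier. Two unit squares shifted by half a width share an area of 0.5, so intersection/union = 0.5 / 1.5 ≈ 0.33:

from numpy import array
from py_eddy_tracker.poly import vertice_overlap

x0 = array([[0.0, 1, 1, 0, 0]])
y0 = array([[0.0, 0, 1, 1, 0]])
x1, y1 = x0 + 0.5, y0.copy()
print(vertice_overlap(x0, y0, x1, y1))                   # ~0.33
print(vertice_overlap(x0, y0, x1, y1, min_overlap=0.5))  # 0.0, under threshold
# hybrid_area=True keeps a pair under min_overlap only when one contour lies
# almost fully inside the other (intersection / smaller area > 0.99)
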
@@ -484,7 +486,11 @@ def vertice_overlap(x0, y0, x1, y1, minimal_area=False, p1_area=False, hybrid_ar if cost_ >= min_overlap: cost[i] = cost_ else: - if hybrid_area and cost_ != 0 and (intersection / min(p0_area_, p1_area_)) > .99: + if ( + hybrid_area + and cost_ != 0 + and (intersection / min(p0_area_, p1_area_)) > 0.99 + ): cost[i] = cost_ else: cost[i] = 0 diff --git a/tests/test_generic.py b/tests/test_generic.py index 29cb64b7..ee2d7881 100644 --- a/tests/test_generic.py +++ b/tests/test_generic.py @@ -33,7 +33,7 @@ def test_cumsum_by_track(): def test_wrapping(): - y = x = arange(-5,5, dtype='f4') + y = x = arange(-5, 5, dtype="f4") x_, _ = wrap_longitude(x, y, ref=-10) assert (x_ == x).all() x_, _ = wrap_longitude(x, y, ref=1) @@ -42,7 +42,7 @@ def test_wrapping(): assert (x_[:6] == x[:6] + 360).all() x_, _ = wrap_longitude(x, y, ref=1, cut=True) assert x.size + 3 == x_.size - assert (x_[6 + 3:] == x[6:]).all() + assert (x_[6 + 3 :] == x[6:]).all() assert (x_[:7] == x[:7] + 360).all() # FIXME Need evolution in wrap_longitude From 7fc19dfb1c439bd5ff19a544c6cf1aa2f0a6548d Mon Sep 17 00:00:00 2001 From: Antoine Delepoulle <36040805+AntSimi@users.noreply.github.com> Date: Wed, 18 Jan 2023 16:48:12 +0100 Subject: [PATCH 086/115] test python 3.10 binder (#186) Update setup.cfg --- environment.yml | 2 +- setup.cfg | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/environment.yml b/environment.yml index 4ea8f840..fcf7c4b2 100644 --- a/environment.yml +++ b/environment.yml @@ -3,7 +3,7 @@ channels: - conda-forge - defaults dependencies: - - python=3.8 + - python=3.10 - ffmpeg - pip: - -r requirements.txt diff --git a/setup.cfg b/setup.cfg index eb88b6f9..7e773ae8 100644 --- a/setup.cfg +++ b/setup.cfg @@ -5,8 +5,8 @@ column_limit = 100 [flake8] max-line-length = 140 ignore = - E203, # whitespace before ':' - W503, # line break before binary operator + E203, + W503, exclude= build doc From 9085eacbbddb178025b456fe1e2137c64b1dacf0 Mon Sep 17 00:00:00 2001 From: "A. Delepoulle" <36040805+AntSimi@users.noreply.github.com> Date: Wed, 8 Feb 2023 10:07:09 +0100 Subject: [PATCH 087/115] - Add period to cube - Add some methods for display - Speed up overlap --- README.md | 5 + src/py_eddy_tracker/appli/network.py | 5 + src/py_eddy_tracker/dataset/grid.py | 85 +++++--------- src/py_eddy_tracker/generic.py | 28 +---- src/py_eddy_tracker/observations/network.py | 58 +++++----- .../observations/observation.py | 107 +++++++----------- src/py_eddy_tracker/poly.py | 23 ++-- 7 files changed, 133 insertions(+), 178 deletions(-) diff --git a/README.md b/README.md index 98a16b62..0cc34894 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,5 @@ [![PyPI version](https://badge.fury.io/py/pyEddyTracker.svg)](https://badge.fury.io/py/pyEddyTracker) +[![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.6333988.svg)](https://doi.org/10.5281/zenodo.6333988) [![Documentation Status](https://readthedocs.org/projects/py-eddy-tracker/badge/?version=stable)](https://py-eddy-tracker.readthedocs.io/en/stable/?badge=stable) [![Gitter](https://badges.gitter.im/py-eddy-tracker/community.svg)](https://gitter.im/py-eddy-tracker/community?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge) [![Binder](https://mybinder.org/badge_logo.svg)](https://mybinder.org/v2/gh/AntSimi/py-eddy-tracker/master?urlpath=lab/tree/notebooks/python_module/) @@ -6,6 +7,10 @@ # README # +### How to cite code? 
### + +Zenodo provide DOI for each tagged version, [all DOI are available here](https://doi.org/10.5281/zenodo.6333988) + ### Method ### Method was described in : diff --git a/src/py_eddy_tracker/appli/network.py b/src/py_eddy_tracker/appli/network.py index 33d50b2a..b8c2da51 100644 --- a/src/py_eddy_tracker/appli/network.py +++ b/src/py_eddy_tracker/appli/network.py @@ -128,6 +128,9 @@ def subset_network(): action="store_true", help="Remove trash (network id == 0)", ) + parser.add_argument( + "-i", "--ids", nargs="+", type=int, help="List of network which will be extract" + ) parser.add_argument( "-p", "--period", @@ -138,6 +141,8 @@ def subset_network(): ) args = parser.parse_args() n = NetworkObservations.load_file(args.input, raw_data=True) + if args.ids is not None: + n = n.networks(args.ids) if args.length is not None: n = n.longer_than(*args.length) if args.remove_dead_end is not None: diff --git a/src/py_eddy_tracker/dataset/grid.py b/src/py_eddy_tracker/dataset/grid.py index 9345bf45..043a5244 100644 --- a/src/py_eddy_tracker/dataset/grid.py +++ b/src/py_eddy_tracker/dataset/grid.py @@ -2,45 +2,20 @@ """ Class to load and manipulate RegularGrid and UnRegularGrid """ -from datetime import datetime import logging +from datetime import datetime from cv2 import filter2D from matplotlib.path import Path as BasePath from netCDF4 import Dataset -from numba import njit, prange, types as numba_types -from numpy import ( - arange, - array, - ceil, - concatenate, - cos, - deg2rad, - empty, - errstate, - exp, - float_, - floor, - histogram2d, - int_, - interp, - isnan, - linspace, - ma, - mean as np_mean, - meshgrid, - nan, - nanmean, - ones, - percentile, - pi, - radians, - round_, - sin, - sinc, - where, - zeros, -) +from numba import njit, prange +from numba import types as numba_types +from numpy import (arange, array, ceil, concatenate, cos, deg2rad, empty, + errstate, exp, float_, floor, histogram2d, int_, interp, + isnan, linspace, ma) +from numpy import mean as np_mean +from numpy import (meshgrid, nan, nanmean, ones, percentile, pi, radians, + round_, sin, sinc, where, zeros) from pint import UnitRegistry from scipy.interpolate import RectBivariateSpline, interp1d from scipy.ndimage import gaussian_filter @@ -49,26 +24,15 @@ from scipy.special import j1 from .. 
import VAR_DESCR +from ..data import get_demo_path from ..eddy_feature import Amplitude, Contours -from ..generic import ( - bbox_indice_regular, - coordinates_to_local, - distance, - interp2d_geo, - local_to_coordinates, - nearest_grd_indice, - uniform_resample, -) +from ..generic import (bbox_indice_regular, coordinates_to_local, distance, + interp2d_geo, local_to_coordinates, nearest_grd_indice, + uniform_resample) from ..observations.observation import EddiesObservations -from ..poly import ( - create_vertice, - fit_circle, - get_pixel_in_regular, - poly_area, - poly_contain_poly, - visvalingam, - winding_number_poly, -) +from ..poly import (create_vertice, fit_circle, get_pixel_in_regular, + poly_area, poly_contain_poly, visvalingam, + winding_number_poly) logger = logging.getLogger("pet") @@ -1318,9 +1282,13 @@ def compute_pixel_path(self, x0, y0, x1, y1): self.x_size, ) - def clean_land(self): + def clean_land(self, name): """Function to remove all land pixel""" - pass + mask_land = self.__class__(get_demo_path("mask_1_60.nc"), "lon", "lat") + x,y = meshgrid(self.x_c, self.y_c) + m = mask_land.interp('mask', x.reshape(-1), y.reshape(-1), 'nearest') + data = self.grid(name) + self.vars[name] = ma.array(data, mask=m.reshape(x.shape).T) def is_circular(self): """Check if the grid is circular""" @@ -2392,6 +2360,15 @@ def __iter__(self): for _, d in self.datasets: yield d + @property + def time(self): + return array([t for t, _ in self.datasets]) + + @property + def period(self): + t = self.time + return t.min(), t.max() + def __getitem__(self, item): for t, d in self.datasets: if t == item: diff --git a/src/py_eddy_tracker/generic.py b/src/py_eddy_tracker/generic.py index fbc17d07..29815acd 100644 --- a/src/py_eddy_tracker/generic.py +++ b/src/py_eddy_tracker/generic.py @@ -3,27 +3,11 @@ Tool method which use mostly numba """ -from numba import njit, prange, types as numba_types -from numpy import ( - absolute, - arcsin, - arctan2, - bool_, - cos, - empty, - floor, - histogram, - interp, - isnan, - linspace, - nan, - ones, - pi, - radians, - sin, - where, - zeros, -) +from numba import njit, prange +from numba import types as numba_types +from numpy import (absolute, arcsin, arctan2, bool_, cos, empty, floor, + histogram, interp, isnan, linspace, nan, ones, pi, radians, + sin, where, zeros) @njit(cache=True) @@ -426,7 +410,7 @@ def split_line(x, y, i): """ nb_jump = len(where(i[1:] - i[:-1] != 0)[0]) nb_value = x.shape[0] - final_size = (nb_jump - 1) + nb_value + final_size = nb_jump + nb_value new_x = empty(final_size, dtype=x.dtype) new_y = empty(final_size, dtype=y.dtype) new_j = 0 diff --git a/src/py_eddy_tracker/observations/network.py b/src/py_eddy_tracker/observations/network.py index 6b3102ed..4ffed94c 100644 --- a/src/py_eddy_tracker/observations/network.py +++ b/src/py_eddy_tracker/observations/network.py @@ -2,37 +2,26 @@ """ Class to create network of observations """ -from glob import glob import logging import time +from glob import glob import netCDF4 -from numba import njit, types as nb_types -from numba.typed import List -from numpy import ( - arange, - array, - bincount, - bool_, - concatenate, - empty, - nan, - ones, - percentile, - uint16, - uint32, - unique, - where, - zeros, -) import zarr +from numba import njit +from numba import types as nb_types +from numba.typed import List +from numpy import (arange, array, bincount, bool_, concatenate, empty, nan, + ones, percentile, uint16, uint32, unique, where, zeros) from ..dataset.grid import GridCollection from 
..generic import build_index, wrap_longitude from ..poly import bbox_intersection, vertice_overlap -from .groups import GroupEddiesObservations, get_missing_indices, particle_candidate +from .groups import (GroupEddiesObservations, get_missing_indices, + particle_candidate) from .observation import EddiesObservations -from .tracking import TrackEddiesObservations, track_loess_filter, track_median_filter +from .tracking import (TrackEddiesObservations, track_loess_filter, + track_median_filter) logger = logging.getLogger("pet") @@ -280,6 +269,15 @@ def longer_than(self, nb_day_min=-1, nb_day_max=-1): """ Select network on time duration + :param int nb_day_min: Minimal number of days covered by one network, if negative -> not used + :param int nb_day_max: Maximal number of days covered by one network, if negative -> not used + """ + return self.extract_with_mask(self.mask_longer_than(nb_day_min, nb_day_max)) + + def mask_longer_than(self, nb_day_min=-1, nb_day_max=-1): + """ + Select network on time duration + :param int nb_day_min: Minimal number of days covered by one network, if negative -> not used :param int nb_day_max: Maximal number of days covered by one network, if negative -> not used """ @@ -293,7 +291,7 @@ def longer_than(self, nb_day_min=-1, nb_day_max=-1): continue if nb_day_min <= (ptp(t[i]) + 1) <= nb_day_max: mask[i] = True - return self.extract_with_mask(mask) + return mask @classmethod def from_split_network(cls, group_dataset, indexs, **kwargs): @@ -800,7 +798,7 @@ def display_timeline( if field is not None: field = self.parse_varname(field) for i, b0, b1 in self.iter_on("segment"): - x = self.time[i] + x = self.time_datetime64[i] if x.shape[0] == 0: continue if field is None: @@ -831,7 +829,7 @@ def event_timeline(self, ax, field=None, method=None, factor=1, colors_mode="rol # TODO : fill mappables dict y_seg = dict() - _time = self.time + _time = self.time_datetime64 if field is not None and method != "all": for i, b0, _ in self.iter_on("segment"): @@ -1011,7 +1009,7 @@ def scatter_timeline( if "c" not in kwargs: v = self.parse_varname(name) kwargs["c"] = v * factor - mappables["scatter"] = ax.scatter(self.time, y, **kwargs) + mappables["scatter"] = ax.scatter(self.time_datetime64, y, **kwargs) return mappables def event_map(self, ax, **kwargs): @@ -1244,7 +1242,7 @@ def networks_mask(self, id_networks, segment=False): def networks(self, id_networks): return self.extract_with_mask( - generate_mask_from_ids(id_networks, self.track.size, *self.index_network) + generate_mask_from_ids(array(id_networks), self.track.size, *self.index_network) ) @property @@ -1423,10 +1421,10 @@ def plot(self, ax, ref=None, color_cycle=None, **kwargs): :param dict kwargs: keyword arguments for Axes.plot :return: a list of matplotlib mappables """ - nb_colors = 0 - if color_cycle is not None: - kwargs = kwargs.copy() - nb_colors = len(color_cycle) + kwargs = kwargs.copy() + if color_cycle is None: + color_cycle = self.COLORS + nb_colors = len(color_cycle) mappables = list() if "label" in kwargs: kwargs["label"] = self.format_label(kwargs["label"]) diff --git a/src/py_eddy_tracker/observations/observation.py b/src/py_eddy_tracker/observations/observation.py index df60474c..72031608 100644 --- a/src/py_eddy_tracker/observations/observation.py +++ b/src/py_eddy_tracker/observations/observation.py @@ -2,76 +2,38 @@ """ Base class to manage eddy observation """ +import logging from datetime import datetime from io import BufferedReader, BytesIO -import logging from tarfile import ExFileObject from 
tokenize import TokenError -from Polygon import Polygon +import packaging.version +import zarr from matplotlib.cm import get_cmap from matplotlib.collections import LineCollection, PolyCollection from matplotlib.colors import Normalize from netCDF4 import Dataset -from numba import njit, types as numba_types -from numpy import ( - absolute, - arange, - array, - array_equal, - ceil, - concatenate, - cos, - digitize, - empty, - errstate, - floor, - histogram, - histogram2d, - in1d, - isnan, - linspace, - ma, - nan, - ndarray, - ones, - percentile, - radians, - sin, - unique, - where, - zeros, -) -import packaging.version +from numba import njit +from numba import types as numba_types +from numpy import (absolute, arange, array, array_equal, ceil, concatenate, + cos, datetime64, digitize, empty, errstate, floor, + histogram, histogram2d, in1d, isnan, linspace, ma, nan, + ndarray, ones, percentile, radians, sin, unique, where, + zeros) from pint import UnitRegistry from pint.errors import UndefinedUnitError -import zarr +from Polygon import Polygon from .. import VAR_DESCR, VAR_DESCR_inv, __version__ -from ..generic import ( - bbox_indice_regular, - build_index, - distance, - distance_grid, - flatten_line_matrix, - hist_numba, - local_to_coordinates, - reverse_index, - window_index, - wrap_longitude, -) -from ..poly import ( - bbox_intersection, - close_center, - convexs, - create_meshed_particles, - create_vertice, - get_pixel_in_regular, - insidepoly, - poly_indexs, - reduce_size, - vertice_overlap, -) +from ..generic import (bbox_indice_regular, build_index, distance, + distance_grid, flatten_line_matrix, hist_numba, + local_to_coordinates, reverse_index, window_index, + wrap_longitude) +from ..poly import (bbox_intersection, close_center, convexs, + create_meshed_particles, create_vertice, + get_pixel_in_regular, insidepoly, poly_indexs, reduce_size, + vertice_overlap) logger = logging.getLogger("pet") @@ -1844,6 +1806,11 @@ def extract_with_area(self, area, **kwargs): mask *= (lon > lon0) * (lon < area["urcrnrlon"]) return self.extract_with_mask(mask, **kwargs) + @property + def time_datetime64(self): + dt = (datetime64('1970-01-01') - datetime64('1950-01-01')).astype('i8') + return (self.time - dt).astype('datetime64[D]') + def time_sub_sample(self, t0, time_step): """ Time sub sampling @@ -2351,7 +2318,7 @@ def grid_stat(self, bins, varname, data=None): return regular_grid def interp_grid( - self, grid_object, varname, method="center", dtype=None, intern=None + self, grid_object, varname, i=None, method="center", dtype=None, intern=None ): """ Interpolate a grid on a center or contour with mean, min or max method @@ -2359,6 +2326,8 @@ def interp_grid( :param grid_object: Handler of grid to interp :type grid_object: py_eddy_tracker.dataset.grid.RegularGridDataset :param str varname: Name of variable to use + :param array[bool,int],None i: + Index or mask to subset observations, it could avoid to build a specific dataset. :param str method: 'center', 'mean', 'max', 'min', 'nearest' :param str dtype: if None we use var dtype :param bool intern: Use extern or intern contour @@ -2366,19 +2335,25 @@ def interp_grid( .. 
minigallery:: py_eddy_tracker.EddiesObservations.interp_grid """ if method in ("center", "nearest"): - return grid_object.interp(varname, self.longitude, self.latitude, method) + x, y = self.longitude, self.latitude + if i is not None: + x, y = x[i], y[i] + return grid_object.interp(varname, x,y , method) elif method in ("min", "max", "mean", "count"): x0 = grid_object.x_bounds[0] x_name, y_name = self.intern(False if intern is None else intern) x_ref = ((self.longitude - x0) % 360 + x0 - 180).reshape(-1, 1) x, y = (self[x_name] - x_ref) % 360 + x_ref, self[y_name] + if i is not None: + x, y = x[i], y[i] grid = grid_object.grid(varname) - result = empty(self.shape, dtype=grid.dtype if dtype is None else dtype) + result = empty(x.shape[0], dtype=grid.dtype if dtype is None else dtype) min_method = method == "min" grid_stat( grid_object.x_c, grid_object.y_c, -grid if min_method else grid, + grid.mask, x, y, result, @@ -2545,13 +2520,14 @@ def grid_box_stat(x_c, y_c, grid, mask, x, y, value, circular=False, method=50): @njit(cache=True) -def grid_stat(x_c, y_c, grid, x, y, result, circular=False, method="mean"): +def grid_stat(x_c, y_c, grid, mask, x, y, result, circular=False, method="mean"): """ Compute the mean or the max of the grid for each contour :param array_like x_c: the grid longitude coordinates :param array_like y_c: the grid latitude coordinates :param array_like grid: grid value + :param array[bool] mask: mask for invalid value :param array_like x: longitude of contours :param array_like y: latitude of contours :param array_like result: return values @@ -2577,9 +2553,12 @@ def grid_stat(x_c, y_c, grid, x, y, result, circular=False, method="mean"): result[elt] = i.shape[0] elif mean_method: v_sum = 0 + nb_ = 0 for i_, j_ in zip(i, j): + if mask[i_, j_]: + continue v_sum += grid[i_, j_] - nb_ = i.shape[0] + nb_ += 1 # FIXME : how does it work on grid bound, if nb_ == 0: result[elt] = nan @@ -2588,7 +2567,9 @@ def grid_stat(x_c, y_c, grid, x, y, result, circular=False, method="mean"): elif max_method: v_max = -1e40 for i_, j_ in zip(i, j): - v_max = max(v_max, grid[i_, j_]) + values = grid[i_, j_] + # FIXME must use mask + v_max = max(v_max, values) result[elt] = v_max diff --git a/src/py_eddy_tracker/poly.py b/src/py_eddy_tracker/poly.py index 99d701db..217b1d18 100644 --- a/src/py_eddy_tracker/poly.py +++ b/src/py_eddy_tracker/poly.py @@ -5,10 +5,12 @@ import heapq -from Polygon import Polygon -from numba import njit, prange, types as numba_types -from numpy import arctan, array, concatenate, empty, nan, ones, pi, where, zeros +from numba import njit, prange +from numba import types as numba_types +from numpy import (arctan, array, concatenate, empty, nan, ones, pi, where, + zeros) from numpy.linalg import lstsq +from Polygon import Polygon from .generic import build_index @@ -278,7 +280,10 @@ def close_center(x0, y0, x1, y1, delta=0.1): for i0 in range(nb0): xi0, yi0 = x0[i0], y0[i0] for i1 in range(nb1): - if abs(x1[i1] - xi0) > delta: + d_x = x1[i1] - xi0 + if abs(d_x) > 180: + d_x = (d_x + 180) % 360 - 180 + if abs(d_x) > delta: continue if abs(y1[i1] - yi0) > delta: continue @@ -474,22 +479,22 @@ def vertice_overlap( if intersection == 0: cost[i] = 0 continue - p0_area_, p1_area_ = p0.area(), p1.area() + p0_area, p1_area = p0.area(), p1.area() if minimal_area: - cost_ = intersection / min(p0_area_, p1_area_) + cost_ = intersection / min(p0_area, p1_area) # we divide intersection with p1 elif p1_area: - cost_ = intersection / p1_area_ + cost_ = intersection / p1_area # we 
divide intersection with polygon merging result from 0 to 1 else: - cost_ = intersection / (p0_area_ + p1_area_ - intersection) + cost_ = intersection / (p0_area + p1_area - intersection) if cost_ >= min_overlap: cost[i] = cost_ else: if ( hybrid_area and cost_ != 0 - and (intersection / min(p0_area_, p1_area_)) > 0.99 + and (intersection / min(p0_area, p1_area)) > 0.99 ): cost[i] = cost_ else: From e7d138de2f2e6718c4cd4d461c31a58a5b169271 Mon Sep 17 00:00:00 2001 From: "A. Delepoulle" <36040805+AntSimi@users.noreply.github.com> Date: Thu, 4 May 2023 10:52:06 +0200 Subject: [PATCH 088/115] change version for module --- doc/environment.yml | 1 + environment.yml | 1 + requirements.txt | 8 ++++---- 3 files changed, 6 insertions(+), 4 deletions(-) diff --git a/doc/environment.yml b/doc/environment.yml index 9d882911..89fcbe9c 100644 --- a/doc/environment.yml +++ b/doc/environment.yml @@ -4,6 +4,7 @@ channels: dependencies: - python=3.10 - ffmpeg + - pip - pip: - sphinx-gallery - sphinx_rtd_theme diff --git a/environment.yml b/environment.yml index fcf7c4b2..12ce70e7 100644 --- a/environment.yml +++ b/environment.yml @@ -4,6 +4,7 @@ channels: - defaults dependencies: - python=3.10 + - pip - ffmpeg - pip: - -r requirements.txt diff --git a/requirements.txt b/requirements.txt index 497344e6..4c8af099 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,11 +1,11 @@ matplotlib opencv-python -pint==0.18 +pint polygon3 pyyaml requests scipy zarr -netCDF4<1.6 -numpy<1.23 -numba<0.56 \ No newline at end of file +netCDF4 +numpy +numba \ No newline at end of file From 17288159048ffbb599ca1287cb53bbc29f94f272 Mon Sep 17 00:00:00 2001 From: "A. Delepoulle" <36040805+AntSimi@users.noreply.github.com> Date: Thu, 4 May 2023 11:10:51 +0200 Subject: [PATCH 089/115] minimal version of python : 3.10 --- .github/workflows/python-app.yml | 2 +- setup.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/python-app.yml b/.github/workflows/python-app.yml index bbc0662c..00dbcc95 100644 --- a/.github/workflows/python-app.yml +++ b/.github/workflows/python-app.yml @@ -11,7 +11,7 @@ jobs: matrix: # os: [ubuntu-latest, macos-latest, windows-latest] os: [ubuntu-latest, windows-latest] - python_version: [3.7, 3.8, 3.9, '3.10'] + python_version: ['3.10'] name: Run py eddy tracker build tests runs-on: ${{ matrix.os }} defaults: diff --git a/setup.py b/setup.py index 6b18bcbb..7b836763 100644 --- a/setup.py +++ b/setup.py @@ -10,7 +10,7 @@ setup( name="pyEddyTracker", - python_requires=">=3.7", + python_requires=">=3.10", version=versioneer.get_version(), cmdclass=versioneer.get_cmdclass(), description="Py-Eddy-Tracker libraries", From 1b9ab25576a5e96b9987374687dd5436f93b7ed9 Mon Sep 17 00:00:00 2001 From: "A. 
Delepoulle" <36040805+AntSimi@users.noreply.github.com> Date: Thu, 4 May 2023 12:25:47 +0200 Subject: [PATCH 090/115] numba correction with masked array --- doc/spectrum.rst | 1 - examples/16_network/pet_atlas.py | 4 +- examples/16_network/pet_follow_particle.py | 3 +- examples/16_network/pet_relative.py | 8 +- .../16_network/pet_replay_segmentation.py | 1 - src/py_eddy_tracker/dataset/grid.py | 118 +++++++++++++----- .../old_tracker_reference.py | 1 - src/py_eddy_tracker/generic.py | 32 +++-- src/py_eddy_tracker/observations/network.py | 83 +++++++++--- .../observations/observation.py | 87 +++++++++---- src/py_eddy_tracker/poly.py | 8 +- src/py_eddy_tracker/tracking.py | 4 +- 12 files changed, 256 insertions(+), 94 deletions(-) diff --git a/doc/spectrum.rst b/doc/spectrum.rst index 5a42cbec..f96e30a0 100644 --- a/doc/spectrum.rst +++ b/doc/spectrum.rst @@ -28,7 +28,6 @@ Compute and display spectrum ax.set_title("Spectrum") ax.set_xlabel("km") for name_area, area in areas.items(): - lon_spec, lat_spec = raw.spectrum_lonlat("adt", area=area) mappable = ax.loglog(*lat_spec, label="lat %s raw" % name_area)[0] ax.loglog( diff --git a/examples/16_network/pet_atlas.py b/examples/16_network/pet_atlas.py index 6927f169..48b374e2 100644 --- a/examples/16_network/pet_atlas.py +++ b/examples/16_network/pet_atlas.py @@ -129,7 +129,9 @@ def update_axes(ax, mappable=None): # Merging in networks longer than 10 days, with dead end remove (shorter than 10 observations) # -------------------------------------------------------------------------------------------- ax = start_axes("") -merger = n10.remove_dead_end(nobs=10).merging_event() +n10_ = n10.copy() +n10_.remove_dead_end(nobs=10) +merger = n10_.merging_event() g_10_merging = merger.grid_count(bins) m = g_10_merging.display(ax, **kw_time, vmin=0, vmax=1) update_axes(ax, m).set_label("Pixel used in % of time") diff --git a/examples/16_network/pet_follow_particle.py b/examples/16_network/pet_follow_particle.py index 9f5458eb..6815fb6e 100644 --- a/examples/16_network/pet_follow_particle.py +++ b/examples/16_network/pet_follow_particle.py @@ -41,7 +41,8 @@ def save(self, *args, **kwargs): # %% n = NetworkObservations.load_file(get_demo_path("network_med.nc")).network(651) n = n.extract_with_mask((n.time >= 20180) * (n.time <= 20269)) -n = n.remove_dead_end(nobs=0, ndays=10) +n.remove_dead_end(nobs=0, ndays=10) +n = n.remove_trash() n.numbering_segment() c = GridCollection.from_netcdf_cube( get_demo_path("dt_med_allsat_phy_l4_2005T2.nc"), diff --git a/examples/16_network/pet_relative.py b/examples/16_network/pet_relative.py index f5e8bc92..dd97b538 100644 --- a/examples/16_network/pet_relative.py +++ b/examples/16_network/pet_relative.py @@ -127,7 +127,9 @@ # Remove dead branch # ------------------ # Remove all tiny segments with less than N obs which didn't join two segments -n_clean = n.remove_dead_end(nobs=5, ndays=10) +n_clean = n.copy() +n_clean.remove_dead_end(nobs=5, ndays=10) +n_clean = n_clean.remove_trash() fig = plt.figure(figsize=(15, 12)) ax = fig.add_axes([0.04, 0.54, 0.90, 0.40]) ax.set_title(f"Original network ({n.infos()})") @@ -261,7 +263,9 @@ # -------------------- # Get a simplified network -n = n2.remove_dead_end(nobs=50, recursive=1) +n = n2.copy() +n.remove_dead_end(nobs=50, recursive=1) +n = n.remove_trash() n.numbering_segment() # %% # Only a map can be tricky to understand, with a timeline it's easier! 
diff --git a/examples/16_network/pet_replay_segmentation.py b/examples/16_network/pet_replay_segmentation.py index ecb0970d..d909af7f 100644 --- a/examples/16_network/pet_replay_segmentation.py +++ b/examples/16_network/pet_replay_segmentation.py @@ -163,7 +163,6 @@ def get_obs(dataset): for b0, b1 in [ (datetime(i, 1, 1), datetime(i, 12, 31)) for i in (2004, 2005, 2006, 2007, 2008) ]: - ref, delta = datetime(1950, 1, 1), 20 b0_, b1_ = (b0 - ref).days, (b1 - ref).days ax = timeline_axes() diff --git a/src/py_eddy_tracker/dataset/grid.py b/src/py_eddy_tracker/dataset/grid.py index 043a5244..7e9a04be 100644 --- a/src/py_eddy_tracker/dataset/grid.py +++ b/src/py_eddy_tracker/dataset/grid.py @@ -2,20 +2,45 @@ """ Class to load and manipulate RegularGrid and UnRegularGrid """ -import logging from datetime import datetime +import logging from cv2 import filter2D from matplotlib.path import Path as BasePath from netCDF4 import Dataset -from numba import njit, prange -from numba import types as numba_types -from numpy import (arange, array, ceil, concatenate, cos, deg2rad, empty, - errstate, exp, float_, floor, histogram2d, int_, interp, - isnan, linspace, ma) -from numpy import mean as np_mean -from numpy import (meshgrid, nan, nanmean, ones, percentile, pi, radians, - round_, sin, sinc, where, zeros) +from numba import njit, prange, types as numba_types +from numpy import ( + arange, + array, + ceil, + concatenate, + cos, + deg2rad, + empty, + errstate, + exp, + float_, + floor, + histogram2d, + int_, + interp, + isnan, + linspace, + ma, + mean as np_mean, + meshgrid, + nan, + nanmean, + ones, + percentile, + pi, + radians, + round_, + sin, + sinc, + where, + zeros, +) from pint import UnitRegistry from scipy.interpolate import RectBivariateSpline, interp1d from scipy.ndimage import gaussian_filter @@ -26,13 +51,25 @@ from .. 
import VAR_DESCR from ..data import get_demo_path from ..eddy_feature import Amplitude, Contours -from ..generic import (bbox_indice_regular, coordinates_to_local, distance, - interp2d_geo, local_to_coordinates, nearest_grd_indice, - uniform_resample) +from ..generic import ( + bbox_indice_regular, + coordinates_to_local, + distance, + interp2d_geo, + local_to_coordinates, + nearest_grd_indice, + uniform_resample, +) from ..observations.observation import EddiesObservations -from ..poly import (create_vertice, fit_circle, get_pixel_in_regular, - poly_area, poly_contain_poly, visvalingam, - winding_number_poly) +from ..poly import ( + create_vertice, + fit_circle, + get_pixel_in_regular, + poly_area, + poly_contain_poly, + visvalingam, + winding_number_poly, +) logger = logging.getLogger("pet") @@ -86,7 +123,7 @@ def value_on_regular_contour(x_g, y_g, z_g, m_g, vertices, num_fac=2, fixed_size @njit(cache=True) def mean_on_regular_contour( - x_g, y_g, z_g, m_g, vertices, num_fac=2, fixed_size=None, nan_remove=False + x_g, y_g, z_g, m_g, vertices, num_fac=2, fixed_size=-1, nan_remove=False ): x_val, y_val = vertices[:, 0], vertices[:, 1] x_new, y_new = uniform_resample(x_val, y_val, num_fac, fixed_size) @@ -406,8 +443,8 @@ def setup_coordinates(self): x_name, y_name = self.coordinates if self.is_centered: # logger.info("Grid center") - self.x_c = self.vars[x_name].astype("float64") - self.y_c = self.vars[y_name].astype("float64") + self.x_c = array(self.vars[x_name].astype("float64")) + self.y_c = array(self.vars[y_name].astype("float64")) self.x_bounds = concatenate((self.x_c, (2 * self.x_c[-1] - self.x_c[-2],))) self.y_bounds = concatenate((self.y_c, (2 * self.y_c[-1] - self.y_c[-2],))) @@ -419,8 +456,8 @@ def setup_coordinates(self): self.y_bounds[-1] -= d_y[-1] / 2 else: - self.x_bounds = self.vars[x_name].astype("float64") - self.y_bounds = self.vars[y_name].astype("float64") + self.x_bounds = array(self.vars[x_name].astype("float64")) + self.y_bounds = array(self.vars[y_name].astype("float64")) if len(self.x_dim) == 1: self.x_c = self.x_bounds.copy() @@ -757,7 +794,7 @@ def eddy_identification( # Test of the rotating sense: cyclone or anticyclone if has_value( - data, i_x_in, i_y_in, cvalues, below=anticyclonic_search + data.data, i_x_in, i_y_in, cvalues, below=anticyclonic_search ): continue @@ -788,7 +825,6 @@ def eddy_identification( contour.reject = 4 continue if reset_centroid: - if self.is_circular(): centi = self.normalize_x_indice(reset_centroid[0]) else: @@ -1285,8 +1321,8 @@ def compute_pixel_path(self, x0, y0, x1, y1): def clean_land(self, name): """Function to remove all land pixel""" mask_land = self.__class__(get_demo_path("mask_1_60.nc"), "lon", "lat") - x,y = meshgrid(self.x_c, self.y_c) - m = mask_land.interp('mask', x.reshape(-1), y.reshape(-1), 'nearest') + x, y = meshgrid(self.x_c, self.y_c) + m = mask_land.interp("mask", x.reshape(-1), y.reshape(-1), "nearest") data = self.grid(name) self.vars[name] = ma.array(data, mask=m.reshape(x.shape).T) @@ -1310,7 +1346,7 @@ def get_step_in_km(self, lat, wave_length): min_wave_length = max(step_x_km, step_y_km) * 2 if wave_length < min_wave_length: logger.error( - "wave_length too short for resolution, must be > %d km", + "Wave_length too short for resolution, must be > %d km", ceil(min_wave_length), ) raise Exception() @@ -1361,6 +1397,24 @@ def kernel_lanczos(self, lat, wave_length, order=1): kernel[dist_norm > order] = 0 return self.finalize_kernel(kernel, order, half_x_pt, half_y_pt) + def kernel_loess(self, lat, 
wave_length, order=1): + """ + https://fr.wikipedia.org/wiki/R%C3%A9gression_locale + """ + order = self.check_order(order) + half_x_pt, half_y_pt, dist_norm = self.estimate_kernel_shape( + lat, wave_length, order + ) + + def inc_func(xdist): + f = zeros(xdist.size) + f[abs(xdist) < 1] = 1 + return f + + kernel = (1 - abs(dist_norm) ** 3) ** 3 + kernel[abs(dist_norm) > order] = 0 + return self.finalize_kernel(kernel, order, half_x_pt, half_y_pt) + def kernel_bessel(self, lat, wave_length, order=1): """wave_length in km order must be int @@ -1638,11 +1692,13 @@ def compute_finite_difference(self, data, schema=1, mode="reflect", vertical=Fal data1[-schema:] = nan data2[:schema] = nan - d = self.EARTH_RADIUS * 2 * pi / 360 * 2 * schema + # Distance for one degree + d = self.EARTH_RADIUS * 2 * pi / 360 + # Mulitply by 2 step if vertical: - d *= self.ystep + d *= self.ystep * 2 * schema else: - d *= self.xstep * cos(deg2rad(self.y_c)) + d *= self.xstep * cos(deg2rad(self.y_c)) * 2 * schema return (data1 - data2) / d def compute_stencil( @@ -1855,7 +1911,7 @@ def speed_coef_mean(self, contour): return mean_on_regular_contour( self.x_c, self.y_c, - self._speed_ev, + self._speed_ev.data, self._speed_ev.mask, contour.vertices, nan_remove=True, @@ -1945,7 +2001,7 @@ def interp(self, grid_name, lons, lats, method="bilinear"): g = self.grid(grid_name) m = self.get_mask(g) return interp2d_geo( - self.x_c, self.y_c, g, m, lons, lats, nearest=method == "nearest" + self.x_c, self.y_c, g.data, m, lons, lats, nearest=method == "nearest" ) def uv_for_advection( @@ -1981,7 +2037,7 @@ def uv_for_advection( u = -u v = -v m = u.mask + v.mask - return u, v, m + return u.data, v.data, m def advect(self, x, y, u_name, v_name, nb_step=10, rk4=True, **kw): """ diff --git a/src/py_eddy_tracker/featured_tracking/old_tracker_reference.py b/src/py_eddy_tracker/featured_tracking/old_tracker_reference.py index 41e02db9..b0d4abfa 100644 --- a/src/py_eddy_tracker/featured_tracking/old_tracker_reference.py +++ b/src/py_eddy_tracker/featured_tracking/old_tracker_reference.py @@ -8,7 +8,6 @@ class CheltonTracker(Model): - __slots__ = tuple() GROUND = RegularGridDataset( diff --git a/src/py_eddy_tracker/generic.py b/src/py_eddy_tracker/generic.py index 29815acd..612def68 100644 --- a/src/py_eddy_tracker/generic.py +++ b/src/py_eddy_tracker/generic.py @@ -3,11 +3,27 @@ Tool method which use mostly numba """ -from numba import njit, prange -from numba import types as numba_types -from numpy import (absolute, arcsin, arctan2, bool_, cos, empty, floor, - histogram, interp, isnan, linspace, nan, ones, pi, radians, - sin, where, zeros) +from numba import njit, prange, types as numba_types +from numpy import ( + absolute, + arcsin, + arctan2, + bool_, + cos, + empty, + floor, + histogram, + interp, + isnan, + linspace, + nan, + ones, + pi, + radians, + sin, + where, + zeros, +) @njit(cache=True) @@ -285,14 +301,14 @@ def interp2d_bilinear(x_g, y_g, z_g, m_g, x, y): @njit(cache=True, fastmath=True) -def uniform_resample(x_val, y_val, num_fac=2, fixed_size=None): +def uniform_resample(x_val, y_val, num_fac=2, fixed_size=-1): """ Resample contours to have (nearly) equal spacing. 
:param array_like x_val: input x contour coordinates :param array_like y_val: input y contour coordinates :param int num_fac: factor to increase lengths of output coordinates - :param int,None fixed_size: if defined, will be used to set sampling + :param int fixed_size: if > -1, will be used to set sampling """ nb = x_val.shape[0] # Get distances @@ -303,7 +319,7 @@ def uniform_resample(x_val, y_val, num_fac=2, fixed_size=None): dist[1:][dist[1:] < 1e-3] = 1e-3 dist = dist.cumsum() # Get uniform distances - if fixed_size is None: + if fixed_size == -1: fixed_size = dist.size * num_fac d_uniform = linspace(0, dist[-1], fixed_size) x_new = interp(d_uniform, dist, x_val) diff --git a/src/py_eddy_tracker/observations/network.py b/src/py_eddy_tracker/observations/network.py index 4ffed94c..a2e2daed 100644 --- a/src/py_eddy_tracker/observations/network.py +++ b/src/py_eddy_tracker/observations/network.py @@ -2,26 +2,37 @@ """ Class to create network of observations """ +from glob import glob import logging import time -from glob import glob import netCDF4 -import zarr -from numba import njit -from numba import types as nb_types +from numba import njit, types as nb_types from numba.typed import List -from numpy import (arange, array, bincount, bool_, concatenate, empty, nan, - ones, percentile, uint16, uint32, unique, where, zeros) +from numpy import ( + arange, + array, + bincount, + bool_, + concatenate, + empty, + nan, + ones, + percentile, + uint16, + uint32, + unique, + where, + zeros, +) +import zarr from ..dataset.grid import GridCollection from ..generic import build_index, wrap_longitude from ..poly import bbox_intersection, vertice_overlap -from .groups import (GroupEddiesObservations, get_missing_indices, - particle_candidate) +from .groups import GroupEddiesObservations, get_missing_indices, particle_candidate from .observation import EddiesObservations -from .tracking import (TrackEddiesObservations, track_loess_filter, - track_median_filter) +from .tracking import TrackEddiesObservations, track_loess_filter, track_median_filter logger = logging.getLogger("pet") @@ -93,7 +104,6 @@ def fix_next_previous_obs(next_obs, previous_obs, flag_virtual): class NetworkObservations(GroupEddiesObservations): - __slots__ = ("_index_network", "_index_segment_track", "_segment_track_array") NOGROUP = 0 @@ -465,7 +475,6 @@ def find_link(self, i_observations, forward=True, backward=False): segments_connexion[seg][0] = i_slice if i_p != -1: - if p_seg not in segments_connexion: segments_connexion[p_seg] = [None, [], []] @@ -614,7 +623,6 @@ def relatives(self, obs, order=2): segments_connexion[seg][0] = i_slice if i_p != -1: - if p_seg not in segments_connexion: segments_connexion[p_seg] = [None, []] @@ -1242,7 +1250,9 @@ def networks_mask(self, id_networks, segment=False): def networks(self, id_networks): return self.extract_with_mask( - generate_mask_from_ids(array(id_networks), self.track.size, *self.index_network) + generate_mask_from_ids( + array(id_networks), self.track.size, *self.index_network + ) ) @property @@ -1638,7 +1648,6 @@ def analysis_coherence( correct_close_events=0, remove_dead_end=0, ): - """Global function to analyse segments coherence, with network preprocessing. 
:param callable date_function: python function, takes as param `int` (julian day) and return data filename associated to the date @@ -1719,7 +1728,6 @@ def segment_coherence_backward( contour_start="speed", contour_end="speed", ): - """ Percentage of particules and their targets after backward advection from a specific eddy. @@ -1797,7 +1805,6 @@ def segment_coherence_forward( contour_end="speed", **kwargs, ): - """ Percentage of particules and their targets after forward advection from a specific eddy. @@ -1886,6 +1893,48 @@ def mask_obs_close_event(self, merging=True, spliting=True, dt=3): mask_follow_obs(m, self.previous_obs, self.time, i_target, dt) return m + def swap_track( + self, + length_main_max_after_event=2, + length_secondary_min_after_event=10, + delta_pct_max=-0.2, + ): + events = self.splitting_event(triplet=True, only_index=True) + count = 0 + for i_main, i1, i2 in zip(*events): + seg_main, _, seg2 = ( + self.segment_track_array[i_main], + self.segment_track_array[i1], + self.segment_track_array[i2], + ) + i_start, i_end, i0 = self.index_segment_track + # For splitting + last_index_main = i_end[seg_main - i0] - 1 + last_index_secondary = i_end[seg2 - i0] - 1 + last_main_next_obs = self.next_obs[last_index_main] + t_event, t_main_end, t_secondary_start, t_secondary_end = ( + self.time[i_main], + self.time[last_index_main], + self.time[i2], + self.time[last_index_secondary], + ) + dt_main, dt_secondary = ( + t_main_end - t_event, + t_secondary_end - t_secondary_start, + ) + delta_cost = self.previous_cost[i2] - self.previous_cost[i1] + if ( + dt_main <= length_main_max_after_event + and dt_secondary >= length_secondary_min_after_event + and last_main_next_obs == -1 + and delta_cost > delta_pct_max + ): + self.segment[i1 : last_index_main + 1] = self.segment[i2] + self.segment[i2 : last_index_secondary + 1] = self.segment[i_main] + count += 1 + logger.info("%d segmnent swap on %d", count, len(events[0])) + return self.sort() + class Network: __slots__ = ( diff --git a/src/py_eddy_tracker/observations/observation.py b/src/py_eddy_tracker/observations/observation.py index 72031608..f710cf0a 100644 --- a/src/py_eddy_tracker/observations/observation.py +++ b/src/py_eddy_tracker/observations/observation.py @@ -2,38 +2,77 @@ """ Base class to manage eddy observation """ -import logging from datetime import datetime from io import BufferedReader, BytesIO +import logging from tarfile import ExFileObject from tokenize import TokenError -import packaging.version -import zarr +from Polygon import Polygon from matplotlib.cm import get_cmap from matplotlib.collections import LineCollection, PolyCollection from matplotlib.colors import Normalize from netCDF4 import Dataset -from numba import njit -from numba import types as numba_types -from numpy import (absolute, arange, array, array_equal, ceil, concatenate, - cos, datetime64, digitize, empty, errstate, floor, - histogram, histogram2d, in1d, isnan, linspace, ma, nan, - ndarray, ones, percentile, radians, sin, unique, where, - zeros) +from numba import njit, types as numba_types +from numpy import ( + absolute, + arange, + array, + array_equal, + ceil, + concatenate, + cos, + datetime64, + digitize, + empty, + errstate, + floor, + histogram, + histogram2d, + in1d, + isnan, + linspace, + ma, + nan, + ndarray, + ones, + percentile, + radians, + sin, + unique, + where, + zeros, +) +import packaging.version from pint import UnitRegistry from pint.errors import UndefinedUnitError -from Polygon import Polygon +import zarr from .. 
import VAR_DESCR, VAR_DESCR_inv, __version__ -from ..generic import (bbox_indice_regular, build_index, distance, - distance_grid, flatten_line_matrix, hist_numba, - local_to_coordinates, reverse_index, window_index, - wrap_longitude) -from ..poly import (bbox_intersection, close_center, convexs, - create_meshed_particles, create_vertice, - get_pixel_in_regular, insidepoly, poly_indexs, reduce_size, - vertice_overlap) +from ..generic import ( + bbox_indice_regular, + build_index, + distance, + distance_grid, + flatten_line_matrix, + hist_numba, + local_to_coordinates, + reverse_index, + window_index, + wrap_longitude, +) +from ..poly import ( + bbox_intersection, + close_center, + convexs, + create_meshed_particles, + create_vertice, + get_pixel_in_regular, + insidepoly, + poly_indexs, + reduce_size, + vertice_overlap, +) logger = logging.getLogger("pet") @@ -1808,8 +1847,8 @@ def extract_with_area(self, area, **kwargs): @property def time_datetime64(self): - dt = (datetime64('1970-01-01') - datetime64('1950-01-01')).astype('i8') - return (self.time - dt).astype('datetime64[D]') + dt = (datetime64("1970-01-01") - datetime64("1950-01-01")).astype("i8") + return (self.time - dt).astype("datetime64[D]") def time_sub_sample(self, t0, time_step): """ @@ -2215,7 +2254,7 @@ def grid_count(self, bins, intern=False, center=False, filter=slice(None)): x_ref = ((self.longitude[filter] - x0) % 360 + x0 - 180).reshape(-1, 1) x_contour, y_contour = self[x_name][filter], self[y_name][filter] grid_count_pixel_in( - grid, + grid.data, x_contour, y_contour, x_ref, @@ -2338,7 +2377,7 @@ def interp_grid( x, y = self.longitude, self.latitude if i is not None: x, y = x[i], y[i] - return grid_object.interp(varname, x,y , method) + return grid_object.interp(varname, x, y, method) elif method in ("min", "max", "mean", "count"): x0 = grid_object.x_bounds[0] x_name, y_name = self.intern(False if intern is None else intern) @@ -2352,7 +2391,7 @@ def interp_grid( grid_stat( grid_object.x_c, grid_object.y_c, - -grid if min_method else grid, + -grid.data if min_method else grid.data, grid.mask, x, y, diff --git a/src/py_eddy_tracker/poly.py b/src/py_eddy_tracker/poly.py index 217b1d18..491b0c3a 100644 --- a/src/py_eddy_tracker/poly.py +++ b/src/py_eddy_tracker/poly.py @@ -5,12 +5,10 @@ import heapq -from numba import njit, prange -from numba import types as numba_types -from numpy import (arctan, array, concatenate, empty, nan, ones, pi, where, - zeros) -from numpy.linalg import lstsq from Polygon import Polygon +from numba import njit, prange, types as numba_types +from numpy import arctan, array, concatenate, empty, nan, ones, pi, where, zeros +from numpy.linalg import lstsq from .generic import build_index diff --git a/src/py_eddy_tracker/tracking.py b/src/py_eddy_tracker/tracking.py index 16616d5a..9329e3bd 100644 --- a/src/py_eddy_tracker/tracking.py +++ b/src/py_eddy_tracker/tracking.py @@ -409,14 +409,14 @@ def to_netcdf(self, handler): logger.debug('Create Dimensions "Nstep" : %d', nb_step) handler.createDimension("Nstep", nb_step) var_file_in = handler.createVariable( - zlib=True, + zlib=False, complevel=1, varname="FileIn", datatype="S1024", dimensions="Nstep", ) var_file_out = handler.createVariable( - zlib=True, + zlib=False, complevel=1, varname="FileOut", datatype="S1024", From b93850af1331b5bd0428ccd8832b8c479f0d019f Mon Sep 17 00:00:00 2001 From: "A. 
Delepoulle" <36040805+AntSimi@users.noreply.github.com> Date: Thu, 4 May 2023 16:20:39 +0200 Subject: [PATCH 091/115] Add example with correspondance --- .../pet_how_to_use_correspondances.py | 94 +++++++++++ .../pet_how_to_use_correspondances.ipynb | 155 ++++++++++++++++++ .../observations/observation.py | 6 +- src/py_eddy_tracker/tracking.py | 7 +- 4 files changed, 258 insertions(+), 4 deletions(-) create mode 100644 examples/08_tracking_manipulation/pet_how_to_use_correspondances.py create mode 100644 notebooks/python_module/08_tracking_manipulation/pet_how_to_use_correspondances.ipynb diff --git a/examples/08_tracking_manipulation/pet_how_to_use_correspondances.py b/examples/08_tracking_manipulation/pet_how_to_use_correspondances.py new file mode 100644 index 00000000..8161ad81 --- /dev/null +++ b/examples/08_tracking_manipulation/pet_how_to_use_correspondances.py @@ -0,0 +1,94 @@ +""" +Correspondances +=============== + +Correspondances is a mechanism to intend to continue tracking with new detection + +""" + +import logging + +# %% +from matplotlib import pyplot as plt +from netCDF4 import Dataset + +from py_eddy_tracker import start_logger +from py_eddy_tracker.data import get_remote_demo_sample +from py_eddy_tracker.featured_tracking.area_tracker import AreaTracker + +# In order to hide some warning +import py_eddy_tracker.observations.observation +from py_eddy_tracker.tracking import Correspondances + +py_eddy_tracker.observations.observation._display_check_warning = False + + +# %% +def plot_eddy(ed): + fig = plt.figure(figsize=(10, 5)) + ax = fig.add_axes([0.05, 0.03, 0.90, 0.94]) + ed.plot(ax, ref=-10, marker="x") + lc = ed.display_color(ax, field=ed.time, ref=-10, intern=True) + plt.colorbar(lc).set_label("Time in Julian days (from 1950/01/01)") + ax.set_xlim(4.5, 8), ax.set_ylim(36.8, 38.3) + ax.set_aspect("equal") + ax.grid() + + +# %% +# Get remote data, we will keep only 20 first days, +# `get_remote_demo_sample` function is only to get demo dataset, in your own case give a list of identification filename +# and don't mix cyclonic and anticyclonic files. 
+file_objects = get_remote_demo_sample( + "eddies_med_adt_allsat_dt2018/Anticyclonic_2010_2011_2012" +)[:20] + +# %% +# We run a traking with a tracker which use contour overlap, on 10 first time step +c_first_run = Correspondances( + datasets=file_objects[:10], class_method=AreaTracker, virtual=4 +) +start_logger().setLevel("INFO") +c_first_run.track() +start_logger().setLevel("WARNING") +with Dataset("correspondances.nc", "w") as h: + c_first_run.to_netcdf(h) +# Next step are done only to build atlas and display it +c_first_run.prepare_merging() + +# We have now an eddy object +eddies_area_tracker = c_first_run.merge(raw_data=False) +eddies_area_tracker.virtual[:] = eddies_area_tracker.time == 0 +eddies_area_tracker.filled_by_interpolation(eddies_area_tracker.virtual == 1) + +# %% +# Plot from first ten days +plot_eddy(eddies_area_tracker) + +# %% +# Restart from previous run +# ------------------------- +# We give all filenames, the new one and filename from previous run +c_second_run = Correspondances( + datasets=file_objects[:20], + # This parameter must be identical in each run + class_method=AreaTracker, + virtual=4, + # Previous saved correspondancs + previous_correspondance="correspondances.nc", +) +start_logger().setLevel("INFO") +c_second_run.track() +start_logger().setLevel("WARNING") +c_second_run.prepare_merging() +# We have now another eddy object +eddies_area_tracker_extend = c_second_run.merge(raw_data=False) +eddies_area_tracker_extend.virtual[:] = eddies_area_tracker_extend.time == 0 +eddies_area_tracker_extend.filled_by_interpolation( + eddies_area_tracker_extend.virtual == 1 +) + + +# %% +# Plot with time extension +plot_eddy(eddies_area_tracker_extend) diff --git a/notebooks/python_module/08_tracking_manipulation/pet_how_to_use_correspondances.ipynb b/notebooks/python_module/08_tracking_manipulation/pet_how_to_use_correspondances.ipynb new file mode 100644 index 00000000..0681c0fc --- /dev/null +++ b/notebooks/python_module/08_tracking_manipulation/pet_how_to_use_correspondances.ipynb @@ -0,0 +1,155 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "\n# Correspondances\n\nCorrespondances is a mechanism to intend to continue tracking with new detection\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "import logging" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "from matplotlib import pyplot as plt\nfrom netCDF4 import Dataset\n\nfrom py_eddy_tracker import start_logger\nfrom py_eddy_tracker.data import get_remote_demo_sample\nfrom py_eddy_tracker.featured_tracking.area_tracker import AreaTracker\n\n# In order to hide some warning\nimport py_eddy_tracker.observations.observation\nfrom py_eddy_tracker.tracking import Correspondances\n\npy_eddy_tracker.observations.observation._display_check_warning = False" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "def plot_eddy(ed):\n fig = plt.figure(figsize=(10, 5))\n ax = fig.add_axes([0.05, 0.03, 0.90, 0.94])\n ed.plot(ax, ref=-10, marker=\"x\")\n lc = ed.display_color(ax, field=ed.time, ref=-10, intern=True)\n plt.colorbar(lc).set_label(\"Time in Julian days (from 1950/01/01)\")\n ax.set_xlim(4.5, 8), ax.set_ylim(36.8, 38.3)\n ax.set_aspect(\"equal\")\n ax.grid()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, 
+ "source": [ + "Get remote data, we will keep only 20 first days,\n`get_remote_demo_sample` function is only to get demo dataset, in your own case give a list of identification filename\nand don't mix cyclonic and anticyclonic files.\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "file_objects = get_remote_demo_sample(\n \"eddies_med_adt_allsat_dt2018/Anticyclonic_2010_2011_2012\"\n)[:20]" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We run a traking with a tracker which use contour overlap, on 10 first time step\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "c_first_run = Correspondances(\n datasets=file_objects[:10], class_method=AreaTracker, virtual=4\n)\nstart_logger().setLevel(\"INFO\")\nc_first_run.track()\nstart_logger().setLevel(\"WARNING\")\nwith Dataset(\"correspondances.nc\", \"w\") as h:\n c_first_run.to_netcdf(h)\n# Next step are done only to build atlas and display it\nc_first_run.prepare_merging()\n\n# We have now an eddy object\neddies_area_tracker = c_first_run.merge(raw_data=False)\neddies_area_tracker.virtual[:] = eddies_area_tracker.time == 0\neddies_area_tracker.filled_by_interpolation(eddies_area_tracker.virtual == 1)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Plot from first ten days\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "plot_eddy(eddies_area_tracker)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Restart from previous run\nWe give all filenames, the new one and filename from previous run\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "c_second_run = Correspondances(\n datasets=file_objects[:20],\n # This parameter must be identical in each run\n class_method=AreaTracker,\n virtual=4,\n # Previous saved correspondancs\n previous_correspondance=\"correspondances.nc\",\n)\nstart_logger().setLevel(\"INFO\")\nc_second_run.track()\nstart_logger().setLevel(\"WARNING\")\nc_second_run.prepare_merging()\n# We have now another eddy object\neddies_area_tracker_extend = c_second_run.merge(raw_data=False)\neddies_area_tracker_extend.virtual[:] = eddies_area_tracker_extend.time == 0\neddies_area_tracker_extend.filled_by_interpolation(\n eddies_area_tracker_extend.virtual == 1\n)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Plot with time extension\n\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "plot_eddy(eddies_area_tracker_extend)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.10" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} \ No newline at end of file diff --git a/src/py_eddy_tracker/observations/observation.py b/src/py_eddy_tracker/observations/observation.py index f710cf0a..b39f7f83 100644 --- a/src/py_eddy_tracker/observations/observation.py 
+++ b/src/py_eddy_tracker/observations/observation.py
@@ -80,6 +80,7 @@
 _software_version_reduced = packaging.version.Version(
     "{v.major}.{v.minor}".format(v=packaging.version.parse(__version__))
 )
+_display_check_warning = True
 
 
 def _check_versions(version):
@@ -90,7 +91,8 @@ def _check_versions(version):
     :param version: string version of software used to create the file. If None, version was not provided
     :type version: str, None
     """
-
+    if not _display_check_warning:
+        return
     file_version = packaging.version.parse(version) if version is not None else None
     if file_version is None or file_version < _software_version_reduced:
         logger.warning(
@@ -774,7 +776,7 @@ def load_file(cls, filename, **kwargs):
             zarr_file = filename_.endswith(end)
         else:
             zarr_file = False
-        logger.info(f"loading file '{filename}'")
+        logger.info(f"loading file '{filename_}'")
         if zarr_file:
             return cls.load_from_zarr(filename, **kwargs)
         else:
diff --git a/src/py_eddy_tracker/tracking.py b/src/py_eddy_tracker/tracking.py
index 9329e3bd..b64b6fcc 100644
--- a/src/py_eddy_tracker/tracking.py
+++ b/src/py_eddy_tracker/tracking.py
@@ -2,11 +2,11 @@
 """
 Class to store link between observations
 """
-
 from datetime import datetime, timedelta
 import json
 import logging
 import platform
+from tarfile import ExFileObject
 
 from netCDF4 import Dataset, default_fillvals
 from numba import njit, types as numba_types
@@ -375,7 +375,10 @@ def track(self):
         # We begin with second file, first one is in previous
         for file_name in self.datasets[first_dataset:]:
             self.swap_dataset(file_name, **kwargs)
-            logger.info("%s match with previous state", file_name)
+            filename_ = (
+                file_name.filename if isinstance(file_name, ExFileObject) else file_name
+            )
+            logger.info("%s match with previous state", filename_)
             logger.debug("%d obs to match", len(self.current_obs))
 
             nb_real_obs = len(self.previous_obs)

From 65971875c9aa530a4038bbc6b2b9321cc6c4374d Mon Sep 17 00:00:00 2001
From: "A. Delepoulle" <36040805+AntSimi@users.noreply.github.com>
Date: Wed, 10 May 2023 16:53:13 +0200
Subject: [PATCH 092/115] Correction on vertice overlap which used the same variable for two things ...
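
The defect dates from the overlap speed-up in PATCH 087, which dropped the trailing underscores inside vertice_overlap: the freshly computed area was stored in p1_area, shadowing the boolean keyword of the same name, so the "elif p1_area:" branch fired for any polygon with non-zero area instead of only when the caller asked for that normalisation. A condensed sketch of the three cost formulas involved; overlap_cost is a hypothetical stand-in here, the real function works on whole vertex arrays:

# Condensed sketch of the cost dispatch in vertice_overlap; names ending in
# "_" are the computed areas, minimal_area / p1_area are the caller's switches.
def overlap_cost(intersection, p0_area_, p1_area_, minimal_area=False, p1_area=False):
    if minimal_area:
        # overlap as a share of the smaller polygon
        return intersection / min(p0_area_, p1_area_)
    if p1_area:
        # overlap as a share of the second polygon only
        return intersection / p1_area_
    # default: intersection over union, bounded in [0, 1]
    return intersection / (p0_area_ + p1_area_ - intersection)

With the underscored names restored, the min_overlap threshold and the hybrid_area fallback once again test the quantity the caller configured.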
--- src/py_eddy_tracker/poly.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/src/py_eddy_tracker/poly.py b/src/py_eddy_tracker/poly.py index 491b0c3a..b5849610 100644 --- a/src/py_eddy_tracker/poly.py +++ b/src/py_eddy_tracker/poly.py @@ -477,22 +477,22 @@ def vertice_overlap( if intersection == 0: cost[i] = 0 continue - p0_area, p1_area = p0.area(), p1.area() + p0_area_, p1_area_ = p0.area(), p1.area() if minimal_area: - cost_ = intersection / min(p0_area, p1_area) + cost_ = intersection / min(p0_area_, p1_area_) # we divide intersection with p1 elif p1_area: - cost_ = intersection / p1_area + cost_ = intersection / p1_area_ # we divide intersection with polygon merging result from 0 to 1 else: - cost_ = intersection / (p0_area + p1_area - intersection) + cost_ = intersection / (p0_area_ + p1_area_ - intersection) if cost_ >= min_overlap: cost[i] = cost_ else: if ( hybrid_area and cost_ != 0 - and (intersection / min(p0_area, p1_area)) > 0.99 + and (intersection / min(p0_area_, p1_area_)) > 0.99 ): cost[i] = cost_ else: From e659c503f62e02f511f4a151f122756b90e22522 Mon Sep 17 00:00:00 2001 From: Antoine Delepoulle <36040805+AntSimi@users.noreply.github.com> Date: Wed, 20 Sep 2023 09:50:59 +0200 Subject: [PATCH 093/115] Update pet_okubo_weiss.py change sign in formula --- examples/06_grid_manipulation/pet_okubo_weiss.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/06_grid_manipulation/pet_okubo_weiss.py b/examples/06_grid_manipulation/pet_okubo_weiss.py index 818a6742..aa8a063e 100644 --- a/examples/06_grid_manipulation/pet_okubo_weiss.py +++ b/examples/06_grid_manipulation/pet_okubo_weiss.py @@ -2,7 +2,7 @@ Get Okubo Weis ============== -.. math:: OW = S_n^2 + S_s^2 + \omega^2 +.. math:: OW = S_n^2 + S_s^2 - \omega^2 with normal strain (:math:`S_n`), shear strain (:math:`S_s`) and vorticity (:math:`\omega`) From 73b017c9e4c7a60c33ad339eb82248bc3bda9237 Mon Sep 17 00:00:00 2001 From: Antoine Delepoulle <36040805+AntSimi@users.noreply.github.com> Date: Wed, 27 Sep 2023 15:34:01 +0200 Subject: [PATCH 094/115] issue #207 change speed compute formula to avoid mask shrinking --- src/py_eddy_tracker/dataset/grid.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/py_eddy_tracker/dataset/grid.py b/src/py_eddy_tracker/dataset/grid.py index 7e9a04be..edb96bac 100644 --- a/src/py_eddy_tracker/dataset/grid.py +++ b/src/py_eddy_tracker/dataset/grid.py @@ -38,6 +38,7 @@ round_, sin, sinc, + sqrt, where, zeros, ) @@ -1919,7 +1920,8 @@ def speed_coef_mean(self, contour): def init_speed_coef(self, uname="u", vname="v"): """Draft""" - self._speed_ev = (self.grid(uname) ** 2 + self.grid(vname) ** 2) ** 0.5 + u, v = self.grid(uname), self.grid(vname) + self._speed_ev = sqrt(u * u + v * v) def display(self, ax, name, factor=1, ref=None, **kwargs): """ From c7430cef0f9f133296db0ef144ebd15a73d73300 Mon Sep 17 00:00:00 2001 From: Antoine Delepoulle <36040805+AntSimi@users.noreply.github.com> Date: Thu, 21 Dec 2023 10:49:48 +0100 Subject: [PATCH 095/115] Update python-app.yml python version --- .github/workflows/python-app.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/python-app.yml b/.github/workflows/python-app.yml index 00dbcc95..f2f4753e 100644 --- a/.github/workflows/python-app.yml +++ b/.github/workflows/python-app.yml @@ -11,7 +11,7 @@ jobs: matrix: # os: [ubuntu-latest, macos-latest, windows-latest] os: [ubuntu-latest, windows-latest] - python_version: ['3.10'] + python_version: 
['3.10', '3.11', '3.12'] name: Run py eddy tracker build tests runs-on: ${{ matrix.os }} defaults: From 62e283be6558e9abec374d153bfbbaa3ffb8415a Mon Sep 17 00:00:00 2001 From: "A. Delepoulle" <36040805+AntSimi@users.noreply.github.com> Date: Mon, 29 Jul 2024 14:50:11 +0200 Subject: [PATCH 096/115] specify ubuntu lts --- .github/workflows/python-app.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/python-app.yml b/.github/workflows/python-app.yml index f2f4753e..7c93faae 100644 --- a/.github/workflows/python-app.yml +++ b/.github/workflows/python-app.yml @@ -10,7 +10,7 @@ jobs: strategy: matrix: # os: [ubuntu-latest, macos-latest, windows-latest] - os: [ubuntu-latest, windows-latest] + os: [ubuntu-lts-latest, windows-latest] python_version: ['3.10', '3.11', '3.12'] name: Run py eddy tracker build tests runs-on: ${{ matrix.os }} From e6d4ada203bfe29b2a7231428948fcba7bbbcc74 Mon Sep 17 00:00:00 2001 From: "A. Delepoulle" <36040805+AntSimi@users.noreply.github.com> Date: Mon, 29 Jul 2024 14:56:25 +0200 Subject: [PATCH 097/115] Specify os for readthedocs --- .readthedocs.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.readthedocs.yml b/.readthedocs.yml index 1299f38e..ec749526 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -1,6 +1,8 @@ version: 2 conda: environment: doc/environment.yml +build: + os: ubuntu-lts-latest python: install: - method: setuptools From 80f8e143adf5ae992286cbe42336730e88a06c9e Mon Sep 17 00:00:00 2001 From: "A. Delepoulle" <36040805+AntSimi@users.noreply.github.com> Date: Mon, 29 Jul 2024 14:58:56 +0200 Subject: [PATCH 098/115] Add python spec for readthedocs --- .readthedocs.yml | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/.readthedocs.yml b/.readthedocs.yml index ec749526..a04495b6 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -3,7 +3,9 @@ conda: environment: doc/environment.yml build: os: ubuntu-lts-latest + tools: + python: "3.10" python: - install: - - method: setuptools - path: . + install: + - method: setuptools + path: . From 27bafc6d7bb21b02e2ceada0d048a906b4616609 Mon Sep 17 00:00:00 2001 From: "A. 
Delepoulle" <36040805+AntSimi@users.noreply.github.com> Date: Mon, 29 Jul 2024 15:08:35 +0200 Subject: [PATCH 099/115] update versionner --- src/py_eddy_tracker/__init__.py | 12 +- src/py_eddy_tracker/_version.py | 449 ++++++++++++++++++++------------ 2 files changed, 296 insertions(+), 165 deletions(-) diff --git a/src/py_eddy_tracker/__init__.py b/src/py_eddy_tracker/__init__.py index 0a98892d..7115bf67 100644 --- a/src/py_eddy_tracker/__init__.py +++ b/src/py_eddy_tracker/__init__.py @@ -32,13 +32,13 @@ del get_versions -def start_logger(): +def start_logger(color=True): FORMAT_LOG = "%(levelname)-8s %(asctime)s %(module)s.%(funcName)s :\n\t%(message)s" logger = logging.getLogger("pet") if len(logger.handlers) == 0: # set up logging to CONSOLE console = logging.StreamHandler() - console.setFormatter(ColoredFormatter(FORMAT_LOG)) + console.setFormatter(ColoredFormatter(FORMAT_LOG, color=color)) # add the handler to the root logger logger.addHandler(console) return logger @@ -53,13 +53,14 @@ class ColoredFormatter(logging.Formatter): DEBUG="\033[34m\t", ) - def __init__(self, message): + def __init__(self, message, color=True): super().__init__(message) + self.with_color = color def format(self, record): color = self.COLOR_LEVEL.get(record.levelname, "") color_reset = "\033[0m" - model = color + "%s" + color_reset + model = (color + "%s" + color_reset) if self.with_color else "%s" record.msg = model % record.msg record.funcName = model % record.funcName record.module = model % record.module @@ -696,3 +697,6 @@ def identify_time(str_date): VAR_DESCR_inv[VAR_DESCR[key]["nc_name"]] = key for key_old in VAR_DESCR[key].get("old_nc_name", list()): VAR_DESCR_inv[key_old] = key + +from . import _version +__version__ = _version.get_versions()['version'] diff --git a/src/py_eddy_tracker/_version.py b/src/py_eddy_tracker/_version.py index 44367e3a..589e706f 100644 --- a/src/py_eddy_tracker/_version.py +++ b/src/py_eddy_tracker/_version.py @@ -1,11 +1,13 @@ + # This file helps to compute a version number in source trees obtained from # git-archive tarball (such as those provided by githubs download-from-tag # feature). Distribution tarballs (built by setup.py sdist) and build # directories (produced by setup.py build) will contain a much shorter file # that just contains the computed version number. -# This file is released into the public domain. Generated by -# versioneer-0.18 (https://github.com/warner/python-versioneer) +# This file is released into the public domain. +# Generated by versioneer-0.29 +# https://github.com/python-versioneer/python-versioneer """Git implementation of _version.py.""" @@ -14,9 +16,11 @@ import re import subprocess import sys +from typing import Any, Callable, Dict, List, Optional, Tuple +import functools -def get_keywords(): +def get_keywords() -> Dict[str, str]: """Get the keywords needed to look up the version information.""" # these strings will be replaced by git during git-archive. 
# setup.py/versioneer.py will grep for the variable names, so they must @@ -32,8 +36,15 @@ def get_keywords(): class VersioneerConfig: """Container for Versioneer configuration parameters.""" + VCS: str + style: str + tag_prefix: str + parentdir_prefix: str + versionfile_source: str + verbose: bool + -def get_config(): +def get_config() -> VersioneerConfig: """Create, populate and return the VersioneerConfig() object.""" # these strings are filled in when 'setup.py versioneer' creates # _version.py @@ -51,41 +62,50 @@ class NotThisMethod(Exception): """Exception raised if a method is not valid for the current scenario.""" -LONG_VERSION_PY = {} -HANDLERS = {} - +LONG_VERSION_PY: Dict[str, str] = {} +HANDLERS: Dict[str, Dict[str, Callable]] = {} -def register_vcs_handler(vcs, method): # decorator - """Decorator to mark a method as the handler for a particular VCS.""" - def decorate(f): +def register_vcs_handler(vcs: str, method: str) -> Callable: # decorator + """Create decorator to mark a method as the handler of a VCS.""" + def decorate(f: Callable) -> Callable: """Store f in HANDLERS[vcs][method].""" if vcs not in HANDLERS: HANDLERS[vcs] = {} HANDLERS[vcs][method] = f return f - return decorate -def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=None): +def run_command( + commands: List[str], + args: List[str], + cwd: Optional[str] = None, + verbose: bool = False, + hide_stderr: bool = False, + env: Optional[Dict[str, str]] = None, +) -> Tuple[Optional[str], Optional[int]]: """Call the given command(s).""" assert isinstance(commands, list) - p = None - for c in commands: + process = None + + popen_kwargs: Dict[str, Any] = {} + if sys.platform == "win32": + # This hides the console window if pythonw.exe is used + startupinfo = subprocess.STARTUPINFO() + startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW + popen_kwargs["startupinfo"] = startupinfo + + for command in commands: try: - dispcmd = str([c] + args) + dispcmd = str([command] + args) # remember shell=False, so use git.cmd on windows, not just git - p = subprocess.Popen( - [c] + args, - cwd=cwd, - env=env, - stdout=subprocess.PIPE, - stderr=(subprocess.PIPE if hide_stderr else None), - ) + process = subprocess.Popen([command] + args, cwd=cwd, env=env, + stdout=subprocess.PIPE, + stderr=(subprocess.PIPE if hide_stderr + else None), **popen_kwargs) break - except EnvironmentError: - e = sys.exc_info()[1] + except OSError as e: if e.errno == errno.ENOENT: continue if verbose: @@ -96,18 +116,20 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env= if verbose: print("unable to find command, tried %s" % (commands,)) return None, None - stdout = p.communicate()[0].strip() - if sys.version_info[0] >= 3: - stdout = stdout.decode() - if p.returncode != 0: + stdout = process.communicate()[0].strip().decode() + if process.returncode != 0: if verbose: print("unable to run %s (error)" % dispcmd) print("stdout was %s" % stdout) - return None, p.returncode - return stdout, p.returncode + return None, process.returncode + return stdout, process.returncode -def versions_from_parentdir(parentdir_prefix, root, verbose): +def versions_from_parentdir( + parentdir_prefix: str, + root: str, + verbose: bool, +) -> Dict[str, Any]: """Try to determine the version from the parent directory name. 
Source tarballs conventionally unpack into a directory that includes both @@ -116,64 +138,64 @@ def versions_from_parentdir(parentdir_prefix, root, verbose): """ rootdirs = [] - for i in range(3): + for _ in range(3): dirname = os.path.basename(root) if dirname.startswith(parentdir_prefix): - return { - "version": dirname[len(parentdir_prefix) :], - "full-revisionid": None, - "dirty": False, - "error": None, - "date": None, - } - else: - rootdirs.append(root) - root = os.path.dirname(root) # up a level + return {"version": dirname[len(parentdir_prefix):], + "full-revisionid": None, + "dirty": False, "error": None, "date": None} + rootdirs.append(root) + root = os.path.dirname(root) # up a level if verbose: - print( - "Tried directories %s but none started with prefix %s" - % (str(rootdirs), parentdir_prefix) - ) + print("Tried directories %s but none started with prefix %s" % + (str(rootdirs), parentdir_prefix)) raise NotThisMethod("rootdir doesn't start with parentdir_prefix") @register_vcs_handler("git", "get_keywords") -def git_get_keywords(versionfile_abs): +def git_get_keywords(versionfile_abs: str) -> Dict[str, str]: """Extract version information from the given file.""" # the code embedded in _version.py can just fetch the value of these # keywords. When used from setup.py, we don't want to import _version.py, # so we do it with a regexp instead. This function is not used from # _version.py. - keywords = {} + keywords: Dict[str, str] = {} try: - f = open(versionfile_abs, "r") - for line in f.readlines(): - if line.strip().startswith("git_refnames ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["refnames"] = mo.group(1) - if line.strip().startswith("git_full ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["full"] = mo.group(1) - if line.strip().startswith("git_date ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["date"] = mo.group(1) - f.close() - except EnvironmentError: + with open(versionfile_abs, "r") as fobj: + for line in fobj: + if line.strip().startswith("git_refnames ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["refnames"] = mo.group(1) + if line.strip().startswith("git_full ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["full"] = mo.group(1) + if line.strip().startswith("git_date ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["date"] = mo.group(1) + except OSError: pass return keywords @register_vcs_handler("git", "keywords") -def git_versions_from_keywords(keywords, tag_prefix, verbose): +def git_versions_from_keywords( + keywords: Dict[str, str], + tag_prefix: str, + verbose: bool, +) -> Dict[str, Any]: """Get version information from git keywords.""" - if not keywords: - raise NotThisMethod("no keywords at all, weird") + if "refnames" not in keywords: + raise NotThisMethod("Short version file found") date = keywords.get("date") if date is not None: + # Use only the last line. Previous lines may contain GPG signature + # information. + date = date.splitlines()[-1] + # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant # datestamp. 
However we prefer "%ci" (which expands to an "ISO-8601 # -like" string, which we must then edit to make compliant), because @@ -186,11 +208,11 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose): if verbose: print("keywords are unexpanded, not using") raise NotThisMethod("unexpanded keywords, not a git-archive tarball") - refs = set([r.strip() for r in refnames.strip("()").split(",")]) + refs = {r.strip() for r in refnames.strip("()").split(",")} # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of # just "foo-1.0". If we see a "tag: " prefix, prefer those. TAG = "tag: " - tags = set([r[len(TAG) :] for r in refs if r.startswith(TAG)]) + tags = {r[len(TAG):] for r in refs if r.startswith(TAG)} if not tags: # Either we're using git < 1.8.3, or there really are no tags. We use # a heuristic: assume all version tags have a digit. The old git %d @@ -199,7 +221,7 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose): # between branches and tags. By ignoring refnames without digits, we # filter out many common branch names like "release" and # "stabilization", as well as "HEAD" and "master". - tags = set([r for r in refs if re.search(r"\d", r)]) + tags = {r for r in refs if re.search(r'\d', r)} if verbose: print("discarding '%s', no digits" % ",".join(refs - tags)) if verbose: @@ -207,30 +229,33 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose): for ref in sorted(tags): # sorting will prefer e.g. "2.0" over "2.0rc1" if ref.startswith(tag_prefix): - r = ref[len(tag_prefix) :] + r = ref[len(tag_prefix):] + # Filter out refs that exactly match prefix or that don't start + # with a number once the prefix is stripped (mostly a concern + # when prefix is '') + if not re.match(r'\d', r): + continue if verbose: print("picking %s" % r) - return { - "version": r, - "full-revisionid": keywords["full"].strip(), - "dirty": False, - "error": None, - "date": date, - } + return {"version": r, + "full-revisionid": keywords["full"].strip(), + "dirty": False, "error": None, + "date": date} # no suitable tags, so version is "0+unknown", but full hex is still there if verbose: print("no suitable tags, using unknown + full revision id") - return { - "version": "0+unknown", - "full-revisionid": keywords["full"].strip(), - "dirty": False, - "error": "no suitable tags", - "date": None, - } + return {"version": "0+unknown", + "full-revisionid": keywords["full"].strip(), + "dirty": False, "error": "no suitable tags", "date": None} @register_vcs_handler("git", "pieces_from_vcs") -def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): +def git_pieces_from_vcs( + tag_prefix: str, + root: str, + verbose: bool, + runner: Callable = run_command +) -> Dict[str, Any]: """Get version from 'git describe' in the root of the source tree. This only gets called if the git-archive 'subst' keywords were *not* @@ -241,7 +266,15 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): if sys.platform == "win32": GITS = ["git.cmd", "git.exe"] - out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, hide_stderr=True) + # GIT_DIR can interfere with correct operation of Versioneer. + # It may be intended to be passed to the Versioneer-versioned project, + # but that should not change where we get our version from. 
+ env = os.environ.copy() + env.pop("GIT_DIR", None) + runner = functools.partial(runner, env=env) + + _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root, + hide_stderr=not verbose) if rc != 0: if verbose: print("Directory %s not under git control" % root) @@ -249,33 +282,57 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] # if there isn't one, this yields HEX[-dirty] (no NUM) - describe_out, rc = run_command( - GITS, - [ - "describe", - "--tags", - "--dirty", - "--always", - "--long", - "--match", - "%s*" % tag_prefix, - ], - cwd=root, - ) + describe_out, rc = runner(GITS, [ + "describe", "--tags", "--dirty", "--always", "--long", + "--match", f"{tag_prefix}[[:digit:]]*" + ], cwd=root) # --long was added in git-1.5.5 if describe_out is None: raise NotThisMethod("'git describe' failed") describe_out = describe_out.strip() - full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) + full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root) if full_out is None: raise NotThisMethod("'git rev-parse' failed") full_out = full_out.strip() - pieces = {} + pieces: Dict[str, Any] = {} pieces["long"] = full_out pieces["short"] = full_out[:7] # maybe improved later pieces["error"] = None + branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"], + cwd=root) + # --abbrev-ref was added in git-1.6.3 + if rc != 0 or branch_name is None: + raise NotThisMethod("'git rev-parse --abbrev-ref' returned error") + branch_name = branch_name.strip() + + if branch_name == "HEAD": + # If we aren't exactly on a branch, pick a branch which represents + # the current commit. If all else fails, we are on a branchless + # commit. + branches, rc = runner(GITS, ["branch", "--contains"], cwd=root) + # --contains was added in git-1.5.4 + if rc != 0 or branches is None: + raise NotThisMethod("'git branch --contains' returned error") + branches = branches.split("\n") + + # Remove the first line if we're running detached + if "(" in branches[0]: + branches.pop(0) + + # Strip off the leading "* " from the list of branches. + branches = [branch[2:] for branch in branches] + if "master" in branches: + branch_name = "master" + elif not branches: + branch_name = None + else: + # Pick the first branch that is returned. Good or bad. + branch_name = branches[0] + + pieces["branch"] = branch_name + # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] # TAG might have hyphens. git_describe = describe_out @@ -284,16 +341,17 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): dirty = git_describe.endswith("-dirty") pieces["dirty"] = dirty if dirty: - git_describe = git_describe[: git_describe.rindex("-dirty")] + git_describe = git_describe[:git_describe.rindex("-dirty")] # now we have TAG-NUM-gHEX or HEX if "-" in git_describe: # TAG-NUM-gHEX - mo = re.search(r"^(.+)-(\d+)-g([0-9a-f]+)$", git_describe) + mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) if not mo: - # unparseable. Maybe git-describe is misbehaving? - pieces["error"] = "unable to parse git-describe output: '%s'" % describe_out + # unparsable. Maybe git-describe is misbehaving? 
+ pieces["error"] = ("unable to parse git-describe output: '%s'" + % describe_out) return pieces # tag @@ -302,12 +360,10 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): if verbose: fmt = "tag '%s' doesn't start with prefix '%s'" print(fmt % (full_tag, tag_prefix)) - pieces["error"] = "tag '%s' doesn't start with prefix '%s'" % ( - full_tag, - tag_prefix, - ) + pieces["error"] = ("tag '%s' doesn't start with prefix '%s'" + % (full_tag, tag_prefix)) return pieces - pieces["closest-tag"] = full_tag[len(tag_prefix) :] + pieces["closest-tag"] = full_tag[len(tag_prefix):] # distance: number of commits since tag pieces["distance"] = int(mo.group(2)) @@ -318,26 +374,27 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): else: # HEX: no tags pieces["closest-tag"] = None - count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], cwd=root) - pieces["distance"] = int(count_out) # total number of commits + out, rc = runner(GITS, ["rev-list", "HEAD", "--left-right"], cwd=root) + pieces["distance"] = len(out.split()) # total number of commits # commit date: see ISO-8601 comment in git_versions_from_keywords() - date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[ - 0 - ].strip() + date = runner(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip() + # Use only the last line. Previous lines may contain GPG signature + # information. + date = date.splitlines()[-1] pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) return pieces -def plus_or_dot(pieces): +def plus_or_dot(pieces: Dict[str, Any]) -> str: """Return a + if we don't already have one, else return a .""" if "+" in pieces.get("closest-tag", ""): return "." return "+" -def render_pep440(pieces): +def render_pep440(pieces: Dict[str, Any]) -> str: """Build up version string, with post-release "local version identifier". Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you @@ -355,29 +412,78 @@ def render_pep440(pieces): rendered += ".dirty" else: # exception #1 - rendered = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"]) + rendered = "0+untagged.%d.g%s" % (pieces["distance"], + pieces["short"]) if pieces["dirty"]: rendered += ".dirty" return rendered -def render_pep440_pre(pieces): - """TAG[.post.devDISTANCE] -- No -dirty. +def render_pep440_branch(pieces: Dict[str, Any]) -> str: + """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] . + + The ".dev0" means not master branch. Note that .dev0 sorts backwards + (a feature branch will appear "older" than the master branch). Exceptions: - 1: no tags. 0.post.devDISTANCE + 1: no tags. 0[.dev0]+untagged.DISTANCE.gHEX[.dirty] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += plus_or_dot(pieces) + rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + else: + # exception #1 + rendered = "0" + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += "+untagged.%d.g%s" % (pieces["distance"], + pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + return rendered + + +def pep440_split_post(ver: str) -> Tuple[str, Optional[int]]: + """Split pep440 version string at the post-release segment. + + Returns the release segments before the post-release and the + post-release version number (or -1 if no post-release segment is present). 
+ """ + vc = str.split(ver, ".post") + return vc[0], int(vc[1] or 0) if len(vc) == 2 else None + + +def render_pep440_pre(pieces: Dict[str, Any]) -> str: + """TAG[.postN.devDISTANCE] -- No -dirty. + + Exceptions: + 1: no tags. 0.post0.devDISTANCE + """ + if pieces["closest-tag"]: if pieces["distance"]: - rendered += ".post.dev%d" % pieces["distance"] + # update the post release segment + tag_version, post_version = pep440_split_post(pieces["closest-tag"]) + rendered = tag_version + if post_version is not None: + rendered += ".post%d.dev%d" % (post_version + 1, pieces["distance"]) + else: + rendered += ".post0.dev%d" % (pieces["distance"]) + else: + # no commits, use the tag as the version + rendered = pieces["closest-tag"] else: # exception #1 - rendered = "0.post.dev%d" % pieces["distance"] + rendered = "0.post0.dev%d" % pieces["distance"] return rendered -def render_pep440_post(pieces): +def render_pep440_post(pieces: Dict[str, Any]) -> str: """TAG[.postDISTANCE[.dev0]+gHEX] . The ".dev0" means dirty. Note that .dev0 sorts backwards @@ -404,12 +510,41 @@ def render_pep440_post(pieces): return rendered -def render_pep440_old(pieces): +def render_pep440_post_branch(pieces: Dict[str, Any]) -> str: + """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] . + + The ".dev0" means not master branch. + + Exceptions: + 1: no tags. 0.postDISTANCE[.dev0]+gHEX[.dirty] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += ".post%d" % pieces["distance"] + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += plus_or_dot(pieces) + rendered += "g%s" % pieces["short"] + if pieces["dirty"]: + rendered += ".dirty" + else: + # exception #1 + rendered = "0.post%d" % pieces["distance"] + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += "+g%s" % pieces["short"] + if pieces["dirty"]: + rendered += ".dirty" + return rendered + + +def render_pep440_old(pieces: Dict[str, Any]) -> str: """TAG[.postDISTANCE[.dev0]] . The ".dev0" means dirty. - Eexceptions: + Exceptions: 1: no tags. 0.postDISTANCE[.dev0] """ if pieces["closest-tag"]: @@ -426,7 +561,7 @@ def render_pep440_old(pieces): return rendered -def render_git_describe(pieces): +def render_git_describe(pieces: Dict[str, Any]) -> str: """TAG[-DISTANCE-gHEX][-dirty]. Like 'git describe --tags --dirty --always'. @@ -446,7 +581,7 @@ def render_git_describe(pieces): return rendered -def render_git_describe_long(pieces): +def render_git_describe_long(pieces: Dict[str, Any]) -> str: """TAG-DISTANCE-gHEX[-dirty]. Like 'git describe --tags --dirty --always -long'. 
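The reworked `pep440-pre` style above is easiest to follow with concrete values. A small self-contained sketch, reusing `pep440_split_post` exactly as added in this patch (the tags and distances below are made-up examples):

    from typing import Optional, Tuple


    def pep440_split_post(ver: str) -> Tuple[str, Optional[int]]:
        # "1.4.post2" -> ("1.4", 2); "1.4" -> ("1.4", None); "1.4.post" -> ("1.4", 0)
        vc = str.split(ver, ".post")
        return vc[0], int(vc[1] or 0) if len(vc) == 2 else None


    assert pep440_split_post("1.4.post2") == ("1.4", 2)
    assert pep440_split_post("1.4") == ("1.4", None)

    # render_pep440_pre then bumps the post segment: a tag "1.4.post2" seen
    # 5 commits back renders as "1.4.post3.dev5", while a plain tag "1.4"
    # renders as "1.4.post0.dev5".

The hunks that follow extend `render` and `get_versions` with the new `pep440-branch` and `pep440-post-branch` styles.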
@@ -466,26 +601,28 @@ def render_git_describe_long(pieces): return rendered -def render(pieces, style): +def render(pieces: Dict[str, Any], style: str) -> Dict[str, Any]: """Render the given version pieces into the requested style.""" if pieces["error"]: - return { - "version": "unknown", - "full-revisionid": pieces.get("long"), - "dirty": None, - "error": pieces["error"], - "date": None, - } + return {"version": "unknown", + "full-revisionid": pieces.get("long"), + "dirty": None, + "error": pieces["error"], + "date": None} if not style or style == "default": style = "pep440" # the default if style == "pep440": rendered = render_pep440(pieces) + elif style == "pep440-branch": + rendered = render_pep440_branch(pieces) elif style == "pep440-pre": rendered = render_pep440_pre(pieces) elif style == "pep440-post": rendered = render_pep440_post(pieces) + elif style == "pep440-post-branch": + rendered = render_pep440_post_branch(pieces) elif style == "pep440-old": rendered = render_pep440_old(pieces) elif style == "git-describe": @@ -495,16 +632,12 @@ def render(pieces, style): else: raise ValueError("unknown style '%s'" % style) - return { - "version": rendered, - "full-revisionid": pieces["long"], - "dirty": pieces["dirty"], - "error": None, - "date": pieces.get("date"), - } + return {"version": rendered, "full-revisionid": pieces["long"], + "dirty": pieces["dirty"], "error": None, + "date": pieces.get("date")} -def get_versions(): +def get_versions() -> Dict[str, Any]: """Get version information or return default if unable to do so.""" # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have # __file__, we can work backwards from there to the root. Some @@ -515,7 +648,8 @@ def get_versions(): verbose = cfg.verbose try: - return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, verbose) + return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, + verbose) except NotThisMethod: pass @@ -524,16 +658,13 @@ def get_versions(): # versionfile_source is the relative path from the top of the source # tree (where the .git directory might live) to this file. Invert # this to find the root from __file__. - for i in cfg.versionfile_source.split("/"): + for _ in cfg.versionfile_source.split('/'): root = os.path.dirname(root) except NameError: - return { - "version": "0+unknown", - "full-revisionid": None, - "dirty": None, - "error": "unable to find root of source tree", - "date": None, - } + return {"version": "0+unknown", "full-revisionid": None, + "dirty": None, + "error": "unable to find root of source tree", + "date": None} try: pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) @@ -547,10 +678,6 @@ def get_versions(): except NotThisMethod: pass - return { - "version": "0+unknown", - "full-revisionid": None, - "dirty": None, - "error": "unable to compute version", - "date": None, - } + return {"version": "0+unknown", "full-revisionid": None, + "dirty": None, + "error": "unable to compute version", "date": None} From 0cb3230e8a97731e25d8e8d65df1e405c1d6406c Mon Sep 17 00:00:00 2001 From: "A. 
Delepoulle" <36040805+AntSimi@users.noreply.github.com>
Date: Mon, 29 Jul 2024 15:13:00 +0200
Subject: [PATCH 100/115] versioneer file

---
 versioneer.py | 1354 +++++++++++++++++++++++++++++++------------------
 1 file changed, 873 insertions(+), 481 deletions(-)

diff --git a/versioneer.py b/versioneer.py
index 2b545405..1e3753e6 100644
--- a/versioneer.py
+++ b/versioneer.py
@@ -1,4 +1,5 @@
-# Version: 0.18
+
+# Version: 0.29

 """The Versioneer - like a rocketeer, but for versions.

@@ -6,18 +7,14 @@
 ==============

 * like a rocketeer, but for versions!
-* https://github.com/warner/python-versioneer
+* https://github.com/python-versioneer/python-versioneer
 * Brian Warner
-* License: Public Domain
-* Compatible With: python2.6, 2.7, 3.2, 3.3, 3.4, 3.5, 3.6, and pypy
-* [![Latest Version]
-(https://pypip.in/version/versioneer/badge.svg?style=flat)
-](https://pypi.python.org/pypi/versioneer/)
-* [![Build Status]
-(https://travis-ci.org/warner/python-versioneer.png?branch=master)
-](https://travis-ci.org/warner/python-versioneer)
-
-This is a tool for managing a recorded version number in distutils-based
+* License: Public Domain (Unlicense)
+* Compatible with: Python 3.7, 3.8, 3.9, 3.10, 3.11 and pypy3
+* [![Latest Version][pypi-image]][pypi-url]
+* [![Build Status][travis-image]][travis-url]
+
+This is a tool for managing a recorded version number in setuptools-based
 python projects. The goal is to remove the tedious and error-prone "update
 the embedded version string" step from your release process. Making a new
 release should be as easy as recording a new tag in your version-control
 system, and maybe making new tarballs.

@@ -26,9 +23,38 @@

 ## Quick Install

-* `pip install versioneer` to somewhere to your $PATH
-* add a `[versioneer]` section to your setup.cfg (see below)
-* run `versioneer install` in your source tree, commit the results
+Versioneer provides two installation modes. The "classic" vendored mode installs
+a copy of versioneer into your repository. The experimental build-time dependency mode
+is intended to allow you to skip this step and simplify the process of upgrading.
+ +### Vendored mode + +* `pip install versioneer` to somewhere in your $PATH + * A [conda-forge recipe](https://github.com/conda-forge/versioneer-feedstock) is + available, so you can also use `conda install -c conda-forge versioneer` +* add a `[tool.versioneer]` section to your `pyproject.toml` or a + `[versioneer]` section to your `setup.cfg` (see [Install](INSTALL.md)) + * Note that you will need to add `tomli; python_version < "3.11"` to your + build-time dependencies if you use `pyproject.toml` +* run `versioneer install --vendor` in your source tree, commit the results +* verify version information with `python setup.py version` + +### Build-time dependency mode + +* `pip install versioneer` to somewhere in your $PATH + * A [conda-forge recipe](https://github.com/conda-forge/versioneer-feedstock) is + available, so you can also use `conda install -c conda-forge versioneer` +* add a `[tool.versioneer]` section to your `pyproject.toml` or a + `[versioneer]` section to your `setup.cfg` (see [Install](INSTALL.md)) +* add `versioneer` (with `[toml]` extra, if configuring in `pyproject.toml`) + to the `requires` key of the `build-system` table in `pyproject.toml`: + ```toml + [build-system] + requires = ["setuptools", "versioneer[toml]"] + build-backend = "setuptools.build_meta" + ``` +* run `versioneer install --no-vendor` in your source tree, commit the results +* verify version information with `python setup.py version` ## Version Identifiers @@ -60,7 +86,7 @@ for example `git describe --tags --dirty --always` reports things like "0.7-1-g574ab98-dirty" to indicate that the checkout is one revision past the 0.7 tag, has a unique revision id of "574ab98", and is "dirty" (it has -uncommitted changes. +uncommitted changes). The version identifier is used for multiple purposes: @@ -165,7 +191,7 @@ Some situations are known to cause problems for Versioneer. This details the most significant ones. More can be found on Github -[issues page](https://github.com/warner/python-versioneer/issues). +[issues page](https://github.com/python-versioneer/python-versioneer/issues). ### Subprojects @@ -179,7 +205,7 @@ `setup.cfg`, and `tox.ini`. Projects like these produce multiple PyPI distributions (and upload multiple independently-installable tarballs). * Source trees whose main purpose is to contain a C library, but which also - provide bindings to Python (and perhaps other langauges) in subdirectories. + provide bindings to Python (and perhaps other languages) in subdirectories. Versioneer will look for `.git` in parent directories, and most operations should get the right version string. However `pip` and `setuptools` have bugs @@ -193,9 +219,9 @@ Pip-8.1.1 is known to have this problem, but hopefully it will get fixed in some later version. -[Bug #38](https://github.com/warner/python-versioneer/issues/38) is tracking +[Bug #38](https://github.com/python-versioneer/python-versioneer/issues/38) is tracking this issue. The discussion in -[PR #61](https://github.com/warner/python-versioneer/pull/61) describes the +[PR #61](https://github.com/python-versioneer/python-versioneer/pull/61) describes the issue from the Versioneer side in more detail. [pip PR#3176](https://github.com/pypa/pip/pull/3176) and [pip PR#3615](https://github.com/pypa/pip/pull/3615) contain work to improve @@ -223,31 +249,20 @@ cause egg_info to be rebuilt (including `sdist`, `wheel`, and installing into a different virtualenv), so this can be surprising. 
-[Bug #83](https://github.com/warner/python-versioneer/issues/83) describes +[Bug #83](https://github.com/python-versioneer/python-versioneer/issues/83) describes this one, but upgrading to a newer version of setuptools should probably resolve it. -### Unicode version strings - -While Versioneer works (and is continually tested) with both Python 2 and -Python 3, it is not entirely consistent with bytes-vs-unicode distinctions. -Newer releases probably generate unicode version strings on py2. It's not -clear that this is wrong, but it may be surprising for applications when then -write these strings to a network connection or include them in bytes-oriented -APIs like cryptographic checksums. - -[Bug #71](https://github.com/warner/python-versioneer/issues/71) investigates -this question. - ## Updating Versioneer To upgrade your project to a new release of Versioneer, do the following: * install the new Versioneer (`pip install -U versioneer` or equivalent) -* edit `setup.cfg`, if necessary, to include any new configuration settings - indicated by the release notes. See [UPGRADING](./UPGRADING.md) for details. -* re-run `versioneer install` in your source tree, to replace +* edit `setup.cfg` and `pyproject.toml`, if necessary, + to include any new configuration settings indicated by the release notes. + See [UPGRADING](./UPGRADING.md) for details. +* re-run `versioneer install --[no-]vendor` in your source tree, to replace `SRC/_version.py` * commit any changed files @@ -264,36 +279,70 @@ direction and include code from all supported VCS systems, reducing the number of intermediate scripts. +## Similar projects + +* [setuptools_scm](https://github.com/pypa/setuptools_scm/) - a non-vendored build-time + dependency +* [minver](https://github.com/jbweston/miniver) - a lightweight reimplementation of + versioneer +* [versioningit](https://github.com/jwodder/versioningit) - a PEP 518-based setuptools + plugin ## License To make Versioneer easier to embed, all its code is dedicated to the public domain. The `_version.py` that it creates is also in the public domain. -Specifically, both are released under the Creative Commons "Public Domain -Dedication" license (CC0-1.0), as described in -https://creativecommons.org/publicdomain/zero/1.0/ . +Specifically, both are released under the "Unlicense", as described in +https://unlicense.org/. 
-""" +[pypi-image]: https://img.shields.io/pypi/v/versioneer.svg +[pypi-url]: https://pypi.python.org/pypi/versioneer/ +[travis-image]: +https://img.shields.io/travis/com/python-versioneer/python-versioneer.svg +[travis-url]: https://travis-ci.com/github/python-versioneer/python-versioneer -from __future__ import print_function +""" +# pylint:disable=invalid-name,import-outside-toplevel,missing-function-docstring +# pylint:disable=missing-class-docstring,too-many-branches,too-many-statements +# pylint:disable=raise-missing-from,too-many-lines,too-many-locals,import-error +# pylint:disable=too-few-public-methods,redefined-outer-name,consider-using-with +# pylint:disable=attribute-defined-outside-init,too-many-arguments -try: - import configparser -except ImportError: - import ConfigParser as configparser +import configparser import errno import json import os import re import subprocess import sys +from pathlib import Path +from typing import Any, Callable, cast, Dict, List, Optional, Tuple, Union +from typing import NoReturn +import functools + +have_tomllib = True +if sys.version_info >= (3, 11): + import tomllib +else: + try: + import tomli as tomllib + except ImportError: + have_tomllib = False class VersioneerConfig: """Container for Versioneer configuration parameters.""" + VCS: str + style: str + tag_prefix: str + versionfile_source: str + versionfile_build: Optional[str] + parentdir_prefix: Optional[str] + verbose: Optional[bool] -def get_root(): + +def get_root() -> str: """Get the project root directory. We require that all commands are run from the project root, i.e. the @@ -301,20 +350,28 @@ def get_root(): """ root = os.path.realpath(os.path.abspath(os.getcwd())) setup_py = os.path.join(root, "setup.py") + pyproject_toml = os.path.join(root, "pyproject.toml") versioneer_py = os.path.join(root, "versioneer.py") - if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)): + if not ( + os.path.exists(setup_py) + or os.path.exists(pyproject_toml) + or os.path.exists(versioneer_py) + ): # allow 'python path/to/setup.py COMMAND' root = os.path.dirname(os.path.realpath(os.path.abspath(sys.argv[0]))) setup_py = os.path.join(root, "setup.py") + pyproject_toml = os.path.join(root, "pyproject.toml") versioneer_py = os.path.join(root, "versioneer.py") - if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)): - err = ( - "Versioneer was unable to run the project root directory. " - "Versioneer requires setup.py to be executed from " - "its immediate directory (like 'python setup.py COMMAND'), " - "or in a way that lets it use sys.argv[0] to find the root " - "(like 'python path/to/setup.py COMMAND')." - ) + if not ( + os.path.exists(setup_py) + or os.path.exists(pyproject_toml) + or os.path.exists(versioneer_py) + ): + err = ("Versioneer was unable to run the project root directory. " + "Versioneer requires setup.py to be executed from " + "its immediate directory (like 'python setup.py COMMAND'), " + "or in a way that lets it use sys.argv[0] to find the root " + "(like 'python path/to/setup.py COMMAND').") raise VersioneerBadRootError(err) try: # Certain runtime workflows (setup.py install/develop in a setuptools @@ -323,46 +380,62 @@ def get_root(): # module-import table will cache the first one. So we can't use # os.path.dirname(__file__), as that will find whichever # versioneer.py was first imported, even in later projects. 
- me = os.path.realpath(os.path.abspath(__file__)) - me_dir = os.path.normcase(os.path.splitext(me)[0]) + my_path = os.path.realpath(os.path.abspath(__file__)) + me_dir = os.path.normcase(os.path.splitext(my_path)[0]) vsr_dir = os.path.normcase(os.path.splitext(versioneer_py)[0]) - if me_dir != vsr_dir: - print( - "Warning: build in %s is using versioneer.py from %s" - % (os.path.dirname(me), versioneer_py) - ) + if me_dir != vsr_dir and "VERSIONEER_PEP518" not in globals(): + print("Warning: build in %s is using versioneer.py from %s" + % (os.path.dirname(my_path), versioneer_py)) except NameError: pass return root -def get_config_from_root(root): +def get_config_from_root(root: str) -> VersioneerConfig: """Read the project setup.cfg file to determine Versioneer config.""" - # This might raise EnvironmentError (if setup.cfg is missing), or + # This might raise OSError (if setup.cfg is missing), or # configparser.NoSectionError (if it lacks a [versioneer] section), or # configparser.NoOptionError (if it lacks "VCS="). See the docstring at # the top of versioneer.py for instructions on writing your setup.cfg . - setup_cfg = os.path.join(root, "setup.cfg") - parser = configparser.SafeConfigParser() - with open(setup_cfg, "r") as f: - parser.readfp(f) - VCS = parser.get("versioneer", "VCS") # mandatory - - def get(parser, name): - if parser.has_option("versioneer", name): - return parser.get("versioneer", name) - return None + root_pth = Path(root) + pyproject_toml = root_pth / "pyproject.toml" + setup_cfg = root_pth / "setup.cfg" + section: Union[Dict[str, Any], configparser.SectionProxy, None] = None + if pyproject_toml.exists() and have_tomllib: + try: + with open(pyproject_toml, 'rb') as fobj: + pp = tomllib.load(fobj) + section = pp['tool']['versioneer'] + except (tomllib.TOMLDecodeError, KeyError) as e: + print(f"Failed to load config from {pyproject_toml}: {e}") + print("Try to load it from setup.cfg") + if not section: + parser = configparser.ConfigParser() + with open(setup_cfg) as cfg_file: + parser.read_file(cfg_file) + parser.get("versioneer", "VCS") # raise error if missing + + section = parser["versioneer"] + + # `cast`` really shouldn't be used, but its simplest for the + # common VersioneerConfig users at the moment. 
We verify against + # `None` values elsewhere where it matters cfg = VersioneerConfig() - cfg.VCS = VCS - cfg.style = get(parser, "style") or "" - cfg.versionfile_source = get(parser, "versionfile_source") - cfg.versionfile_build = get(parser, "versionfile_build") - cfg.tag_prefix = get(parser, "tag_prefix") - if cfg.tag_prefix in ("''", '""'): + cfg.VCS = section['VCS'] + cfg.style = section.get("style", "") + cfg.versionfile_source = cast(str, section.get("versionfile_source")) + cfg.versionfile_build = section.get("versionfile_build") + cfg.tag_prefix = cast(str, section.get("tag_prefix")) + if cfg.tag_prefix in ("''", '""', None): cfg.tag_prefix = "" - cfg.parentdir_prefix = get(parser, "parentdir_prefix") - cfg.verbose = get(parser, "verbose") + cfg.parentdir_prefix = section.get("parentdir_prefix") + if isinstance(section, configparser.SectionProxy): + # Make sure configparser translates to bool + cfg.verbose = section.getboolean("verbose") + else: + cfg.verbose = section.get("verbose") + return cfg @@ -371,41 +444,48 @@ class NotThisMethod(Exception): # these dictionaries contain VCS-specific tools -LONG_VERSION_PY = {} -HANDLERS = {} +LONG_VERSION_PY: Dict[str, str] = {} +HANDLERS: Dict[str, Dict[str, Callable]] = {} -def register_vcs_handler(vcs, method): # decorator - """Decorator to mark a method as the handler for a particular VCS.""" - - def decorate(f): +def register_vcs_handler(vcs: str, method: str) -> Callable: # decorator + """Create decorator to mark a method as the handler of a VCS.""" + def decorate(f: Callable) -> Callable: """Store f in HANDLERS[vcs][method].""" - if vcs not in HANDLERS: - HANDLERS[vcs] = {} - HANDLERS[vcs][method] = f + HANDLERS.setdefault(vcs, {})[method] = f return f - return decorate -def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=None): +def run_command( + commands: List[str], + args: List[str], + cwd: Optional[str] = None, + verbose: bool = False, + hide_stderr: bool = False, + env: Optional[Dict[str, str]] = None, +) -> Tuple[Optional[str], Optional[int]]: """Call the given command(s).""" assert isinstance(commands, list) - p = None - for c in commands: + process = None + + popen_kwargs: Dict[str, Any] = {} + if sys.platform == "win32": + # This hides the console window if pythonw.exe is used + startupinfo = subprocess.STARTUPINFO() + startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW + popen_kwargs["startupinfo"] = startupinfo + + for command in commands: try: - dispcmd = str([c] + args) + dispcmd = str([command] + args) # remember shell=False, so use git.cmd on windows, not just git - p = subprocess.Popen( - [c] + args, - cwd=cwd, - env=env, - stdout=subprocess.PIPE, - stderr=(subprocess.PIPE if hide_stderr else None), - ) + process = subprocess.Popen([command] + args, cwd=cwd, env=env, + stdout=subprocess.PIPE, + stderr=(subprocess.PIPE if hide_stderr + else None), **popen_kwargs) break - except EnvironmentError: - e = sys.exc_info()[1] + except OSError as e: if e.errno == errno.ENOENT: continue if verbose: @@ -416,28 +496,25 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env= if verbose: print("unable to find command, tried %s" % (commands,)) return None, None - stdout = p.communicate()[0].strip() - if sys.version_info[0] >= 3: - stdout = stdout.decode() - if p.returncode != 0: + stdout = process.communicate()[0].strip().decode() + if process.returncode != 0: if verbose: print("unable to run %s (error)" % dispcmd) print("stdout was %s" % stdout) - return None, 
p.returncode - return stdout, p.returncode + return None, process.returncode + return stdout, process.returncode -LONG_VERSION_PY[ - "git" -] = ''' +LONG_VERSION_PY['git'] = r''' # This file helps to compute a version number in source trees obtained from # git-archive tarball (such as those provided by githubs download-from-tag # feature). Distribution tarballs (built by setup.py sdist) and build # directories (produced by setup.py build) will contain a much shorter file # that just contains the computed version number. -# This file is released into the public domain. Generated by -# versioneer-0.18 (https://github.com/warner/python-versioneer) +# This file is released into the public domain. +# Generated by versioneer-0.29 +# https://github.com/python-versioneer/python-versioneer """Git implementation of _version.py.""" @@ -446,9 +523,11 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env= import re import subprocess import sys +from typing import Any, Callable, Dict, List, Optional, Tuple +import functools -def get_keywords(): +def get_keywords() -> Dict[str, str]: """Get the keywords needed to look up the version information.""" # these strings will be replaced by git during git-archive. # setup.py/versioneer.py will grep for the variable names, so they must @@ -464,8 +543,15 @@ def get_keywords(): class VersioneerConfig: """Container for Versioneer configuration parameters.""" + VCS: str + style: str + tag_prefix: str + parentdir_prefix: str + versionfile_source: str + verbose: bool + -def get_config(): +def get_config() -> VersioneerConfig: """Create, populate and return the VersioneerConfig() object.""" # these strings are filled in when 'setup.py versioneer' creates # _version.py @@ -483,13 +569,13 @@ class NotThisMethod(Exception): """Exception raised if a method is not valid for the current scenario.""" -LONG_VERSION_PY = {} -HANDLERS = {} +LONG_VERSION_PY: Dict[str, str] = {} +HANDLERS: Dict[str, Dict[str, Callable]] = {} -def register_vcs_handler(vcs, method): # decorator - """Decorator to mark a method as the handler for a particular VCS.""" - def decorate(f): +def register_vcs_handler(vcs: str, method: str) -> Callable: # decorator + """Create decorator to mark a method as the handler of a VCS.""" + def decorate(f: Callable) -> Callable: """Store f in HANDLERS[vcs][method].""" if vcs not in HANDLERS: HANDLERS[vcs] = {} @@ -498,22 +584,35 @@ def decorate(f): return decorate -def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, - env=None): +def run_command( + commands: List[str], + args: List[str], + cwd: Optional[str] = None, + verbose: bool = False, + hide_stderr: bool = False, + env: Optional[Dict[str, str]] = None, +) -> Tuple[Optional[str], Optional[int]]: """Call the given command(s).""" assert isinstance(commands, list) - p = None - for c in commands: + process = None + + popen_kwargs: Dict[str, Any] = {} + if sys.platform == "win32": + # This hides the console window if pythonw.exe is used + startupinfo = subprocess.STARTUPINFO() + startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW + popen_kwargs["startupinfo"] = startupinfo + + for command in commands: try: - dispcmd = str([c] + args) + dispcmd = str([command] + args) # remember shell=False, so use git.cmd on windows, not just git - p = subprocess.Popen([c] + args, cwd=cwd, env=env, - stdout=subprocess.PIPE, - stderr=(subprocess.PIPE if hide_stderr - else None)) + process = subprocess.Popen([command] + args, cwd=cwd, env=env, + stdout=subprocess.PIPE, + 
stderr=(subprocess.PIPE if hide_stderr + else None), **popen_kwargs) break - except EnvironmentError: - e = sys.exc_info()[1] + except OSError as e: if e.errno == errno.ENOENT: continue if verbose: @@ -524,18 +623,20 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, if verbose: print("unable to find command, tried %%s" %% (commands,)) return None, None - stdout = p.communicate()[0].strip() - if sys.version_info[0] >= 3: - stdout = stdout.decode() - if p.returncode != 0: + stdout = process.communicate()[0].strip().decode() + if process.returncode != 0: if verbose: print("unable to run %%s (error)" %% dispcmd) print("stdout was %%s" %% stdout) - return None, p.returncode - return stdout, p.returncode + return None, process.returncode + return stdout, process.returncode -def versions_from_parentdir(parentdir_prefix, root, verbose): +def versions_from_parentdir( + parentdir_prefix: str, + root: str, + verbose: bool, +) -> Dict[str, Any]: """Try to determine the version from the parent directory name. Source tarballs conventionally unpack into a directory that includes both @@ -544,15 +645,14 @@ def versions_from_parentdir(parentdir_prefix, root, verbose): """ rootdirs = [] - for i in range(3): + for _ in range(3): dirname = os.path.basename(root) if dirname.startswith(parentdir_prefix): return {"version": dirname[len(parentdir_prefix):], "full-revisionid": None, "dirty": False, "error": None, "date": None} - else: - rootdirs.append(root) - root = os.path.dirname(root) # up a level + rootdirs.append(root) + root = os.path.dirname(root) # up a level if verbose: print("Tried directories %%s but none started with prefix %%s" %% @@ -561,41 +661,48 @@ def versions_from_parentdir(parentdir_prefix, root, verbose): @register_vcs_handler("git", "get_keywords") -def git_get_keywords(versionfile_abs): +def git_get_keywords(versionfile_abs: str) -> Dict[str, str]: """Extract version information from the given file.""" # the code embedded in _version.py can just fetch the value of these # keywords. When used from setup.py, we don't want to import _version.py, # so we do it with a regexp instead. This function is not used from # _version.py. 
- keywords = {} + keywords: Dict[str, str] = {} try: - f = open(versionfile_abs, "r") - for line in f.readlines(): - if line.strip().startswith("git_refnames ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["refnames"] = mo.group(1) - if line.strip().startswith("git_full ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["full"] = mo.group(1) - if line.strip().startswith("git_date ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["date"] = mo.group(1) - f.close() - except EnvironmentError: + with open(versionfile_abs, "r") as fobj: + for line in fobj: + if line.strip().startswith("git_refnames ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["refnames"] = mo.group(1) + if line.strip().startswith("git_full ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["full"] = mo.group(1) + if line.strip().startswith("git_date ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["date"] = mo.group(1) + except OSError: pass return keywords @register_vcs_handler("git", "keywords") -def git_versions_from_keywords(keywords, tag_prefix, verbose): +def git_versions_from_keywords( + keywords: Dict[str, str], + tag_prefix: str, + verbose: bool, +) -> Dict[str, Any]: """Get version information from git keywords.""" - if not keywords: - raise NotThisMethod("no keywords at all, weird") + if "refnames" not in keywords: + raise NotThisMethod("Short version file found") date = keywords.get("date") if date is not None: + # Use only the last line. Previous lines may contain GPG signature + # information. + date = date.splitlines()[-1] + # git-2.2.0 added "%%cI", which expands to an ISO-8601 -compliant # datestamp. However we prefer "%%ci" (which expands to an "ISO-8601 # -like" string, which we must then edit to make compliant), because @@ -608,11 +715,11 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose): if verbose: print("keywords are unexpanded, not using") raise NotThisMethod("unexpanded keywords, not a git-archive tarball") - refs = set([r.strip() for r in refnames.strip("()").split(",")]) + refs = {r.strip() for r in refnames.strip("()").split(",")} # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of # just "foo-1.0". If we see a "tag: " prefix, prefer those. TAG = "tag: " - tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) + tags = {r[len(TAG):] for r in refs if r.startswith(TAG)} if not tags: # Either we're using git < 1.8.3, or there really are no tags. We use # a heuristic: assume all version tags have a digit. The old git %%d @@ -621,7 +728,7 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose): # between branches and tags. By ignoring refnames without digits, we # filter out many common branch names like "release" and # "stabilization", as well as "HEAD" and "master". - tags = set([r for r in refs if re.search(r'\d', r)]) + tags = {r for r in refs if re.search(r'\d', r)} if verbose: print("discarding '%%s', no digits" %% ",".join(refs - tags)) if verbose: @@ -630,6 +737,11 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose): # sorting will prefer e.g. 
"2.0" over "2.0rc1" if ref.startswith(tag_prefix): r = ref[len(tag_prefix):] + # Filter out refs that exactly match prefix or that don't start + # with a number once the prefix is stripped (mostly a concern + # when prefix is '') + if not re.match(r'\d', r): + continue if verbose: print("picking %%s" %% r) return {"version": r, @@ -645,7 +757,12 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose): @register_vcs_handler("git", "pieces_from_vcs") -def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): +def git_pieces_from_vcs( + tag_prefix: str, + root: str, + verbose: bool, + runner: Callable = run_command +) -> Dict[str, Any]: """Get version from 'git describe' in the root of the source tree. This only gets called if the git-archive 'subst' keywords were *not* @@ -656,8 +773,15 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): if sys.platform == "win32": GITS = ["git.cmd", "git.exe"] - out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, - hide_stderr=True) + # GIT_DIR can interfere with correct operation of Versioneer. + # It may be intended to be passed to the Versioneer-versioned project, + # but that should not change where we get our version from. + env = os.environ.copy() + env.pop("GIT_DIR", None) + runner = functools.partial(runner, env=env) + + _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root, + hide_stderr=not verbose) if rc != 0: if verbose: print("Directory %%s not under git control" %% root) @@ -665,24 +789,57 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] # if there isn't one, this yields HEX[-dirty] (no NUM) - describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty", - "--always", "--long", - "--match", "%%s*" %% tag_prefix], - cwd=root) + describe_out, rc = runner(GITS, [ + "describe", "--tags", "--dirty", "--always", "--long", + "--match", f"{tag_prefix}[[:digit:]]*" + ], cwd=root) # --long was added in git-1.5.5 if describe_out is None: raise NotThisMethod("'git describe' failed") describe_out = describe_out.strip() - full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) + full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root) if full_out is None: raise NotThisMethod("'git rev-parse' failed") full_out = full_out.strip() - pieces = {} + pieces: Dict[str, Any] = {} pieces["long"] = full_out pieces["short"] = full_out[:7] # maybe improved later pieces["error"] = None + branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"], + cwd=root) + # --abbrev-ref was added in git-1.6.3 + if rc != 0 or branch_name is None: + raise NotThisMethod("'git rev-parse --abbrev-ref' returned error") + branch_name = branch_name.strip() + + if branch_name == "HEAD": + # If we aren't exactly on a branch, pick a branch which represents + # the current commit. If all else fails, we are on a branchless + # commit. + branches, rc = runner(GITS, ["branch", "--contains"], cwd=root) + # --contains was added in git-1.5.4 + if rc != 0 or branches is None: + raise NotThisMethod("'git branch --contains' returned error") + branches = branches.split("\n") + + # Remove the first line if we're running detached + if "(" in branches[0]: + branches.pop(0) + + # Strip off the leading "* " from the list of branches. 
+ branches = [branch[2:] for branch in branches] + if "master" in branches: + branch_name = "master" + elif not branches: + branch_name = None + else: + # Pick the first branch that is returned. Good or bad. + branch_name = branches[0] + + pieces["branch"] = branch_name + # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] # TAG might have hyphens. git_describe = describe_out @@ -699,7 +856,7 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): # TAG-NUM-gHEX mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) if not mo: - # unparseable. Maybe git-describe is misbehaving? + # unparsable. Maybe git-describe is misbehaving? pieces["error"] = ("unable to parse git-describe output: '%%s'" %% describe_out) return pieces @@ -724,26 +881,27 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): else: # HEX: no tags pieces["closest-tag"] = None - count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], - cwd=root) - pieces["distance"] = int(count_out) # total number of commits + out, rc = runner(GITS, ["rev-list", "HEAD", "--left-right"], cwd=root) + pieces["distance"] = len(out.split()) # total number of commits # commit date: see ISO-8601 comment in git_versions_from_keywords() - date = run_command(GITS, ["show", "-s", "--format=%%ci", "HEAD"], - cwd=root)[0].strip() + date = runner(GITS, ["show", "-s", "--format=%%ci", "HEAD"], cwd=root)[0].strip() + # Use only the last line. Previous lines may contain GPG signature + # information. + date = date.splitlines()[-1] pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) return pieces -def plus_or_dot(pieces): +def plus_or_dot(pieces: Dict[str, Any]) -> str: """Return a + if we don't already have one, else return a .""" if "+" in pieces.get("closest-tag", ""): return "." return "+" -def render_pep440(pieces): +def render_pep440(pieces: Dict[str, Any]) -> str: """Build up version string, with post-release "local version identifier". Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you @@ -768,23 +926,71 @@ def render_pep440(pieces): return rendered -def render_pep440_pre(pieces): - """TAG[.post.devDISTANCE] -- No -dirty. +def render_pep440_branch(pieces: Dict[str, Any]) -> str: + """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] . + + The ".dev0" means not master branch. Note that .dev0 sorts backwards + (a feature branch will appear "older" than the master branch). Exceptions: - 1: no tags. 0.post.devDISTANCE + 1: no tags. 0[.dev0]+untagged.DISTANCE.gHEX[.dirty] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += plus_or_dot(pieces) + rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + else: + # exception #1 + rendered = "0" + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += "+untagged.%%d.g%%s" %% (pieces["distance"], + pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + return rendered + + +def pep440_split_post(ver: str) -> Tuple[str, Optional[int]]: + """Split pep440 version string at the post-release segment. + + Returns the release segments before the post-release and the + post-release version number (or -1 if no post-release segment is present). 
+ """ + vc = str.split(ver, ".post") + return vc[0], int(vc[1] or 0) if len(vc) == 2 else None + + +def render_pep440_pre(pieces: Dict[str, Any]) -> str: + """TAG[.postN.devDISTANCE] -- No -dirty. + + Exceptions: + 1: no tags. 0.post0.devDISTANCE + """ + if pieces["closest-tag"]: if pieces["distance"]: - rendered += ".post.dev%%d" %% pieces["distance"] + # update the post release segment + tag_version, post_version = pep440_split_post(pieces["closest-tag"]) + rendered = tag_version + if post_version is not None: + rendered += ".post%%d.dev%%d" %% (post_version + 1, pieces["distance"]) + else: + rendered += ".post0.dev%%d" %% (pieces["distance"]) + else: + # no commits, use the tag as the version + rendered = pieces["closest-tag"] else: # exception #1 - rendered = "0.post.dev%%d" %% pieces["distance"] + rendered = "0.post0.dev%%d" %% pieces["distance"] return rendered -def render_pep440_post(pieces): +def render_pep440_post(pieces: Dict[str, Any]) -> str: """TAG[.postDISTANCE[.dev0]+gHEX] . The ".dev0" means dirty. Note that .dev0 sorts backwards @@ -811,12 +1017,41 @@ def render_pep440_post(pieces): return rendered -def render_pep440_old(pieces): +def render_pep440_post_branch(pieces: Dict[str, Any]) -> str: + """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] . + + The ".dev0" means not master branch. + + Exceptions: + 1: no tags. 0.postDISTANCE[.dev0]+gHEX[.dirty] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += ".post%%d" %% pieces["distance"] + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += plus_or_dot(pieces) + rendered += "g%%s" %% pieces["short"] + if pieces["dirty"]: + rendered += ".dirty" + else: + # exception #1 + rendered = "0.post%%d" %% pieces["distance"] + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += "+g%%s" %% pieces["short"] + if pieces["dirty"]: + rendered += ".dirty" + return rendered + + +def render_pep440_old(pieces: Dict[str, Any]) -> str: """TAG[.postDISTANCE[.dev0]] . The ".dev0" means dirty. - Eexceptions: + Exceptions: 1: no tags. 0.postDISTANCE[.dev0] """ if pieces["closest-tag"]: @@ -833,7 +1068,7 @@ def render_pep440_old(pieces): return rendered -def render_git_describe(pieces): +def render_git_describe(pieces: Dict[str, Any]) -> str: """TAG[-DISTANCE-gHEX][-dirty]. Like 'git describe --tags --dirty --always'. @@ -853,7 +1088,7 @@ def render_git_describe(pieces): return rendered -def render_git_describe_long(pieces): +def render_git_describe_long(pieces: Dict[str, Any]) -> str: """TAG-DISTANCE-gHEX[-dirty]. Like 'git describe --tags --dirty --always -long'. 
@@ -873,7 +1108,7 @@ def render_git_describe_long(pieces): return rendered -def render(pieces, style): +def render(pieces: Dict[str, Any], style: str) -> Dict[str, Any]: """Render the given version pieces into the requested style.""" if pieces["error"]: return {"version": "unknown", @@ -887,10 +1122,14 @@ def render(pieces, style): if style == "pep440": rendered = render_pep440(pieces) + elif style == "pep440-branch": + rendered = render_pep440_branch(pieces) elif style == "pep440-pre": rendered = render_pep440_pre(pieces) elif style == "pep440-post": rendered = render_pep440_post(pieces) + elif style == "pep440-post-branch": + rendered = render_pep440_post_branch(pieces) elif style == "pep440-old": rendered = render_pep440_old(pieces) elif style == "git-describe": @@ -905,7 +1144,7 @@ def render(pieces, style): "date": pieces.get("date")} -def get_versions(): +def get_versions() -> Dict[str, Any]: """Get version information or return default if unable to do so.""" # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have # __file__, we can work backwards from there to the root. Some @@ -926,7 +1165,7 @@ def get_versions(): # versionfile_source is the relative path from the top of the source # tree (where the .git directory might live) to this file. Invert # this to find the root from __file__. - for i in cfg.versionfile_source.split('/'): + for _ in cfg.versionfile_source.split('/'): root = os.path.dirname(root) except NameError: return {"version": "0+unknown", "full-revisionid": None, @@ -953,41 +1192,48 @@ def get_versions(): @register_vcs_handler("git", "get_keywords") -def git_get_keywords(versionfile_abs): +def git_get_keywords(versionfile_abs: str) -> Dict[str, str]: """Extract version information from the given file.""" # the code embedded in _version.py can just fetch the value of these # keywords. When used from setup.py, we don't want to import _version.py, # so we do it with a regexp instead. This function is not used from # _version.py. - keywords = {} + keywords: Dict[str, str] = {} try: - f = open(versionfile_abs, "r") - for line in f.readlines(): - if line.strip().startswith("git_refnames ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["refnames"] = mo.group(1) - if line.strip().startswith("git_full ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["full"] = mo.group(1) - if line.strip().startswith("git_date ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["date"] = mo.group(1) - f.close() - except EnvironmentError: + with open(versionfile_abs, "r") as fobj: + for line in fobj: + if line.strip().startswith("git_refnames ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["refnames"] = mo.group(1) + if line.strip().startswith("git_full ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["full"] = mo.group(1) + if line.strip().startswith("git_date ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["date"] = mo.group(1) + except OSError: pass return keywords @register_vcs_handler("git", "keywords") -def git_versions_from_keywords(keywords, tag_prefix, verbose): +def git_versions_from_keywords( + keywords: Dict[str, str], + tag_prefix: str, + verbose: bool, +) -> Dict[str, Any]: """Get version information from git keywords.""" - if not keywords: - raise NotThisMethod("no keywords at all, weird") + if "refnames" not in keywords: + raise NotThisMethod("Short version file found") date = keywords.get("date") if date is not None: + # Use only the last line. 
Previous lines may contain GPG signature + # information. + date = date.splitlines()[-1] + # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 # -like" string, which we must then edit to make compliant), because @@ -1000,11 +1246,11 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose): if verbose: print("keywords are unexpanded, not using") raise NotThisMethod("unexpanded keywords, not a git-archive tarball") - refs = set([r.strip() for r in refnames.strip("()").split(",")]) + refs = {r.strip() for r in refnames.strip("()").split(",")} # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of # just "foo-1.0". If we see a "tag: " prefix, prefer those. TAG = "tag: " - tags = set([r[len(TAG) :] for r in refs if r.startswith(TAG)]) + tags = {r[len(TAG):] for r in refs if r.startswith(TAG)} if not tags: # Either we're using git < 1.8.3, or there really are no tags. We use # a heuristic: assume all version tags have a digit. The old git %d @@ -1013,7 +1259,7 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose): # between branches and tags. By ignoring refnames without digits, we # filter out many common branch names like "release" and # "stabilization", as well as "HEAD" and "master". - tags = set([r for r in refs if re.search(r"\d", r)]) + tags = {r for r in refs if re.search(r'\d', r)} if verbose: print("discarding '%s', no digits" % ",".join(refs - tags)) if verbose: @@ -1021,30 +1267,33 @@ def git_versions_from_keywords(keywords, tag_prefix, verbose): for ref in sorted(tags): # sorting will prefer e.g. "2.0" over "2.0rc1" if ref.startswith(tag_prefix): - r = ref[len(tag_prefix) :] + r = ref[len(tag_prefix):] + # Filter out refs that exactly match prefix or that don't start + # with a number once the prefix is stripped (mostly a concern + # when prefix is '') + if not re.match(r'\d', r): + continue if verbose: print("picking %s" % r) - return { - "version": r, - "full-revisionid": keywords["full"].strip(), - "dirty": False, - "error": None, - "date": date, - } + return {"version": r, + "full-revisionid": keywords["full"].strip(), + "dirty": False, "error": None, + "date": date} # no suitable tags, so version is "0+unknown", but full hex is still there if verbose: print("no suitable tags, using unknown + full revision id") - return { - "version": "0+unknown", - "full-revisionid": keywords["full"].strip(), - "dirty": False, - "error": "no suitable tags", - "date": None, - } + return {"version": "0+unknown", + "full-revisionid": keywords["full"].strip(), + "dirty": False, "error": "no suitable tags", "date": None} @register_vcs_handler("git", "pieces_from_vcs") -def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): +def git_pieces_from_vcs( + tag_prefix: str, + root: str, + verbose: bool, + runner: Callable = run_command +) -> Dict[str, Any]: """Get version from 'git describe' in the root of the source tree. This only gets called if the git-archive 'subst' keywords were *not* @@ -1055,7 +1304,15 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): if sys.platform == "win32": GITS = ["git.cmd", "git.exe"] - out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, hide_stderr=True) + # GIT_DIR can interfere with correct operation of Versioneer. + # It may be intended to be passed to the Versioneer-versioned project, + # but that should not change where we get our version from. 
+ env = os.environ.copy() + env.pop("GIT_DIR", None) + runner = functools.partial(runner, env=env) + + _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root, + hide_stderr=not verbose) if rc != 0: if verbose: print("Directory %s not under git control" % root) @@ -1063,33 +1320,57 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] # if there isn't one, this yields HEX[-dirty] (no NUM) - describe_out, rc = run_command( - GITS, - [ - "describe", - "--tags", - "--dirty", - "--always", - "--long", - "--match", - "%s*" % tag_prefix, - ], - cwd=root, - ) + describe_out, rc = runner(GITS, [ + "describe", "--tags", "--dirty", "--always", "--long", + "--match", f"{tag_prefix}[[:digit:]]*" + ], cwd=root) # --long was added in git-1.5.5 if describe_out is None: raise NotThisMethod("'git describe' failed") describe_out = describe_out.strip() - full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) + full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root) if full_out is None: raise NotThisMethod("'git rev-parse' failed") full_out = full_out.strip() - pieces = {} + pieces: Dict[str, Any] = {} pieces["long"] = full_out pieces["short"] = full_out[:7] # maybe improved later pieces["error"] = None + branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"], + cwd=root) + # --abbrev-ref was added in git-1.6.3 + if rc != 0 or branch_name is None: + raise NotThisMethod("'git rev-parse --abbrev-ref' returned error") + branch_name = branch_name.strip() + + if branch_name == "HEAD": + # If we aren't exactly on a branch, pick a branch which represents + # the current commit. If all else fails, we are on a branchless + # commit. + branches, rc = runner(GITS, ["branch", "--contains"], cwd=root) + # --contains was added in git-1.5.4 + if rc != 0 or branches is None: + raise NotThisMethod("'git branch --contains' returned error") + branches = branches.split("\n") + + # Remove the first line if we're running detached + if "(" in branches[0]: + branches.pop(0) + + # Strip off the leading "* " from the list of branches. + branches = [branch[2:] for branch in branches] + if "master" in branches: + branch_name = "master" + elif not branches: + branch_name = None + else: + # Pick the first branch that is returned. Good or bad. + branch_name = branches[0] + + pieces["branch"] = branch_name + # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] # TAG might have hyphens. git_describe = describe_out @@ -1098,16 +1379,17 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): dirty = git_describe.endswith("-dirty") pieces["dirty"] = dirty if dirty: - git_describe = git_describe[: git_describe.rindex("-dirty")] + git_describe = git_describe[:git_describe.rindex("-dirty")] # now we have TAG-NUM-gHEX or HEX if "-" in git_describe: # TAG-NUM-gHEX - mo = re.search(r"^(.+)-(\d+)-g([0-9a-f]+)$", git_describe) + mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) if not mo: - # unparseable. Maybe git-describe is misbehaving? - pieces["error"] = "unable to parse git-describe output: '%s'" % describe_out + # unparsable. Maybe git-describe is misbehaving? 
+ pieces["error"] = ("unable to parse git-describe output: '%s'" + % describe_out) return pieces # tag @@ -1116,12 +1398,10 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): if verbose: fmt = "tag '%s' doesn't start with prefix '%s'" print(fmt % (full_tag, tag_prefix)) - pieces["error"] = "tag '%s' doesn't start with prefix '%s'" % ( - full_tag, - tag_prefix, - ) + pieces["error"] = ("tag '%s' doesn't start with prefix '%s'" + % (full_tag, tag_prefix)) return pieces - pieces["closest-tag"] = full_tag[len(tag_prefix) :] + pieces["closest-tag"] = full_tag[len(tag_prefix):] # distance: number of commits since tag pieces["distance"] = int(mo.group(2)) @@ -1132,19 +1412,20 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): else: # HEX: no tags pieces["closest-tag"] = None - count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], cwd=root) - pieces["distance"] = int(count_out) # total number of commits + out, rc = runner(GITS, ["rev-list", "HEAD", "--left-right"], cwd=root) + pieces["distance"] = len(out.split()) # total number of commits # commit date: see ISO-8601 comment in git_versions_from_keywords() - date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[ - 0 - ].strip() + date = runner(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip() + # Use only the last line. Previous lines may contain GPG signature + # information. + date = date.splitlines()[-1] pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) return pieces -def do_vcs_install(manifest_in, versionfile_source, ipy): +def do_vcs_install(versionfile_source: str, ipy: Optional[str]) -> None: """Git-specific installation logic for Versioneer. For Git, this means creating/changing .gitattributes to mark _version.py @@ -1153,36 +1434,40 @@ def do_vcs_install(manifest_in, versionfile_source, ipy): GITS = ["git"] if sys.platform == "win32": GITS = ["git.cmd", "git.exe"] - files = [manifest_in, versionfile_source] + files = [versionfile_source] if ipy: files.append(ipy) - try: - me = __file__ - if me.endswith(".pyc") or me.endswith(".pyo"): - me = os.path.splitext(me)[0] + ".py" - versioneer_file = os.path.relpath(me) - except NameError: - versioneer_file = "versioneer.py" - files.append(versioneer_file) + if "VERSIONEER_PEP518" not in globals(): + try: + my_path = __file__ + if my_path.endswith((".pyc", ".pyo")): + my_path = os.path.splitext(my_path)[0] + ".py" + versioneer_file = os.path.relpath(my_path) + except NameError: + versioneer_file = "versioneer.py" + files.append(versioneer_file) present = False try: - f = open(".gitattributes", "r") - for line in f.readlines(): - if line.strip().startswith(versionfile_source): - if "export-subst" in line.strip().split()[1:]: - present = True - f.close() - except EnvironmentError: + with open(".gitattributes", "r") as fobj: + for line in fobj: + if line.strip().startswith(versionfile_source): + if "export-subst" in line.strip().split()[1:]: + present = True + break + except OSError: pass if not present: - f = open(".gitattributes", "a+") - f.write("%s export-subst\n" % versionfile_source) - f.close() + with open(".gitattributes", "a+") as fobj: + fobj.write(f"{versionfile_source} export-subst\n") files.append(".gitattributes") run_command(GITS, ["add", "--"] + files) -def versions_from_parentdir(parentdir_prefix, root, verbose): +def versions_from_parentdir( + parentdir_prefix: str, + root: str, + verbose: bool, +) -> Dict[str, Any]: """Try to determine the version from 
the parent directory name. Source tarballs conventionally unpack into a directory that includes both @@ -1191,30 +1476,23 @@ def versions_from_parentdir(parentdir_prefix, root, verbose): """ rootdirs = [] - for i in range(3): + for _ in range(3): dirname = os.path.basename(root) if dirname.startswith(parentdir_prefix): - return { - "version": dirname[len(parentdir_prefix) :], - "full-revisionid": None, - "dirty": False, - "error": None, - "date": None, - } - else: - rootdirs.append(root) - root = os.path.dirname(root) # up a level + return {"version": dirname[len(parentdir_prefix):], + "full-revisionid": None, + "dirty": False, "error": None, "date": None} + rootdirs.append(root) + root = os.path.dirname(root) # up a level if verbose: - print( - "Tried directories %s but none started with prefix %s" - % (str(rootdirs), parentdir_prefix) - ) + print("Tried directories %s but none started with prefix %s" % + (str(rootdirs), parentdir_prefix)) raise NotThisMethod("rootdir doesn't start with parentdir_prefix") SHORT_VERSION_PY = """ -# This file was generated by 'versioneer.py' (0.18) from +# This file was generated by 'versioneer.py' (0.29) from # revision-control system data, or from the parent directory name of an # unpacked source archive. Distribution tarballs contain a pre-generated copy # of this file. @@ -1231,43 +1509,41 @@ def get_versions(): """ -def versions_from_file(filename): +def versions_from_file(filename: str) -> Dict[str, Any]: """Try to determine the version from _version.py if present.""" try: with open(filename) as f: contents = f.read() - except EnvironmentError: + except OSError: raise NotThisMethod("unable to read _version.py") - mo = re.search( - r"version_json = '''\n(.*)''' # END VERSION_JSON", contents, re.M | re.S - ) + mo = re.search(r"version_json = '''\n(.*)''' # END VERSION_JSON", + contents, re.M | re.S) if not mo: - mo = re.search( - r"version_json = '''\r\n(.*)''' # END VERSION_JSON", contents, re.M | re.S - ) + mo = re.search(r"version_json = '''\r\n(.*)''' # END VERSION_JSON", + contents, re.M | re.S) if not mo: raise NotThisMethod("no version_json in _version.py") return json.loads(mo.group(1)) -def write_to_version_file(filename, versions): +def write_to_version_file(filename: str, versions: Dict[str, Any]) -> None: """Write the given version number to the given _version.py file.""" - os.unlink(filename) - contents = json.dumps(versions, sort_keys=True, indent=1, separators=(",", ": ")) + contents = json.dumps(versions, sort_keys=True, + indent=1, separators=(",", ": ")) with open(filename, "w") as f: f.write(SHORT_VERSION_PY % contents) print("set %s to '%s'" % (filename, versions["version"])) -def plus_or_dot(pieces): +def plus_or_dot(pieces: Dict[str, Any]) -> str: """Return a + if we don't already have one, else return a .""" if "+" in pieces.get("closest-tag", ""): return "." return "+" -def render_pep440(pieces): +def render_pep440(pieces: Dict[str, Any]) -> str: """Build up version string, with post-release "local version identifier". Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you @@ -1285,29 +1561,78 @@ def render_pep440(pieces): rendered += ".dirty" else: # exception #1 - rendered = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"]) + rendered = "0+untagged.%d.g%s" % (pieces["distance"], + pieces["short"]) if pieces["dirty"]: rendered += ".dirty" return rendered -def render_pep440_pre(pieces): - """TAG[.post.devDISTANCE] -- No -dirty. 
+def render_pep440_branch(pieces: Dict[str, Any]) -> str: + """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] . + + The ".dev0" means not master branch. Note that .dev0 sorts backwards + (a feature branch will appear "older" than the master branch). Exceptions: - 1: no tags. 0.post.devDISTANCE + 1: no tags. 0[.dev0]+untagged.DISTANCE.gHEX[.dirty] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += plus_or_dot(pieces) + rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + else: + # exception #1 + rendered = "0" + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += "+untagged.%d.g%s" % (pieces["distance"], + pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + return rendered + + +def pep440_split_post(ver: str) -> Tuple[str, Optional[int]]: + """Split pep440 version string at the post-release segment. + + Returns the release segments before the post-release and the + post-release version number (or -1 if no post-release segment is present). + """ + vc = str.split(ver, ".post") + return vc[0], int(vc[1] or 0) if len(vc) == 2 else None + + +def render_pep440_pre(pieces: Dict[str, Any]) -> str: + """TAG[.postN.devDISTANCE] -- No -dirty. + + Exceptions: + 1: no tags. 0.post0.devDISTANCE + """ + if pieces["closest-tag"]: if pieces["distance"]: - rendered += ".post.dev%d" % pieces["distance"] + # update the post release segment + tag_version, post_version = pep440_split_post(pieces["closest-tag"]) + rendered = tag_version + if post_version is not None: + rendered += ".post%d.dev%d" % (post_version + 1, pieces["distance"]) + else: + rendered += ".post0.dev%d" % (pieces["distance"]) + else: + # no commits, use the tag as the version + rendered = pieces["closest-tag"] else: # exception #1 - rendered = "0.post.dev%d" % pieces["distance"] + rendered = "0.post0.dev%d" % pieces["distance"] return rendered -def render_pep440_post(pieces): +def render_pep440_post(pieces: Dict[str, Any]) -> str: """TAG[.postDISTANCE[.dev0]+gHEX] . The ".dev0" means dirty. Note that .dev0 sorts backwards @@ -1334,12 +1659,41 @@ def render_pep440_post(pieces): return rendered -def render_pep440_old(pieces): +def render_pep440_post_branch(pieces: Dict[str, Any]) -> str: + """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] . + + The ".dev0" means not master branch. + + Exceptions: + 1: no tags. 0.postDISTANCE[.dev0]+gHEX[.dirty] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += ".post%d" % pieces["distance"] + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += plus_or_dot(pieces) + rendered += "g%s" % pieces["short"] + if pieces["dirty"]: + rendered += ".dirty" + else: + # exception #1 + rendered = "0.post%d" % pieces["distance"] + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += "+g%s" % pieces["short"] + if pieces["dirty"]: + rendered += ".dirty" + return rendered + + +def render_pep440_old(pieces: Dict[str, Any]) -> str: """TAG[.postDISTANCE[.dev0]] . The ".dev0" means dirty. - Eexceptions: + Exceptions: 1: no tags. 0.postDISTANCE[.dev0] """ if pieces["closest-tag"]: @@ -1356,7 +1710,7 @@ def render_pep440_old(pieces): return rendered -def render_git_describe(pieces): +def render_git_describe(pieces: Dict[str, Any]) -> str: """TAG[-DISTANCE-gHEX][-dirty]. Like 'git describe --tags --dirty --always'. 
@@ -1376,7 +1730,7 @@ def render_git_describe(pieces): return rendered -def render_git_describe_long(pieces): +def render_git_describe_long(pieces: Dict[str, Any]) -> str: """TAG-DISTANCE-gHEX[-dirty]. Like 'git describe --tags --dirty --always -long'. @@ -1396,26 +1750,28 @@ def render_git_describe_long(pieces): return rendered -def render(pieces, style): +def render(pieces: Dict[str, Any], style: str) -> Dict[str, Any]: """Render the given version pieces into the requested style.""" if pieces["error"]: - return { - "version": "unknown", - "full-revisionid": pieces.get("long"), - "dirty": None, - "error": pieces["error"], - "date": None, - } + return {"version": "unknown", + "full-revisionid": pieces.get("long"), + "dirty": None, + "error": pieces["error"], + "date": None} if not style or style == "default": style = "pep440" # the default if style == "pep440": rendered = render_pep440(pieces) + elif style == "pep440-branch": + rendered = render_pep440_branch(pieces) elif style == "pep440-pre": rendered = render_pep440_pre(pieces) elif style == "pep440-post": rendered = render_pep440_post(pieces) + elif style == "pep440-post-branch": + rendered = render_pep440_post_branch(pieces) elif style == "pep440-old": rendered = render_pep440_old(pieces) elif style == "git-describe": @@ -1425,20 +1781,16 @@ def render(pieces, style): else: raise ValueError("unknown style '%s'" % style) - return { - "version": rendered, - "full-revisionid": pieces["long"], - "dirty": pieces["dirty"], - "error": None, - "date": pieces.get("date"), - } + return {"version": rendered, "full-revisionid": pieces["long"], + "dirty": pieces["dirty"], "error": None, + "date": pieces.get("date")} class VersioneerBadRootError(Exception): """The project root directory is unknown or missing key files.""" -def get_versions(verbose=False): +def get_versions(verbose: bool = False) -> Dict[str, Any]: """Get the project version from whatever source is available. Returns dict with two keys: 'version' and 'full'. @@ -1453,10 +1805,9 @@ def get_versions(verbose=False): assert cfg.VCS is not None, "please set [versioneer]VCS= in setup.cfg" handlers = HANDLERS.get(cfg.VCS) assert handlers, "unrecognized VCS '%s'" % cfg.VCS - verbose = verbose or cfg.verbose - assert ( - cfg.versionfile_source is not None - ), "please set versioneer.versionfile_source" + verbose = verbose or bool(cfg.verbose) # `bool()` used to avoid `None` + assert cfg.versionfile_source is not None, \ + "please set versioneer.versionfile_source" assert cfg.tag_prefix is not None, "please set versioneer.tag_prefix" versionfile_abs = os.path.join(root, cfg.versionfile_source) @@ -1510,22 +1861,22 @@ def get_versions(verbose=False): if verbose: print("unable to compute version") - return { - "version": "0+unknown", - "full-revisionid": None, - "dirty": None, - "error": "unable to compute version", - "date": None, - } + return {"version": "0+unknown", "full-revisionid": None, + "dirty": None, "error": "unable to compute version", + "date": None} -def get_version(): +def get_version() -> str: """Get the short version string for this project.""" return get_versions()["version"] -def get_cmdclass(): - """Get the custom setuptools/distutils subclasses used by Versioneer.""" +def get_cmdclass(cmdclass: Optional[Dict[str, Any]] = None): + """Get the custom setuptools subclasses used by Versioneer. + + If the package uses a different cmdclass (e.g. one from numpy), it + should be provide as an argument. 
+ """ if "versioneer" in sys.modules: del sys.modules["versioneer"] # this fixes the "python setup.py develop" case (also 'install' and @@ -1539,25 +1890,25 @@ def get_cmdclass(): # parent is protected against the child's "import versioneer". By # removing ourselves from sys.modules here, before the child build # happens, we protect the child from the parent's versioneer too. - # Also see https://github.com/warner/python-versioneer/issues/52 + # Also see https://github.com/python-versioneer/python-versioneer/issues/52 - cmds = {} + cmds = {} if cmdclass is None else cmdclass.copy() - # we add "version" to both distutils and setuptools - from distutils.core import Command + # we add "version" to setuptools + from setuptools import Command class cmd_version(Command): description = "report generated version string" - user_options = [] - boolean_options = [] + user_options: List[Tuple[str, str, str]] = [] + boolean_options: List[str] = [] - def initialize_options(self): + def initialize_options(self) -> None: pass - def finalize_options(self): + def finalize_options(self) -> None: pass - def run(self): + def run(self) -> None: vers = get_versions(verbose=True) print("Version: %s" % vers["version"]) print(" full-revisionid: %s" % vers.get("full-revisionid")) @@ -1565,10 +1916,9 @@ def run(self): print(" date: %s" % vers.get("date")) if vers["error"]: print(" error: %s" % vers["error"]) - cmds["version"] = cmd_version - # we override "build_py" in both distutils and setuptools + # we override "build_py" in setuptools # # most invocation pathways end up running build_py: # distutils/build -> build_py @@ -1583,30 +1933,68 @@ def run(self): # then does setup.py bdist_wheel, or sometimes setup.py install # setup.py egg_info -> ? + # pip install -e . and setuptool/editable_wheel will invoke build_py + # but the build_py command is not expected to copy any files. + # we override different "build_py" commands for both environments - if "setuptools" in sys.modules: - from setuptools.command.build_py import build_py as _build_py + if 'build_py' in cmds: + _build_py: Any = cmds['build_py'] else: - from distutils.command.build_py import build_py as _build_py + from setuptools.command.build_py import build_py as _build_py class cmd_build_py(_build_py): - def run(self): + def run(self) -> None: root = get_root() cfg = get_config_from_root(root) versions = get_versions() _build_py.run(self) + if getattr(self, "editable_mode", False): + # During editable installs `.py` and data files are + # not copied to build_lib + return # now locate _version.py in the new build/ directory and replace # it with an updated value if cfg.versionfile_build: - target_versionfile = os.path.join(self.build_lib, cfg.versionfile_build) + target_versionfile = os.path.join(self.build_lib, + cfg.versionfile_build) print("UPDATING %s" % target_versionfile) write_to_version_file(target_versionfile, versions) - cmds["build_py"] = cmd_build_py - if "cx_Freeze" in sys.modules: # cx_freeze enabled? - from cx_Freeze.dist import build_exe as _build_exe + if 'build_ext' in cmds: + _build_ext: Any = cmds['build_ext'] + else: + from setuptools.command.build_ext import build_ext as _build_ext + + class cmd_build_ext(_build_ext): + def run(self) -> None: + root = get_root() + cfg = get_config_from_root(root) + versions = get_versions() + _build_ext.run(self) + if self.inplace: + # build_ext --inplace will only build extensions in + # build/lib<..> dir with no _version.py to write to. 
+ # As in place builds will already have a _version.py + # in the module dir, we do not need to write one. + return + # now locate _version.py in the new build/ directory and replace + # it with an updated value + if not cfg.versionfile_build: + return + target_versionfile = os.path.join(self.build_lib, + cfg.versionfile_build) + if not os.path.exists(target_versionfile): + print(f"Warning: {target_versionfile} does not exist, skipping " + "version update. This can happen if you are running build_ext " + "without first running build_py.") + return + print("UPDATING %s" % target_versionfile) + write_to_version_file(target_versionfile, versions) + cmds["build_ext"] = cmd_build_ext + if "cx_Freeze" in sys.modules: # cx_freeze enabled? + from cx_Freeze.dist import build_exe as _build_exe # type: ignore # nczeczulin reports that py2exe won't like the pep440-style string # as FILEVERSION, but it can be used for PRODUCTVERSION, e.g. # setup(console=[{ @@ -1615,7 +2003,7 @@ def run(self): # ... class cmd_build_exe(_build_exe): - def run(self): + def run(self) -> None: root = get_root() cfg = get_config_from_root(root) versions = get_versions() @@ -1627,28 +2015,24 @@ def run(self): os.unlink(target_versionfile) with open(cfg.versionfile_source, "w") as f: LONG = LONG_VERSION_PY[cfg.VCS] - f.write( - LONG - % { - "DOLLAR": "$", - "STYLE": cfg.style, - "TAG_PREFIX": cfg.tag_prefix, - "PARENTDIR_PREFIX": cfg.parentdir_prefix, - "VERSIONFILE_SOURCE": cfg.versionfile_source, - } - ) - + f.write(LONG % + {"DOLLAR": "$", + "STYLE": cfg.style, + "TAG_PREFIX": cfg.tag_prefix, + "PARENTDIR_PREFIX": cfg.parentdir_prefix, + "VERSIONFILE_SOURCE": cfg.versionfile_source, + }) cmds["build_exe"] = cmd_build_exe del cmds["build_py"] - if "py2exe" in sys.modules: # py2exe enabled? + if 'py2exe' in sys.modules: # py2exe enabled? 
try: - from py2exe.distutils_buildexe import py2exe as _py2exe # py3 + from py2exe.setuptools_buildexe import py2exe as _py2exe # type: ignore except ImportError: - from py2exe.build_exe import py2exe as _py2exe # py2 + from py2exe.distutils_buildexe import py2exe as _py2exe # type: ignore class cmd_py2exe(_py2exe): - def run(self): + def run(self) -> None: root = get_root() cfg = get_config_from_root(root) versions = get_versions() @@ -1660,27 +2044,60 @@ def run(self): os.unlink(target_versionfile) with open(cfg.versionfile_source, "w") as f: LONG = LONG_VERSION_PY[cfg.VCS] - f.write( - LONG - % { - "DOLLAR": "$", - "STYLE": cfg.style, - "TAG_PREFIX": cfg.tag_prefix, - "PARENTDIR_PREFIX": cfg.parentdir_prefix, - "VERSIONFILE_SOURCE": cfg.versionfile_source, - } - ) - + f.write(LONG % + {"DOLLAR": "$", + "STYLE": cfg.style, + "TAG_PREFIX": cfg.tag_prefix, + "PARENTDIR_PREFIX": cfg.parentdir_prefix, + "VERSIONFILE_SOURCE": cfg.versionfile_source, + }) cmds["py2exe"] = cmd_py2exe + # sdist farms its file list building out to egg_info + if 'egg_info' in cmds: + _egg_info: Any = cmds['egg_info'] + else: + from setuptools.command.egg_info import egg_info as _egg_info + + class cmd_egg_info(_egg_info): + def find_sources(self) -> None: + # egg_info.find_sources builds the manifest list and writes it + # in one shot + super().find_sources() + + # Modify the filelist and normalize it + root = get_root() + cfg = get_config_from_root(root) + self.filelist.append('versioneer.py') + if cfg.versionfile_source: + # There are rare cases where versionfile_source might not be + # included by default, so we must be explicit + self.filelist.append(cfg.versionfile_source) + self.filelist.sort() + self.filelist.remove_duplicates() + + # The write method is hidden in the manifest_maker instance that + # generated the filelist and was thrown away + # We will instead replicate their final normalization (to unicode, + # and POSIX-style paths) + from setuptools import unicode_utils + normalized = [unicode_utils.filesys_decode(f).replace(os.sep, '/') + for f in self.filelist.files] + + manifest_filename = os.path.join(self.egg_info, 'SOURCES.txt') + with open(manifest_filename, 'w') as fobj: + fobj.write('\n'.join(normalized)) + + cmds['egg_info'] = cmd_egg_info + # we override different "sdist" commands for both environments - if "setuptools" in sys.modules: - from setuptools.command.sdist import sdist as _sdist + if 'sdist' in cmds: + _sdist: Any = cmds['sdist'] else: - from distutils.command.sdist import sdist as _sdist + from setuptools.command.sdist import sdist as _sdist class cmd_sdist(_sdist): - def run(self): + def run(self) -> None: versions = get_versions() self._versioneer_generated_versions = versions # unless we update this, the command will keep using the old @@ -1688,7 +2105,7 @@ def run(self): self.distribution.metadata.version = versions["version"] return _sdist.run(self) - def make_release_tree(self, base_dir, files): + def make_release_tree(self, base_dir: str, files: List[str]) -> None: root = get_root() cfg = get_config_from_root(root) _sdist.make_release_tree(self, base_dir, files) @@ -1697,10 +2114,8 @@ def make_release_tree(self, base_dir, files): # updated value target_versionfile = os.path.join(base_dir, cfg.versionfile_source) print("UPDATING %s" % target_versionfile) - write_to_version_file( - target_versionfile, self._versioneer_generated_versions - ) - + write_to_version_file(target_versionfile, + self._versioneer_generated_versions) cmds["sdist"] = cmd_sdist return cmds @@ -1743,25 
+2158,28 @@ def make_release_tree(self, base_dir, files): """ -INIT_PY_SNIPPET = """ +OLD_SNIPPET = """ from ._version import get_versions __version__ = get_versions()['version'] del get_versions """ +INIT_PY_SNIPPET = """ +from . import {0} +__version__ = {0}.get_versions()['version'] +""" + -def do_setup(): - """Main VCS-independent setup function for installing Versioneer.""" +def do_setup() -> int: + """Do main VCS-independent setup function for installing Versioneer.""" root = get_root() try: cfg = get_config_from_root(root) - except ( - EnvironmentError, - configparser.NoSectionError, - configparser.NoOptionError, - ) as e: - if isinstance(e, (EnvironmentError, configparser.NoSectionError)): - print("Adding sample versioneer config to setup.cfg", file=sys.stderr) + except (OSError, configparser.NoSectionError, + configparser.NoOptionError) as e: + if isinstance(e, (OSError, configparser.NoSectionError)): + print("Adding sample versioneer config to setup.cfg", + file=sys.stderr) with open(os.path.join(root, "setup.cfg"), "a") as f: f.write(SAMPLE_CONFIG) print(CONFIG_ERROR, file=sys.stderr) @@ -1770,76 +2188,46 @@ def do_setup(): print(" creating %s" % cfg.versionfile_source) with open(cfg.versionfile_source, "w") as f: LONG = LONG_VERSION_PY[cfg.VCS] - f.write( - LONG - % { - "DOLLAR": "$", - "STYLE": cfg.style, - "TAG_PREFIX": cfg.tag_prefix, - "PARENTDIR_PREFIX": cfg.parentdir_prefix, - "VERSIONFILE_SOURCE": cfg.versionfile_source, - } - ) - - ipy = os.path.join(os.path.dirname(cfg.versionfile_source), "__init__.py") + f.write(LONG % {"DOLLAR": "$", + "STYLE": cfg.style, + "TAG_PREFIX": cfg.tag_prefix, + "PARENTDIR_PREFIX": cfg.parentdir_prefix, + "VERSIONFILE_SOURCE": cfg.versionfile_source, + }) + + ipy = os.path.join(os.path.dirname(cfg.versionfile_source), + "__init__.py") + maybe_ipy: Optional[str] = ipy if os.path.exists(ipy): try: with open(ipy, "r") as f: old = f.read() - except EnvironmentError: + except OSError: old = "" - if INIT_PY_SNIPPET not in old: + module = os.path.splitext(os.path.basename(cfg.versionfile_source))[0] + snippet = INIT_PY_SNIPPET.format(module) + if OLD_SNIPPET in old: + print(" replacing boilerplate in %s" % ipy) + with open(ipy, "w") as f: + f.write(old.replace(OLD_SNIPPET, snippet)) + elif snippet not in old: print(" appending to %s" % ipy) with open(ipy, "a") as f: - f.write(INIT_PY_SNIPPET) + f.write(snippet) else: print(" %s unmodified" % ipy) else: print(" %s doesn't exist, ok" % ipy) - ipy = None - - # Make sure both the top-level "versioneer.py" and versionfile_source - # (PKG/_version.py, used by runtime code) are in MANIFEST.in, so - # they'll be copied into source distributions. Pip won't be able to - # install the package without this. - manifest_in = os.path.join(root, "MANIFEST.in") - simple_includes = set() - try: - with open(manifest_in, "r") as f: - for line in f: - if line.startswith("include "): - for include in line.split()[1:]: - simple_includes.add(include) - except EnvironmentError: - pass - # That doesn't cover everything MANIFEST.in can do - # (http://docs.python.org/2/distutils/sourcedist.html#commands), so - # it might give some false negatives. Appending redundant 'include' - # lines is safe, though. 
- if "versioneer.py" not in simple_includes: - print(" appending 'versioneer.py' to MANIFEST.in") - with open(manifest_in, "a") as f: - f.write("include versioneer.py\n") - else: - print(" 'versioneer.py' already in MANIFEST.in") - if cfg.versionfile_source not in simple_includes: - print( - " appending versionfile_source ('%s') to MANIFEST.in" - % cfg.versionfile_source - ) - with open(manifest_in, "a") as f: - f.write("include %s\n" % cfg.versionfile_source) - else: - print(" versionfile_source already in MANIFEST.in") + maybe_ipy = None # Make VCS-specific changes. For git, this means creating/changing # .gitattributes to mark _version.py for export-subst keyword # substitution. - do_vcs_install(manifest_in, cfg.versionfile_source, ipy) + do_vcs_install(cfg.versionfile_source, maybe_ipy) return 0 -def scan_setup_py(): +def scan_setup_py() -> int: """Validate the contents of setup.py against Versioneer's expectations.""" found = set() setters = False @@ -1876,10 +2264,14 @@ def scan_setup_py(): return errors +def setup_command() -> NoReturn: + """Set up Versioneer and exit with appropriate error code.""" + errors = do_setup() + errors += scan_setup_py() + sys.exit(1 if errors else 0) + + if __name__ == "__main__": cmd = sys.argv[1] if cmd == "setup": - errors = do_setup() - errors += scan_setup_py() - if errors: - sys.exit(1) + setup_command() From 4afa5f14b082b5faebce21feb4abe7b246286fb0 Mon Sep 17 00:00:00 2001 From: "A. Delepoulle" <36040805+AntSimi@users.noreply.github.com> Date: Mon, 29 Jul 2024 15:22:27 +0200 Subject: [PATCH 101/115] update for documentation --- .github/workflows/python-app.yml | 2 +- .readthedocs.yml | 4 +++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/.github/workflows/python-app.yml b/.github/workflows/python-app.yml index 7c93faae..f2f4753e 100644 --- a/.github/workflows/python-app.yml +++ b/.github/workflows/python-app.yml @@ -10,7 +10,7 @@ jobs: strategy: matrix: # os: [ubuntu-latest, macos-latest, windows-latest] - os: [ubuntu-lts-latest, windows-latest] + os: [ubuntu-latest, windows-latest] python_version: ['3.10', '3.11', '3.12'] name: Run py eddy tracker build tests runs-on: ${{ matrix.os }} diff --git a/.readthedocs.yml b/.readthedocs.yml index a04495b6..4b698a62 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -4,8 +4,10 @@ conda: build: os: ubuntu-lts-latest tools: - python: "3.10" + python: "mambaforge-latest" python: install: - method: setuptools path: . +sphinx: + configuration: docs/conf.py \ No newline at end of file From 3a54bbb6e50ebc38dcb334539dcb5b456a408127 Mon Sep 17 00:00:00 2001 From: "A. Delepoulle" <36040805+AntSimi@users.noreply.github.com> Date: Mon, 29 Jul 2024 15:24:56 +0200 Subject: [PATCH 102/115] again doc --- .readthedocs.yml | 2 -- 1 file changed, 2 deletions(-) diff --git a/.readthedocs.yml b/.readthedocs.yml index 4b698a62..ddfbb747 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -9,5 +9,3 @@ python: install: - method: setuptools path: . -sphinx: - configuration: docs/conf.py \ No newline at end of file From d5d3aed5f4ac51b361697638144fc19410f04093 Mon Sep 17 00:00:00 2001 From: "A. 
Delepoulle" <36040805+AntSimi@users.noreply.github.com> Date: Tue, 12 Nov 2024 17:02:22 +0100 Subject: [PATCH 103/115] correction on lagerloef uv --- src/py_eddy_tracker/dataset/grid.py | 67 +++++++++++++---------------- 1 file changed, 29 insertions(+), 38 deletions(-) diff --git a/src/py_eddy_tracker/dataset/grid.py b/src/py_eddy_tracker/dataset/grid.py index edb96bac..d8a48f69 100644 --- a/src/py_eddy_tracker/dataset/grid.py +++ b/src/py_eddy_tracker/dataset/grid.py @@ -1670,7 +1670,7 @@ def spectrum_lonlat(self, grid_name, area=None, ref=None, **kwargs): (lat_content[0], lat_content[1] / ref_lat_content[1]), ) - def compute_finite_difference(self, data, schema=1, mode="reflect", vertical=False): + def compute_finite_difference(self, data, schema=1, mode="reflect", vertical=False, second=False): if not isinstance(schema, int) and schema < 1: raise Exception("schema must be a positive int") @@ -1694,13 +1694,16 @@ def compute_finite_difference(self, data, schema=1, mode="reflect", vertical=Fal data2[:schema] = nan # Distance for one degree - d = self.EARTH_RADIUS * 2 * pi / 360 + d = self.EARTH_RADIUS * 2 * pi / 360 * 2 * schema # Mulitply by 2 step if vertical: - d *= self.ystep * 2 * schema + d *= self.ystep else: - d *= self.xstep * cos(deg2rad(self.y_c)) * 2 * schema - return (data1 - data2) / d + d *= self.xstep * cos(deg2rad(self.y_c)) + if second: + return (data1 + data2 - 2 * data) / (d ** 2 / 4) + else: + return (data1 - data2) / d def compute_stencil( self, data, stencil_halfwidth=4, mode="reflect", vertical=False @@ -1787,13 +1790,14 @@ def compute_stencil( ) return ma.array(g, mask=m) - def add_uv_lagerloef(self, grid_height, uname="u", vname="v", schema=15): - self.add_uv(grid_height, uname, vname) + def add_uv_lagerloef(self, grid_height, uname="u", vname="v", schema=15, **kwargs): + self.add_uv(grid_height, uname, vname, **kwargs) latmax = 5 - _, (i_start, i_end) = self.nearest_grd_indice((0, 0), (-latmax, latmax)) + _, i_start = self.nearest_grd_indice(0, -latmax) + _, i_end = self.nearest_grd_indice(0, latmax) sl = slice(i_start, i_end) # Divide by sideral day - lat = self.y_c[sl] + lat = self.y_c gob = ( cos(deg2rad(lat)) * ones((self.x_c.shape[0], 1)) @@ -1807,39 +1811,26 @@ def add_uv_lagerloef(self, grid_height, uname="u", vname="v", schema=15): mode = "wrap" if self.is_circular() else "reflect" # fill data to compute a finite difference on all point - data = self.convolve_filter_with_dynamic_kernel( - grid_height, - self.kernel_bessel, - lat_max=10, - wave_length=500, - order=1, - extend=0.1, - ) - data = self.convolve_filter_with_dynamic_kernel( - data, self.kernel_bessel, lat_max=10, wave_length=500, order=1, extend=0.1 - ) - data = self.convolve_filter_with_dynamic_kernel( - data, self.kernel_bessel, lat_max=10, wave_length=500, order=1, extend=0.1 - ) + kw_filter = dict(kernel_func=self.kernel_bessel, order=1, extend=.1) + data = self.convolve_filter_with_dynamic_kernel(grid_height, wave_length=500, **kw_filter, lat_max=6+5+2+3) v_lagerloef = ( self.compute_finite_difference( - self.compute_finite_difference(data, mode=mode, schema=schema), - mode=mode, - schema=schema, - )[:, sl] - * gob - ) - u_lagerloef = ( - -self.compute_finite_difference( - self.compute_finite_difference(data, vertical=True, schema=schema), - vertical=True, - schema=schema, - )[:, sl] + self.compute_finite_difference(data, mode=mode, schema=1), + vertical=True, schema=1 + ) * gob ) - w = 1 - exp(-((lat / 2.2) ** 2)) - self.vars[vname][:, sl] = self.vars[vname][:, sl] * w + v_lagerloef * (1 
- w)
+        u_lagerloef = -self.compute_finite_difference(data, vertical=True, schema=schema, second=True) * gob
+
+        v_lagerloef = self.convolve_filter_with_dynamic_kernel(v_lagerloef, wave_length=195, **kw_filter, lat_max=6 + 5 + 2)
+        v_lagerloef = self.convolve_filter_with_dynamic_kernel(v_lagerloef, wave_length=416, **kw_filter, lat_max=6 + 5)
+        v_lagerloef = self.convolve_filter_with_dynamic_kernel(v_lagerloef, wave_length=416, **kw_filter, lat_max=6)
+        u_lagerloef = self.convolve_filter_with_dynamic_kernel(u_lagerloef, wave_length=195, **kw_filter, lat_max=6 + 5 + 2)
+        u_lagerloef = self.convolve_filter_with_dynamic_kernel(u_lagerloef, wave_length=416, **kw_filter, lat_max=6 + 5)
+        u_lagerloef = self.convolve_filter_with_dynamic_kernel(u_lagerloef, wave_length=416, **kw_filter, lat_max=6)
+        w = 1 - exp(-((lat[sl] / 2.2) ** 2))
+        self.vars[vname][:, sl] = self.vars[vname][:, sl] * w + v_lagerloef[:, sl] * (1 - w)
+        self.vars[uname][:, sl] = self.vars[uname][:, sl] * w + u_lagerloef[:, sl] * (1 - w)
 
     def add_uv(self, grid_height, uname="u", vname="v", stencil_halfwidth=4):
         r"""Compute a u and v grid

From f6ae4ee6d6d21e70dd75344e85617978b6306637 Mon Sep 17 00:00:00 2001
From: "A. Delepoulle" <36040805+AntSimi@users.noreply.github.com>
Date: Fri, 15 Nov 2024 16:14:20 +0100
Subject: [PATCH 104/115] fix matplotlib and zarr version

---
 environment.yml  | 1 -
 requirements.txt | 4 ++--
 2 files changed, 2 insertions(+), 3 deletions(-)

diff --git a/environment.yml b/environment.yml
index 12ce70e7..e9a05ac0 100644
--- a/environment.yml
+++ b/environment.yml
@@ -1,7 +1,6 @@
 name: binder-pyeddytracker
 channels:
   - conda-forge
-  - defaults
 dependencies:
   - python=3.10
   - pip
diff --git a/requirements.txt b/requirements.txt
index 4c8af099..556cabbf 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,11 +1,11 @@
-matplotlib
+matplotlib < 3.8 # need an update of contour management
 opencv-python
 pint
 polygon3
 pyyaml
 requests
 scipy
-zarr
+zarr < 3.0
 netCDF4
 numpy
 numba
\ No newline at end of file

From 66f2f313187dd30920db8d80a0444aff04259443 Mon Sep 17 00:00:00 2001
From: "A. Delepoulle" <36040805+AntSimi@users.noreply.github.com>
Date: Fri, 15 Nov 2024 16:18:09 +0100
Subject: [PATCH 105/115] add requirements to environment doc

---
 doc/environment.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/doc/environment.yml b/doc/environment.yml
index 89fcbe9c..065c1027 100644
--- a/doc/environment.yml
+++ b/doc/environment.yml
@@ -1,11 +1,11 @@
 channels:
   - conda-forge
-  - defaults
 dependencies:
   - python=3.10
   - ffmpeg
   - pip
   - pip:
+    - -r ../requirements.txt
     - sphinx-gallery
     - sphinx_rtd_theme
     - sphinx>=3.1

From 449b7f87292adbcbd9fb4c2b71a4890176282066 Mon Sep 17 00:00:00 2001
From: "A. 
Delepoulle" <36040805+AntSimi@users.noreply.github.com> Date: Tue, 19 Nov 2024 10:09:53 +0100 Subject: [PATCH 106/115] update python version condition --- doc/environment.yml | 2 +- environment.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/environment.yml b/doc/environment.yml index 065c1027..7cf02b76 100644 --- a/doc/environment.yml +++ b/doc/environment.yml @@ -1,7 +1,7 @@ channels: - conda-forge dependencies: - - python=3.10 + - python>=3.10 - ffmpeg - pip - pip: diff --git a/environment.yml b/environment.yml index e9a05ac0..819d28d7 100644 --- a/environment.yml +++ b/environment.yml @@ -2,7 +2,7 @@ name: binder-pyeddytracker channels: - conda-forge dependencies: - - python=3.10 + - python>=3.10 - pip - ffmpeg - pip: From 99a848fa0cd38693ee5d62ac6a2e6a2324996cdb Mon Sep 17 00:00:00 2001 From: Antoine Delepoulle <36040805+AntSimi@users.noreply.github.com> Date: Wed, 20 Nov 2024 15:24:34 +0100 Subject: [PATCH 107/115] solve doc problem (#255) * reject python 3.13 * use pip instead of setuptools * add sample id to install --- .readthedocs.yml | 2 +- doc/environment.yml | 3 ++- environment.yml | 2 +- .../pet_eddy_detection_ACC.py | 3 ++- src/py_eddy_tracker/data/__init__.py | 19 +++++++++++++------ 5 files changed, 19 insertions(+), 10 deletions(-) diff --git a/.readthedocs.yml b/.readthedocs.yml index ddfbb747..5ac02e12 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -7,5 +7,5 @@ build: python: "mambaforge-latest" python: install: - - method: setuptools + - method: pip path: . diff --git a/doc/environment.yml b/doc/environment.yml index 7cf02b76..063a60de 100644 --- a/doc/environment.yml +++ b/doc/environment.yml @@ -1,11 +1,12 @@ channels: - conda-forge dependencies: - - python>=3.10 + - python>=3.10, <3.13 - ffmpeg - pip - pip: - -r ../requirements.txt + - git+https://github.com/AntSimi/py-eddy-tracker-sample-id.git - sphinx-gallery - sphinx_rtd_theme - sphinx>=3.1 diff --git a/environment.yml b/environment.yml index 819d28d7..e94c7bc1 100644 --- a/environment.yml +++ b/environment.yml @@ -2,7 +2,7 @@ name: binder-pyeddytracker channels: - conda-forge dependencies: - - python>=3.10 + - python>=3.10, <3.13 - pip - ffmpeg - pip: diff --git a/examples/02_eddy_identification/pet_eddy_detection_ACC.py b/examples/02_eddy_identification/pet_eddy_detection_ACC.py index 3d3d4ac1..d12c62f3 100644 --- a/examples/02_eddy_identification/pet_eddy_detection_ACC.py +++ b/examples/02_eddy_identification/pet_eddy_detection_ACC.py @@ -7,6 +7,7 @@ Two detections are provided : with a filtered ADT and without filtering """ + from datetime import datetime from matplotlib import pyplot as plt, style @@ -80,7 +81,7 @@ def set_fancy_labels(fig, ticklabelsize=14, labelsize=14, labelweight="semibold" # Identification # ^^^^^^^^^^^^^^ # Run the identification step with slices of 2 mm -date = datetime(2016, 5, 15) +date = datetime(2019, 2, 23) kw_ident = dict( date=date, step=0.002, shape_error=70, sampling=30, uname="u", vname="v" ) diff --git a/src/py_eddy_tracker/data/__init__.py b/src/py_eddy_tracker/data/__init__.py index f14fee87..bf062983 100644 --- a/src/py_eddy_tracker/data/__init__.py +++ b/src/py_eddy_tracker/data/__init__.py @@ -8,6 +8,7 @@ 20160515 adt None None longitude latitude . 
\ --cut 800 --fil 1 """ + import io import lzma from os import path @@ -26,14 +27,20 @@ def get_remote_demo_sample(path): if path.endswith(".nc"): return io.BytesIO(content) else: - if path.endswith(".nc"): + try: + import py_eddy_tracker_sample_id + if path.endswith(".nc"): + return py_eddy_tracker_sample_id.get_remote_demo_sample(path) + content = open(py_eddy_tracker_sample_id.get_remote_demo_sample(f"{path}.tar.xz"), "rb").read() + except: + if path.endswith(".nc"): + content = requests.get( + f"https://github.com/AntSimi/py-eddy-tracker-sample-id/raw/master/{path}" + ).content + return io.BytesIO(content) content = requests.get( - f"https://github.com/AntSimi/py-eddy-tracker-sample-id/raw/master/{path}" + f"https://github.com/AntSimi/py-eddy-tracker-sample-id/raw/master/{path}.tar.xz" ).content - return io.BytesIO(content) - content = requests.get( - f"https://github.com/AntSimi/py-eddy-tracker-sample-id/raw/master/{path}.tar.xz" - ).content # Tar module could manage lzma tar, but it will apply uncompress for each extractfile tar = tarfile.open(mode="r", fileobj=io.BytesIO(lzma.decompress(content))) From 0a4ed708d87cc4073634beac8fb88a4b16747f0e Mon Sep 17 00:00:00 2001 From: "A. Delepoulle" <36040805+AntSimi@users.noreply.github.com> Date: Mon, 6 Jan 2025 16:18:47 +0100 Subject: [PATCH 108/115] bug correction toward direction #252 --- src/py_eddy_tracker/appli/network.py | 2 +- src/py_eddy_tracker/dataset/grid.py | 13 ++++++------- src/py_eddy_tracker/observations/tracking.py | 2 +- 3 files changed, 8 insertions(+), 9 deletions(-) diff --git a/src/py_eddy_tracker/appli/network.py b/src/py_eddy_tracker/appli/network.py index b8c2da51..0a3d06ca 100644 --- a/src/py_eddy_tracker/appli/network.py +++ b/src/py_eddy_tracker/appli/network.py @@ -283,7 +283,7 @@ def previous_obs(d, i_seg): def display_compare(ref, others): def display(value, ref=None): if ref: - outs = [f"{v/ref[k] * 100:.1f}% ({v})" for k, v in value.items()] + outs = [f"{v / ref[k] * 100:.1f}% ({v})" for k, v in value.items()] else: outs = value return "".join([f"{v:^18}" for v in outs]) diff --git a/src/py_eddy_tracker/dataset/grid.py b/src/py_eddy_tracker/dataset/grid.py index d8a48f69..f15503b2 100644 --- a/src/py_eddy_tracker/dataset/grid.py +++ b/src/py_eddy_tracker/dataset/grid.py @@ -9,6 +9,7 @@ from matplotlib.path import Path as BasePath from netCDF4 import Dataset from numba import njit, prange, types as numba_types +import numpy as np from numpy import ( arange, array, @@ -35,7 +36,6 @@ percentile, pi, radians, - round_, sin, sinc, sqrt, @@ -2251,12 +2251,11 @@ def compute_pixel_path(x0, y0, x1, y1, x_ori, y_ori, x_step, y_step, nb_x): i_x1 = empty(nx, dtype=numba_types.int_) i_y0 = empty(nx, dtype=numba_types.int_) i_y1 = empty(nx, dtype=numba_types.int_) - # Because round_ is not accepted with array in numba for i in range(nx): - i_x0[i] = round_(((x0[i] - x_ori) % 360) / x_step) - i_x1[i] = round_(((x1[i] - x_ori) % 360) / x_step) - i_y0[i] = round_((y0[i] - y_ori) / y_step) - i_y1[i] = round_((y1[i] - y_ori) / y_step) + i_x0[i] = np.round(((x0[i] - x_ori) % 360) / x_step) + i_x1[i] = np.round(((x1[i] - x_ori) % 360) / x_step) + i_y0[i] = np.round((y0[i] - y_ori) / y_step) + i_y1[i] = np.round((y1[i] - y_ori) / y_step) # Delta index of x d_x = i_x1 - i_x0 d_x = (d_x + nb_x // 2) % nb_x - (nb_x // 2) @@ -2941,7 +2940,7 @@ def compute_stencil(x, y, h, m, earth_radius, vertical=False, stencil_halfwidth= h_3, h_2, h_1, h0 = h[-4, j], h[-3, j], h[-2, j], h[-1, j] m_3, m_2, m_1, m0 = m[-4, j], m[-3, j], m[-2, j], 
m[-1, j] else: - m_3, m_2, m_1, m0 = False, False, False, False + m_3, m_2, m_1, m0 = True, True, True, True h1, h2, h3, h4 = h[0, j], h[1, j], h[2, j], h[3, j] m1, m2, m3, m4 = m[0, j], m[1, j], m[2, j], m[3, j] for i in range(nb_x): diff --git a/src/py_eddy_tracker/observations/tracking.py b/src/py_eddy_tracker/observations/tracking.py index 164f9724..fa1c1f93 100644 --- a/src/py_eddy_tracker/observations/tracking.py +++ b/src/py_eddy_tracker/observations/tracking.py @@ -380,7 +380,7 @@ def extract_toward_direction(self, west=True, delta_lon=None): d_lon = lon[i1] - lon[i0] m = d_lon < 0 if west else d_lon > 0 if delta_lon is not None: - m *= delta_lon < d_lon + m *= delta_lon < abs(d_lon) m = m.repeat(nb) return self.extract_with_mask(m) From c069878e3f69cd908c8bef6906f05ca4f015ef0e Mon Sep 17 00:00:00 2001 From: Antoine Delepoulle <36040805+AntSimi@users.noreply.github.com> Date: Wed, 20 Nov 2024 15:24:34 +0100 Subject: [PATCH 109/115] Add method to get period information for each network --- src/py_eddy_tracker/observations/network.py | 34 +++++++++++++++++---- 1 file changed, 28 insertions(+), 6 deletions(-) diff --git a/src/py_eddy_tracker/observations/network.py b/src/py_eddy_tracker/observations/network.py index a2e2daed..87ef4a78 100644 --- a/src/py_eddy_tracker/observations/network.py +++ b/src/py_eddy_tracker/observations/network.py @@ -9,6 +9,7 @@ import netCDF4 from numba import njit, types as nb_types from numba.typed import List +import numpy as np from numpy import ( arange, array, @@ -124,7 +125,7 @@ def __repr__(self): f"Atlas with {self.nb_network} networks ({self.nb_network / period:0.0f} networks/year)," f" {self.nb_segment} segments ({self.nb_segment / period:0.0f} segments/year), {len(self)} observations ({len(self) / period:0.0f} observations/year)", f" {m_event.size} merging ({m_event.size / period:0.0f} merging/year), {s_event.size} splitting ({s_event.size / period:0.0f} splitting/year)", - f" with {(nb_by_network > big).sum()} network with more than {big} obs and the biggest have {nb_by_network.max()} observations ({nb_by_network[nb_by_network> big].sum()} observations cumulate)", + f" with {(nb_by_network > big).sum()} network with more than {big} obs and the biggest have {nb_by_network.max()} observations ({nb_by_network[nb_by_network > big].sum()} observations cumulate)", f" {nb_trash} observations in trash", ] return "\n".join(infos) @@ -225,6 +226,12 @@ def network_size(self, id_networks=None): i = id_networks - self.index_network[2] return self.index_network[1][i] - self.index_network[0][i] + def networks_period(self): + """ + Return period for each network + """ + return get_period_with_index(self.time, *self.index_network[:2]) + def unique_segment_to_id(self, id_unique): """Return id network and id segment for a unique id @@ -1788,8 +1795,8 @@ def date2file(julian_day): ) logger.info( ( - f"coherence {_t} / {range_end-1} ({(_t - range_start) / (range_end - range_start-1):.1%})" - f" : {time.time()-_timestamp:5.2f}s" + f"coherence {_t} / {range_end - 1} ({(_t - range_start) / (range_end - range_start - 1):.1%})" + f" : {time.time() - _timestamp:5.2f}s" ) ) @@ -1865,8 +1872,8 @@ def date2file(julian_day): ) logger.info( ( - f"coherence {_t} / {range_end-1} ({(_t - range_start) / (range_end - range_start-1):.1%})" - f" : {time.time()-_timestamp:5.2f}s" + f"coherence {_t} / {range_end - 1} ({(_t - range_start) / (range_end - range_start - 1):.1%})" + f" : {time.time() - _timestamp:5.2f}s" ) ) return itf_final, ptf_final @@ -2065,7 +2072,7 @@ def 
group_observations(self, min_overlap=0.2, minimal_area=False, **kwargs):
         nb_alone, nb_obs, nb_gr = (gr == self.NOGROUP).sum(), len(gr), len(unique(gr))
         logger.info(
             f"{nb_alone} alone / {nb_obs} obs, {nb_gr} groups, "
-            f"{nb_alone *100./nb_obs:.2f} % alone, {(nb_obs - nb_alone) / (nb_gr - 1):.1f} obs/group"
+            f"{nb_alone * 100. / nb_obs:.2f} % alone, {(nb_obs - nb_alone) / (nb_gr - 1):.1f} obs/group"
         )
         return gr
 
@@ -2316,3 +2323,18 @@ def mask_follow_obs(m, next_obs, time, indexs, dt=3):
             m[i_next] = True
             i_next = next_obs[i_next]
             dt_ = abs(time[i_next] - t0)
+
+
+@njit(cache=True)
+def get_period_with_index(t, i0, i1):
+    """Return peak to peak covered by each slice defined by i0 and i1
+
+    :param array t: array which contains values to estimate spread
+    :param array i0: indices which determine the start of each slice
+    :param array i1: indices which determine the end of each slice
+    :return array: Peak to peak of t
+    """
+    periods = np.empty(i0.size, t.dtype)
+    for i in range(i0.size):
+        periods[i] = t[i0[i] : i1[i]].ptp()
+    return periods

From 7ad635def9c26d02f2b00cf4f8c71ff4af449e98 Mon Sep 17 00:00:00 2001
From: "A. Delepoulle" <36040805+AntSimi@users.noreply.github.com>
Date: Fri, 10 Jan 2025 15:46:10 +0100
Subject: [PATCH 110/115] In case of empty slice, continue and set period to 0

---
 src/py_eddy_tracker/observations/network.py | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/src/py_eddy_tracker/observations/network.py b/src/py_eddy_tracker/observations/network.py
index 87ef4a78..393dae78 100644
--- a/src/py_eddy_tracker/observations/network.py
+++ b/src/py_eddy_tracker/observations/network.py
@@ -2336,5 +2336,8 @@ def get_period_with_index(t, i0, i1):
     """
     periods = np.empty(i0.size, t.dtype)
     for i in range(i0.size):
+        if i1[i] == i0[i]:
+            periods[i] = 0
+            continue
         periods[i] = t[i0[i] : i1[i]].ptp()
     return periods
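PATCH 109 and PATCH 110 together define a network's period as the peak-to-peak spread of its observation dates: get_period_with_index reduces t over each [i0, i1) slice taken from index_network, and the guard added above keeps ptp() away from empty slices. A standalone sketch of the same reduction, with made-up times and indices (plain numpy, without the numba decoration):

    import numpy as np

    def periods(t, i0, i1):
        # pure-numpy restatement of get_period_with_index
        return np.array([t[a:b].ptp() if b > a else 0 for a, b in zip(i0, i1)])

    t = np.array([10.0, 11.0, 12.0, 20.0, 25.0])  # observation dates, in days
    i0 = np.array([0, 3, 5])  # first index of each network slice
    i1 = np.array([3, 5, 5])  # end index (exclusive); the third slice is empty
    print(periods(t, i0, i1))  # [2. 5. 0.]

With networks_period built on this, the duration of every network comes from a single pass over time, without materialising each network.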
From 1c42ae3358e62055646c11df1d2916de8c3dd941 Mon Sep 17 00:00:00 2001
From: "A. Delepoulle" <36040805+AntSimi@users.noreply.github.com>
Date: Tue, 28 Jan 2025 15:52:09 +0100
Subject: [PATCH 111/115] add docs conf.py

---
 .readthedocs.yml | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/.readthedocs.yml b/.readthedocs.yml
index 5ac02e12..ba36f8ea 100644
--- a/.readthedocs.yml
+++ b/.readthedocs.yml
@@ -9,3 +9,5 @@ python:
   install:
     - method: pip
       path: .
+sphinx:
+  configuration: doc/conf.py
\ No newline at end of file

From a39435dd2a02b0f313765452bdaebe39203769cf Mon Sep 17 00:00:00 2001
From: "J. Gamot"
Date: Thu, 16 Jan 2025 17:07:06 +0100
Subject: [PATCH 112/115] Modifications to network file

---
 src/py_eddy_tracker/appli/gui.py            |  7 ++-
 src/py_eddy_tracker/observations/network.py | 63 ++++++++++++++++++---
 2 files changed, 60 insertions(+), 10 deletions(-)

diff --git a/src/py_eddy_tracker/appli/gui.py b/src/py_eddy_tracker/appli/gui.py
index 4a8cdeb0..c3d7619b 100644
--- a/src/py_eddy_tracker/appli/gui.py
+++ b/src/py_eddy_tracker/appli/gui.py
@@ -11,7 +11,7 @@
 from matplotlib.animation import FuncAnimation
 from matplotlib.axes import Axes
 from matplotlib.collections import LineCollection
-from numpy import arange, where
+from numpy import arange, where, nan

 from .. import EddyParser
 from ..gui import GUI
@@ -58,7 +58,10 @@ def setup(
         self.kw_label["fontweight"] = kwargs.pop("fontweight", "demibold")
         # To text each visible eddy
         if field_txt:
-            self.field_txt = self.eddy[field_txt]
+            if isinstance(field_txt, str):
+                self.field_txt = self.eddy[field_txt]
+            else:
+                self.field_txt = field_txt
         if field_color:
             # To color each visible eddy
             self.field_color = self.eddy[field_color].astype("f4")
diff --git a/src/py_eddy_tracker/observations/network.py b/src/py_eddy_tracker/observations/network.py
index 393dae78..ef0b02b4 100644
--- a/src/py_eddy_tracker/observations/network.py
+++ b/src/py_eddy_tracker/observations/network.py
@@ -5,7 +5,8 @@
 from glob import glob
 import logging
 import time
-
+from datetime import timedelta, datetime
+import os
 import netCDF4
 from numba import njit, types as nb_types
 from numba.typed import List
@@ -16,6 +17,7 @@
     bincount,
     bool_,
     concatenate,
+    empty,
     nan,
     ones,
@@ -120,6 +122,7 @@ def __repr__(self):
         period = (self.period[1] - self.period[0]) / 365.25
         nb_by_network = self.network_size()
         nb_trash = 0 if self.ref_index != 0 else nb_by_network[0]
+        lifetime = self.lifetime
         big = 50_000
         infos = [
             f"Atlas with {self.nb_network} networks ({self.nb_network / period:0.0f} networks/year),"
@@ -127,6 +130,7 @@ def __repr__(self):
             f" {m_event.size} merging ({m_event.size / period:0.0f} merging/year), {s_event.size} splitting ({s_event.size / period:0.0f} splitting/year)",
             f" with {(nb_by_network > big).sum()} network with more than {big} obs and the biggest have {nb_by_network.max()} observations ({nb_by_network[nb_by_network > big].sum()} observations cumulate)",
             f" {nb_trash} observations in trash",
+            f" {lifetime.max()} days max of lifetime",
         ]
         return "\n".join(infos)
@@ -201,6 +205,13 @@ def ref_segment_track_index(self):
     @property
     def ref_index(self):
         return self.index_network[2]
+
+    @property
+    def lifetime(self):
+        """Return lifetime for each observation"""
+        lt = self.networks_period.astype("int")
+        nb_by_network = self.network_size()
+        return lt.repeat(nb_by_network)

     def network_segment_size(self, id_networks=None):
         """Get number of segment by network
@@ -226,12 +237,15 @@ def network_size(self, id_networks=None):
         i = id_networks - self.index_network[2]
         return self.index_network[1][i] - self.index_network[0][i]

+    @property
     def networks_period(self):
         """
         Return period for each network
         """
         return get_period_with_index(self.time, *self.index_network[:2])
+
+
     def unique_segment_to_id(self, id_unique):
         """Return id network and id segment for a unique id

@@ -281,7 +295,7 @@ def astype(self, cls):
             new[k][:] = self[k][:]
         new.sign_type = self.sign_type
         return new
-
+    
     def longer_than(self, nb_day_min=-1, nb_day_max=-1):
         """
         Select network on time duration
@@ -1132,23 +1146,29 @@ def segment_track_array(self):
             self._segment_track_array = build_unique_array(self.segment, self.track)
         return self._segment_track_array

-    def birth_event(self):
+    def birth_event(self, only_index=False):
         """Extract birth events."""
         i_start, _, _ = self.index_segment_track
         indices = i_start[self.previous_obs[i_start] == -1]
         if self.first_is_trash():
             indices = indices[1:]
-        return self.extract_event(indices)
-
+        if only_index:
+            return indices
+        else:
+            return self.extract_event(indices)
+
     generation_event = birth_event

-    def death_event(self):
+    def death_event(self, only_index=False):
         """Extract death events."""
         _, i_stop, _ = self.index_segment_track
         indices = i_stop[self.next_obs[i_stop - 1] == -1] - 1
         if self.first_is_trash():
             indices = indices[1:]
-        return self.extract_event(indices)
+        if only_index:
+            return indices
+        else:
+            return self.extract_event(indices)

     dissipation_event = death_event
@@ -1459,7 +1479,7 @@ def plot(self, ax, ref=None, color_cycle=None, **kwargs):
                 j += 1
         return mappables

-    def remove_dead_end(self, nobs=3, ndays=0, recursive=0, mask=None):
+    def remove_dead_end(self, nobs=3, ndays=0, recursive=0, mask=None, return_mask=False):
         """
         Remove short segments that don't connect several segments

@@ -1485,6 +1505,8 @@ def remove_dead_end(self, nobs=3, ndays=0, recursive=0, mask=None):
         )
         # get mask for selected obs
         m = ~self.segment_mask(segments_keep)
+        if return_mask:
+            return ~m
         self.track[m] = 0
         self.segment[m] = 0
         self.previous_obs[m] = -1
@@ -1502,6 +1524,8 @@ def remove_dead_end(self, nobs=3, ndays=0, recursive=0, mask=None):
         self.sort()
         if recursive > 0:
             self.remove_dead_end(nobs, ndays, recursive - 1)
+
+
     def extract_segment(self, segments, absolute=False):
         """Extract given segments
@@ -2042,6 +2066,29 @@ def group_observations(self, min_overlap=0.2, minimal_area=False, **kwargs):
         results, nb_obs = list(), list()
         # To display print only in INFO
         display_iteration = logger.getEffectiveLevel() == logging.INFO
+
+
+        # Sort files by date
+        def extract_date(file):
+            filename = os.path.basename(file)
+            date_str = filename.split('_')[-1].split('.')[0]  # extract the date part (e.g. "20180101")
+            return datetime.strptime(date_str, "%Y%m%d")  # convert to a datetime object
+        self.filenames = sorted(self.filenames, key=extract_date)
+
+        # Detect missing dates and report them so the user knows which files are missing
+        missing_dates = []
+        dates_list = [extract_date(self.filenames[i]) for i in range(len(self.filenames))]
+        for i in range(len(dates_list) - 1):
+            expected_date = dates_list[i] + timedelta(days=1)
+            while expected_date < dates_list[i + 1]:
+                missing_dates.append(expected_date)
+                expected_date += timedelta(days=1)
+        if missing_dates:
+            missing_str = ', '.join(date.strftime("%Y-%m-%d") for date in missing_dates)
+            raise Exception(f"The following files are missing: {missing_str}")
+        else:
+            print("No missing files")
+
         for i, filename in enumerate(self.filenames):
             if display_iteration:
                 print(f"{filename} compared to {self.window} next", end="\r")
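A hedged sketch tying these additions together (atlas path and variable names are illustrative, not from this patch). Note that `networks_period` became a property here, so it is now read without parentheses:

    from py_eddy_tracker.observations.network import NetworkObservations

    n = NetworkObservations.load_file("network_atlas.nc")  # hypothetical path
    print(n.networks_period.max())  # property now, a method before this patch
    print(n.lifetime[:5])  # network period repeated onto each observation
    # only_index=True returns raw indices instead of extracted observations
    i_birth = n.birth_event(only_index=True)
    i_death = n.death_event(only_index=True)
    print(n.time[i_birth].min(), n.time[i_death].max())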
From f7f820d3746cc137f14ccb5853599c26dd80cf93 Mon Sep 17 00:00:00 2001
From: "A. Delepoulle" <36040805+AntSimi@users.noreply.github.com>
Date: Wed, 29 Jan 2025 16:17:44 +0100
Subject: [PATCH 113/115] Move argsort out of numba to speed it up

---
 src/py_eddy_tracker/generic.py | 8 ++++++--
 1 file changed, 6 insertions(+), 2 deletions(-)

diff --git a/src/py_eddy_tracker/generic.py b/src/py_eddy_tracker/generic.py
index 612def68..2fdb737a 100644
--- a/src/py_eddy_tracker/generic.py
+++ b/src/py_eddy_tracker/generic.py
@@ -615,7 +615,6 @@ def build_circle(x0, y0, r):
     return x_norm * r + x0, y_norm * r + y0


-@njit(cache=True)
 def window_index(x, x0, half_window=1):
     """
     Give for a fixed half_window each start and end index for each x0, in
@@ -626,7 +625,12 @@ def window_index(x, x0, half_window=1):
     :param float half_window: half window
     """
     # Sort array, bounds will be sort also
-    i_ordered = x.argsort()
+    i_ordered = x.argsort(kind="mergesort")
+    return window_index_(x, i_ordered, x0, half_window)
+
+
+@njit(cache=True)
+def window_index_(x, i_ordered, x0, half_window=1):
     nb_x, nb_pt = x.size, x0.size
     first_index = empty(nb_pt, dtype=i_ordered.dtype)
     last_index = empty(nb_pt, dtype=i_ordered.dtype)
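The design choice above — keep the O(n log n) `argsort` in NumPy, which is highly optimized and supports the stable `kind="mergesort"`, and jit only the index lookups — is a common numba pattern. A generic sketch of the same split (this is not the library's `window_index` return convention; names are illustrative):

    import numpy as np
    from numba import njit

    @njit(cache=True)
    def _bounds(x_sorted, x0, half_window):
        # Only the cheap lookups are jitted; the sort already happened in NumPy.
        first = np.searchsorted(x_sorted, x0 - half_window, side="left")
        last = np.searchsorted(x_sorted, x0 + half_window, side="right")
        return first, last

    def window_bounds(x, x0, half_window=1.0):
        i_ordered = x.argsort(kind="mergesort")  # NumPy's sort, outside numba
        first, last = _bounds(x[i_ordered], x0, half_window)
        return i_ordered, first, last

    # x[i_ordered[first[k]:last[k]]] gives the values within x0[k] +/- half_window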
Delepoulle" <36040805+AntSimi@users.noreply.github.com> Date: Thu, 30 Jan 2025 11:59:41 +0100 Subject: [PATCH 114/115] Remove check on missing file, code was build with bad hypothesis --- src/py_eddy_tracker/observations/network.py | 22 --------------------- 1 file changed, 22 deletions(-) diff --git a/src/py_eddy_tracker/observations/network.py b/src/py_eddy_tracker/observations/network.py index ef0b02b4..f0b9d7cc 100644 --- a/src/py_eddy_tracker/observations/network.py +++ b/src/py_eddy_tracker/observations/network.py @@ -2067,28 +2067,6 @@ def group_observations(self, min_overlap=0.2, minimal_area=False, **kwargs): # To display print only in INFO display_iteration = logger.getEffectiveLevel() == logging.INFO - - # Trier les fichiers par date - def extract_date(file): - filename = os.path.basename(file) - date_str = filename.split('_')[-1].split('.')[0] # Extraire la partie date (ex : "20180101") - return datetime.strptime(date_str, "%Y%m%d") # Convertir en objet datetime - self.filenames = sorted(self.filenames, key=extract_date) - - # Detect missing date and print them to inform the user which files are missing - missing_dates = [] - dates_list = [extract_date(self.filenames[i]) for i in range(len(self.filenames))] - for i in range(len(dates_list) - 1): - expected_date = dates_list[i] + timedelta(days=1) - while expected_date < dates_list[i + 1]: - missing_dates.append(expected_date) - expected_date += timedelta(days=1) - if missing_dates: - missing_str = ', '.join(date.strftime("%Y-%m-%d") for date in missing_dates) - raise Exception(f"Following files missing : {missing_str}") - else: - print("No missing files") - for i, filename in enumerate(self.filenames): if display_iteration: print(f"{filename} compared to {self.window} next", end="\r") From 229543fdf7d0a8a3e1de6c40d769185cd85ca1b7 Mon Sep 17 00:00:00 2001 From: "A. Delepoulle" <36040805+AntSimi@users.noreply.github.com> Date: Fri, 6 Jun 2025 10:33:16 +0200 Subject: [PATCH 115/115] update changelog --- CHANGELOG.rst | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index f8eee72f..6d6d6a30 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -11,14 +11,23 @@ and this project adheres to `Semantic Versioning