Merged
4 changes: 4 additions & 0 deletions examples/07_cube_manipulation/pet_fsle_med.py
@@ -26,6 +26,10 @@
# %%
# ADT in med
# ----------
# The :py:meth:`~py_eddy_tracker.dataset.grid.GridCollection.from_netcdf_cube` method is
# made for data stored in a time cube; you can also use the
# :py:meth:`~py_eddy_tracker.dataset.grid.GridCollection.from_netcdf_list` method to
# load a data cube from multiple files.
c = GridCollection.from_netcdf_cube(
get_demo_path("dt_med_allsat_phy_l4_2005T2.nc"),
"longitude",
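As a rough sketch of the alternative mentioned in the comment above (not part of this change), the same collection could be built from several files with `from_netcdf_list`; the file names and time values below are hypothetical and the argument order is assumed to mirror `from_netcdf_cube`, so check the API documentation before reuse.

c = GridCollection.from_netcdf_list(
    # hypothetical yearly subsets of the same ADT product
    ["dt_med_allsat_phy_l4_2005.nc", "dt_med_allsat_phy_l4_2006.nc"],
    # assumed: time value associated with each file, in the dataset time units
    [20819, 21184],
    "longitude",
    "latitude",
    heigth="adt",  # assumed to match the height keyword used by from_netcdf_cube
)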
10 changes: 5 additions & 5 deletions examples/16_network/pet_ioannou_2017_case.py
@@ -21,7 +21,7 @@
from py_eddy_tracker.gui import GUI_AXES
from py_eddy_tracker.observations.network import NetworkObservations

from py_eddy_tracker.generic import coordinates_to_local, local_to_coordinates
from py_eddy_tracker.generic import coordinates_to_local
from py_eddy_tracker.poly import fit_ellipse

# %%
@@ -217,22 +217,22 @@ def update_axes(ax, mappable=None):
# Theta
ax = timeline_axes()
m = close_to_i3.scatter_timeline(ax, theta_, vmin=-pi / 2, vmax=pi / 2, cmap="hsv")
cb = update_axes(ax, m["scatter"])
_ = update_axes(ax, m["scatter"])

# %%
# a
ax = timeline_axes()
m = close_to_i3.scatter_timeline(ax, a_ * 1e-3, vmin=0, vmax=80, cmap="Spectral_r")
cb = update_axes(ax, m["scatter"])
_ = update_axes(ax, m["scatter"])

# %%
# b
ax = timeline_axes()
m = close_to_i3.scatter_timeline(ax, b_ * 1e-3, vmin=0, vmax=80, cmap="Spectral_r")
cb = update_axes(ax, m["scatter"])
_ = update_axes(ax, m["scatter"])

# %%
# a/b
ax = timeline_axes()
m = close_to_i3.scatter_timeline(ax, a_ / b_, vmin=1, vmax=2, cmap="Spectral_r")
cb = update_axes(ax, m["scatter"])
_ = update_axes(ax, m["scatter"])
@@ -40,7 +40,7 @@
"cell_type": "markdown",
"metadata": {},
"source": [
"Eddies files (zarr or netcdf) could be loaded with ```load_file``` method:\n\n"
"Eddies files (zarr or netcdf) can be loaded with ```load_file``` method:\n\n"
]
},
{
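A minimal sketch of the `load_file` call described in the cell above (not part of this diff; the demo file name is hypothetical):

from py_eddy_tracker.data import get_demo_path
from py_eddy_tracker.observations.observation import EddiesObservations

# Load an eddy atlas stored as netcdf; a zarr store is loaded the same way
a = EddiesObservations.load_file(get_demo_path("Anticyclonic_20190223.nc"))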
@@ -33,7 +33,7 @@
"cell_type": "markdown",
"metadata": {},
"source": [
"## ADT in med\n\n"
"## ADT in med\n:py:meth:`~py_eddy_tracker.dataset.grid.GridCollection.from_netcdf_cube` method is\nmade for data stores in time cube, you could use also \n:py:meth:`~py_eddy_tracker.dataset.grid.GridCollection.from_netcdf_list` method to\nload data-cube from multiple file.\n\n"
]
},
{
@@ -172,7 +172,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.7.7"
"version": "3.9.2"
}
},
"nbformat": 4,
12 changes: 6 additions & 6 deletions notebooks/python_module/14_generic_tools/pet_fit_contour.ipynb
@@ -26,7 +26,7 @@
},
"outputs": [],
"source": [
"from matplotlib import pyplot as plt\nfrom numpy import cos, linspace, radians, sin\n\nfrom py_eddy_tracker import data\nfrom py_eddy_tracker.generic import coordinates_to_local, local_to_coordinates\nfrom py_eddy_tracker.observations.observation import EddiesObservations\nfrom py_eddy_tracker.poly import fit_circle_, fit_ellips"
"from matplotlib import pyplot as plt\nfrom numpy import cos, linspace, radians, sin\n\nfrom py_eddy_tracker import data\nfrom py_eddy_tracker.generic import coordinates_to_local, local_to_coordinates\nfrom py_eddy_tracker.observations.observation import EddiesObservations\nfrom py_eddy_tracker.poly import fit_circle_, fit_ellipse"
]
},
{
@@ -51,7 +51,7 @@
"cell_type": "markdown",
"metadata": {},
"source": [
"Function to draw circle or ellips from parameter\n\n"
"Function to draw circle or ellipse from parameter\n\n"
]
},
{
@@ -62,14 +62,14 @@
},
"outputs": [],
"source": [
"def build_circle(x0, y0, r):\n angle = radians(linspace(0, 360, 50))\n x_norm, y_norm = cos(angle), sin(angle)\n return local_to_coordinates(x_norm * r, y_norm * r, x0, y0)\n\n\ndef build_ellips(x0, y0, a, b, theta):\n angle = radians(linspace(0, 360, 50))\n x = a * cos(theta) * cos(angle) - b * sin(theta) * sin(angle)\n y = a * sin(theta) * cos(angle) + b * cos(theta) * sin(angle)\n return local_to_coordinates(x, y, x0, y0)"
"def build_circle(x0, y0, r):\n angle = radians(linspace(0, 360, 50))\n x_norm, y_norm = cos(angle), sin(angle)\n return local_to_coordinates(x_norm * r, y_norm * r, x0, y0)\n\n\ndef build_ellipse(x0, y0, a, b, theta):\n angle = radians(linspace(0, 360, 50))\n x = a * cos(theta) * cos(angle) - b * sin(theta) * sin(angle)\n y = a * sin(theta) * cos(angle) + b * cos(theta) * sin(angle)\n return local_to_coordinates(x, y, x0, y0)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Plot fitted circle or ellips on stored contour\n\n"
"Plot fitted circle or ellipse on stored contour\n\n"
]
},
{
@@ -80,7 +80,7 @@
},
"outputs": [],
"source": [
"xs, ys = a.contour_lon_s, a.contour_lat_s\n\nfig = plt.figure(figsize=(15, 15))\n\nj = 1\nfor i in range(0, 800, 30):\n x, y = xs[i], ys[i]\n x0_, y0_ = x.mean(), y.mean()\n x_, y_ = coordinates_to_local(x, y, x0_, y0_)\n ax = fig.add_subplot(4, 4, j)\n ax.grid(), ax.set_aspect(\"equal\")\n ax.plot(x, y, label=\"store\", color=\"black\")\n x0, y0, a, b, theta = fit_ellips(x_, y_)\n x0, y0 = local_to_coordinates(x0, y0, x0_, y0_)\n ax.plot(*build_ellips(x0, y0, a, b, theta), label=\"ellips\", color=\"green\")\n x0, y0, radius, shape_error = fit_circle_(x_, y_)\n x0, y0 = local_to_coordinates(x0, y0, x0_, y0_)\n ax.plot(*build_circle(x0, y0, radius), label=\"circle\", color=\"red\", lw=0.5)\n if j == 16:\n break\n j += 1"
"xs, ys = a.contour_lon_s, a.contour_lat_s\n\nfig = plt.figure(figsize=(15, 15))\n\nj = 1\nfor i in range(0, 800, 30):\n x, y = xs[i], ys[i]\n x0_, y0_ = x.mean(), y.mean()\n x_, y_ = coordinates_to_local(x, y, x0_, y0_)\n ax = fig.add_subplot(4, 4, j)\n ax.grid(), ax.set_aspect(\"equal\")\n ax.plot(x, y, label=\"store\", color=\"black\")\n x0, y0, a, b, theta = fit_ellipse(x_, y_)\n x0, y0 = local_to_coordinates(x0, y0, x0_, y0_)\n ax.plot(*build_ellipse(x0, y0, a, b, theta), label=\"ellipse\", color=\"green\")\n x0, y0, radius, shape_error = fit_circle_(x_, y_)\n x0, y0 = local_to_coordinates(x0, y0, x0_, y0_)\n ax.plot(*build_circle(x0, y0, radius), label=\"circle\", color=\"red\", lw=0.5)\n if j == 16:\n break\n j += 1"
]
}
],
@@ -100,7 +100,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.7.7"
"version": "3.9.2"
}
},
"nbformat": 4,
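For reference, `build_ellipse` above traces the standard rotated-ellipse parameterization in the local frame, which `local_to_coordinates` then maps back to longitude/latitude around (x0, y0):

% Local-frame curve traced by build_ellipse for t in [0, 2*pi)
\begin{aligned}
x_{\mathrm{local}}(t) &= a\cos\theta\,\cos t - b\sin\theta\,\sin t,\\
y_{\mathrm{local}}(t) &= a\sin\theta\,\cos t + b\cos\theta\,\sin t.
\end{aligned}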
6 changes: 3 additions & 3 deletions notebooks/python_module/16_network/pet_follow_particle.ipynb
@@ -37,7 +37,7 @@
},
"outputs": [],
"source": [
"class VideoAnimation(FuncAnimation):\n def _repr_html_(self, *args, **kwargs):\n \"\"\"To get video in html and have a player\"\"\"\n content = self.to_html5_video()\n return re.sub(\n r'width=\"[0-9]*\"\\sheight=\"[0-9]*\"', 'width=\"100%\" height=\"100%\"', content\n )\n\n def save(self, *args, **kwargs):\n if args[0].endswith(\"gif\"):\n # In this case gif is use to create thumbnail which are not use but consume same time than video\n # So we create an empty file, to save time\n with open(args[0], \"w\") as _:\n pass\n return\n return super().save(*args, **kwargs)"
"class VideoAnimation(FuncAnimation):\n def _repr_html_(self, *args, **kwargs):\n \"\"\"To get video in html and have a player\"\"\"\n content = self.to_html5_video()\n return re.sub(\n r'width=\"[0-9]*\"\\sheight=\"[0-9]*\"', 'width=\"100%\" height=\"100%\"', content\n )\n\n def save(self, *args, **kwargs):\n if args[0].endswith(\"gif\"):\n # In this case gif is used to create thumbnail which are not used but consumes same time than video\n # So we create an empty file, to save time\n with open(args[0], \"w\") as _:\n pass\n return\n return super().save(*args, **kwargs)"
]
},
{
@@ -120,7 +120,7 @@
},
"outputs": [],
"source": [
"def advect(x, y, c, t0, delta_t):\n \"\"\"\n Advect particle from t0 to t0 + delta_t, with data cube.\n \"\"\"\n kw = dict(nb_step=6, time_step=86400 / 6)\n if delta_t < 0:\n kw[\"backward\"] = True\n delta_t = -delta_t\n p = c.advect(x, y, \"u\", \"v\", t_init=t0, **kw)\n for _ in range(delta_t):\n t, x, y = p.__next__()\n return t, x, y\n\n\ndef particle_candidate(x, y, c, eddies, t_start, i_target, pct, **kwargs):\n # Obs from initial time\n m_start = eddies.time == t_start\n e = eddies.extract_with_mask(m_start)\n # to be able to get global index\n translate_start = where(m_start)[0]\n # Identify particle in eddies(only in core)\n i_start = e.contains(x, y, intern=True)\n m = i_start != -1\n x, y, i_start = x[m], y[m], i_start[m]\n # Advect\n t_end, x, y = advect(x, y, c, t_start, **kwargs)\n # eddies at last date\n m_end = eddies.time == t_end / 86400\n e_end = eddies.extract_with_mask(m_end)\n # to be able to get global index\n translate_end = where(m_end)[0]\n # Id eddies for each alive particle(in core and extern)\n i_end = e_end.contains(x, y)\n # compute matrix and filled target array\n get_matrix(i_start, i_end, translate_start, translate_end, i_target, pct)\n\n\n@njit(cache=True)\ndef get_matrix(i_start, i_end, translate_start, translate_end, i_target, pct):\n nb_start, nb_end = translate_start.size, translate_end.size\n # Matrix which will store count for every couple\n count = zeros((nb_start, nb_end), dtype=nb_types.int32)\n # Number of particle in each origin observation\n ref = zeros(nb_start, dtype=nb_types.int32)\n # For each particle\n for i in range(i_start.size):\n i_end_ = i_end[i]\n i_start_ = i_start[i]\n if i_end_ != -1:\n count[i_start_, i_end_] += 1\n ref[i_start_] += 1\n for i in range(nb_start):\n for j in range(nb_end):\n pct_ = count[i, j]\n # If there are particle from i to j\n if pct_ != 0:\n # Get percent\n pct_ = pct_ / ref[i] * 100.0\n # Get indices in full dataset\n i_, j_ = translate_start[i], translate_end[j]\n pct_0 = pct[i_, 0]\n if pct_ > pct_0:\n pct[i_, 1] = pct_0\n pct[i_, 0] = pct_\n i_target[i_, 1] = i_target[i_, 0]\n i_target[i_, 0] = j_\n elif pct_ > pct[i_, 1]:\n pct[i_, 1] = pct_\n i_target[i_, 1] = j_\n return i_target, pct"
"def advect(x, y, c, t0, delta_t):\n \"\"\"\n Advect particle from t0 to t0 + delta_t, with data cube.\n \"\"\"\n kw = dict(nb_step=6, time_step=86400 / 6)\n if delta_t < 0:\n kw[\"backward\"] = True\n delta_t = -delta_t\n p = c.advect(x, y, \"u\", \"v\", t_init=t0, **kw)\n for _ in range(delta_t):\n t, x, y = p.__next__()\n return t, x, y\n\n\ndef particle_candidate(x, y, c, eddies, t_start, i_target, pct, **kwargs):\n # Obs from initial time\n m_start = eddies.time == t_start\n e = eddies.extract_with_mask(m_start)\n # to be able to get global index\n translate_start = where(m_start)[0]\n # Identify particle in eddies (only in core)\n i_start = e.contains(x, y, intern=True)\n m = i_start != -1\n x, y, i_start = x[m], y[m], i_start[m]\n # Advect\n t_end, x, y = advect(x, y, c, t_start, **kwargs)\n # eddies at last date\n m_end = eddies.time == t_end / 86400\n e_end = eddies.extract_with_mask(m_end)\n # to be able to get global index\n translate_end = where(m_end)[0]\n # Id eddies for each alive particle (in core and extern)\n i_end = e_end.contains(x, y)\n # compute matrix and fill target array\n get_matrix(i_start, i_end, translate_start, translate_end, i_target, pct)\n\n\n@njit(cache=True)\ndef get_matrix(i_start, i_end, translate_start, translate_end, i_target, pct):\n nb_start, nb_end = translate_start.size, translate_end.size\n # Matrix which will store count for every couple\n count = zeros((nb_start, nb_end), dtype=nb_types.int32)\n # Number of particles in each origin observation\n ref = zeros(nb_start, dtype=nb_types.int32)\n # For each particle\n for i in range(i_start.size):\n i_end_ = i_end[i]\n i_start_ = i_start[i]\n if i_end_ != -1:\n count[i_start_, i_end_] += 1\n ref[i_start_] += 1\n for i in range(nb_start):\n for j in range(nb_end):\n pct_ = count[i, j]\n # If there are particles from i to j\n if pct_ != 0:\n # Get percent\n pct_ = pct_ / ref[i] * 100.0\n # Get indices in full dataset\n i_, j_ = translate_start[i], translate_end[j]\n pct_0 = pct[i_, 0]\n if pct_ > pct_0:\n pct[i_, 1] = pct_0\n pct[i_, 0] = pct_\n i_target[i_, 1] = i_target[i_, 0]\n i_target[i_, 0] = j_\n elif pct_ > pct[i_, 1]:\n pct[i_, 1] = pct_\n i_target[i_, 1] = j_\n return i_target, pct"
]
},
{
@@ -169,7 +169,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.7.7"
"version": "3.9.2"
}
},
"nbformat": 4,
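A rough sketch of how `particle_candidate` defined above might be driven; the seed positions `x0`, `y0`, the grid collection `c` and the network `n` are assumed to be set up earlier in the notebook, and the array shapes follow the indexing used in `get_matrix`:

from numpy import ones, zeros

# Hypothetical driver: keep the two best targets and their matching percentages
t_start = int(n.time.min())               # assumed: first day of the network
i_target = -ones((n.time.size, 2), "i4")  # best and second-best target index per obs
pct = zeros((n.time.size, 2), "f4")       # percent of particles reaching each target
particle_candidate(x0, y0, c, n, t_start, i_target, pct, delta_t=14)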
92 changes: 91 additions & 1 deletion notebooks/python_module/16_network/pet_ioannou_2017_case.ipynb
@@ -26,7 +26,7 @@
},
"outputs": [],
"source": [
"import re\nfrom datetime import datetime, timedelta\n\nfrom matplotlib import colors\nfrom matplotlib import pyplot as plt\nfrom matplotlib.animation import FuncAnimation\nfrom matplotlib.ticker import FuncFormatter\nfrom numpy import arange, where\n\nfrom py_eddy_tracker.appli.gui import Anim\nfrom py_eddy_tracker.data import get_demo_path\nfrom py_eddy_tracker.gui import GUI_AXES\nfrom py_eddy_tracker.observations.network import NetworkObservations"
"import re\nfrom datetime import datetime, timedelta\n\nfrom matplotlib import colors\nfrom matplotlib import pyplot as plt\nfrom matplotlib.animation import FuncAnimation\nfrom matplotlib.ticker import FuncFormatter\nfrom numpy import arange, where, array, pi\n\nfrom py_eddy_tracker.appli.gui import Anim\nfrom py_eddy_tracker.data import get_demo_path\nfrom py_eddy_tracker.gui import GUI_AXES\nfrom py_eddy_tracker.observations.network import NetworkObservations\n\nfrom py_eddy_tracker.generic import coordinates_to_local\nfrom py_eddy_tracker.poly import fit_ellipse"
]
},
{
@@ -230,6 +230,96 @@
"source": [
"ax = timeline_axes()\nm = close_to_i3.scatter_timeline(ax, \"shape_error_e\", vmin=14, vmax=70, **kw)\ncb = update_axes(ax, m[\"scatter\"])\ncb.set_label(\"Effective shape error\")"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Rotation angle\nFor each obs, fit an ellipse to the contour, with theta the angle from the x-axis,\na the semi ax in x direction and b the semi ax in y dimension\n\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"theta_ = list()\na_ = list()\nb_ = list()\nfor obs in close_to_i3:\n x, y = obs[\"contour_lon_s\"], obs[\"contour_lat_s\"]\n x0_, y0_ = x.mean(), y.mean()\n x_, y_ = coordinates_to_local(x, y, x0_, y0_)\n x0, y0, a, b, theta = fit_ellipse(x_, y_)\n theta_.append(theta)\n a_.append(a)\n b_.append(b)\na_ = array(a_)\nb_ = array(b_)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Theta\n\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"ax = timeline_axes()\nm = close_to_i3.scatter_timeline(ax, theta_, vmin=-pi / 2, vmax=pi / 2, cmap=\"hsv\")\n_ = update_axes(ax, m[\"scatter\"])"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"a\n\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"ax = timeline_axes()\nm = close_to_i3.scatter_timeline(ax, a_ * 1e-3, vmin=0, vmax=80, cmap=\"Spectral_r\")\n_ = update_axes(ax, m[\"scatter\"])"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"b\n\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"ax = timeline_axes()\nm = close_to_i3.scatter_timeline(ax, b_ * 1e-3, vmin=0, vmax=80, cmap=\"Spectral_r\")\n_ = update_axes(ax, m[\"scatter\"])"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"a/b\n\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"ax = timeline_axes()\nm = close_to_i3.scatter_timeline(ax, a_ / b_, vmin=1, vmax=2, cmap=\"Spectral_r\")\n_ = update_axes(ax, m[\"scatter\"])"
]
}
],
"metadata": {