8 changes: 4 additions & 4 deletions src/py_eddy_tracker/appli/network.py
@@ -37,7 +37,7 @@ def build_network():


def divide_network():
parser = EddyParser("Separate path for a same group(network)")
parser = EddyParser("Separate path for a same group (network)")
parser.add_argument("input", help="input network file")
parser.add_argument("out", help="output file")
parser.contour_intern_arg()
@@ -66,7 +66,7 @@ def subset_network():
"--length",
nargs=2,
type=int,
help="Nb of day which must be cover by network, first minimum number of day and last maximum number of day,"
help="Nb of days that must be covered by the network, first minimum number of day and last maximum number of day,"
"if value is negative, this bound won't be used",
)
parser.add_argument(
@@ -85,8 +85,8 @@ def subset_network():
"--period",
nargs=2,
type=int,
help="Start day and end day, if it's negative value we will add to day min and add to day max,"
"if 0 it s not use",
help="Start day and end day, if it's a negative value we will add to day min and add to day max,"
"if 0 it is not used",
)
args = parser.parse_args()
n = NetworkObservations.load_file(args.input, raw_data=True)
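Note on the `--length` help text changed above: it documents a simple convention, a negative bound is simply ignored. Purely as an illustration of that convention (the function and variable names below are invented for the example, they are not part of the package):

import numpy as np

def apply_length_bounds(durations, min_days, max_days):
    # Illustrative sketch of the documented convention: a negative bound is not applied.
    keep = np.ones(durations.size, dtype=bool)
    if min_days >= 0:
        keep &= durations >= min_days
    if max_days >= 0:
        keep &= durations <= max_days
    return keep

# e.g. keep networks covering at least 30 days, with no upper bound:
# apply_length_bounds(network_durations, 30, -1)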
134 changes: 67 additions & 67 deletions src/py_eddy_tracker/dataset/grid.py

Large diffs are not rendered by default.

46 changes: 23 additions & 23 deletions src/py_eddy_tracker/generic.py
@@ -30,7 +30,7 @@
@njit(cache=True)
def count_consecutive(mask):
"""
Count consecutive event every False flag count restart
Count consecutive events every False flag count restart

:param array[bool] mask: event to count
:return: count when consecutive event
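For context, the behaviour this docstring describes (a running count of consecutive events, reset at every False flag) can be sketched in plain NumPy; this is an illustration only, not the library's njit implementation:

import numpy as np

def count_consecutive_sketch(mask):
    # Running count of consecutive True values, reset to 0 at each False.
    out = np.zeros(mask.size, dtype=np.int64)
    count = 0
    for i, m in enumerate(mask):
        count = count + 1 if m else 0
        out[i] = count
    return out

count_consecutive_sketch(np.array([True, True, False, True, True, True]))
# -> array([1, 2, 0, 1, 2, 3])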
@@ -50,7 +50,7 @@ def count_consecutive(mask):
@njit(cache=True)
def reverse_index(index, nb):
"""
Compute a list of index, which are not in index.
Compute a list of indexes, which are not in index.

:param array index: index of group which will be set to False
:param array nb: Count for each group
@@ -65,10 +65,10 @@ def reverse_index(index, nb):

@njit(cache=True)
def build_index(groups):
"""We expected that variable is monotonous, and return index for each step change.
"""We expect that variable is monotonous, and return index for each step change.

:param array groups: array which contain group to be separated
:return: (first_index of each group, last_index of each group, value to shift group)
:param array groups: array that contains groups to be separated
:return: (first_index of each group, last_index of each group, value to shift groups)
:rtype: (array, array, int)

Examples
@@ -83,7 +83,7 @@ def build_index(groups):
for i, group in enumerate(groups[:-1]):
# Get next value to compare
next_group = groups[i + 1]
# if different we need to set index for all group between the 2 values
# if different we need to set index for all groups between the 2 values
if group != next_group:
first_index[group - i0 + 1 : next_group - i0 + 1] = i + 1
last_index = zeros(amplitude, dtype=numba_types.int_)
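The contract described by the docstring above (sorted group labels in, per-group start and stop indexes plus the offset of the first group out) can be reproduced with `numpy.searchsorted`; the snippet below is only a readable equivalent written for this note, not the compiled code:

import numpy as np

def build_index_sketch(groups):
    # groups must be sorted; returns first index, last index (exclusive) and offset i0.
    i0, i1 = groups[0], groups[-1]
    values = np.arange(i0, i1 + 1)
    first_index = np.searchsorted(groups, values, side="left")
    last_index = np.searchsorted(groups, values, side="right")
    return first_index, last_index, i0

build_index_sketch(np.array([2, 2, 2, 3, 3, 5]))
# -> (array([0, 3, 5, 5]), array([3, 5, 5, 6]), 2)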
@@ -95,21 +95,21 @@

@njit(cache=True)
def hist_numba(x, bins):
"""Call numba histogram to speed up."""
"""Call numba histogram to speed up."""
return histogram(x, bins)


@njit(cache=True, fastmath=True, parallel=False)
def distance_grid(lon0, lat0, lon1, lat1):
"""
Get distance for every couple of point.
Get distance for every couple of points.

:param array lon0:
:param array lat0:
:param array lon1:
:param array lat1:

:return: nan value for far away point, and km for other
:return: nan value for far away points, and km for other
:rtype: array
"""
nb_0 = lon0.shape[0]
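One possible reading of this contract, written as a plain haversine implementation that returns kilometres and masks distant pairs with nan. The cut-off distance and the Earth radius below are assumptions made for the example, not values taken from the function above:

import numpy as np

def distance_grid_sketch(lon0, lat0, lon1, lat1, cutoff_km=125.0):
    # Distance matrix (km) between every pair of points, nan beyond the cut-off.
    la0, la1 = np.radians(lat0)[:, None], np.radians(lat1)[None, :]
    dlon = np.radians(lon1)[None, :] - np.radians(lon0)[:, None]
    dlat = la1 - la0
    a = np.sin(dlat / 2) ** 2 + np.cos(la0) * np.cos(la1) * np.sin(dlon / 2) ** 2
    d = 2 * 6371.0 * np.arcsin(np.sqrt(a))
    return np.where(d > cutoff_km, np.nan, d)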
@@ -164,7 +164,7 @@ def cumsum_by_track(field, track):
Cumsum by track.

:param array field: data to sum
:pram array(int) track: id of track to separate data
:pram array(int) track: id of trajectories to separate data
:return: cumsum with a reset at each start of track
:rtype: array
"""
@@ -192,7 +192,7 @@ def interp2d_geo(x_g, y_g, z_g, m_g, x, y, nearest=False):
:param array m_g: Boolean grid, True if value is masked
:param array x: coordinate where interpolate z
:param array y: coordinate where interpolate z
:param bool nearest: if true we will take nearest pixel
:param bool nearest: if True we will take nearest pixel
:return: z interpolated
:rtype: array
"""
@@ -256,17 +256,17 @@ def interp2d_bilinear(x_g, y_g, z_g, m_g, x, y):
nb_x = x_g.shape[0]
nb_y = y_g.shape[0]
is_circular = abs(x_g[-1] % 360 - (x_g[0] - x_step) % 360) < 1e-5
# Indices which should be never exist
# Indexes that should never exist
i0_old, j0_old, masked = -100000000, -10000000, False
z = empty(x.shape, dtype=z_g.dtype)
for i in prange(x.size):
x_ = (x[i] - x_ref) / x_step
y_ = (y[i] - y_ref) / y_step
i0 = int(floor(x_))
# To keep original value if wrapping apply to compute xd
# To keep original values if wrapping applied to compute xd
i0_ = i0
j0 = int(floor(y_))
# corner are the same need only a new xd and yd
# corners are the same need only a new xd and yd
if i0 != i0_old or j0 != j0_old:
i1 = i0 + 1
j1 = j0 + 1
@@ -288,7 +288,7 @@ def interp2d_bilinear(x_g, y_g, z_g, m_g, x, y):
z_g[i1, j1],
)
masked = False
# Need to be store only on change
# Need to be stored only on change
i0_old, j0_old = i0, j0
if masked:
z[i] = nan
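For readers skimming the diff, the per-pixel weighting built in the loop above is the standard bilinear one; a stripped-down version for a single target point, without mask handling, longitude wrapping or numba (so purely illustrative), looks like:

import numpy as np

def bilinear_sketch(x_g, y_g, z_g, x, y):
    # Regular grid, one target point (x, y), no mask and no wrapping.
    x_ = (x - x_g[0]) / (x_g[1] - x_g[0])
    y_ = (y - y_g[0]) / (y_g[1] - y_g[0])
    i0, j0 = int(np.floor(x_)), int(np.floor(y_))
    xd, yd = x_ - i0, y_ - j0
    z00, z10 = z_g[i0, j0], z_g[i0 + 1, j0]
    z01, z11 = z_g[i0, j0 + 1], z_g[i0 + 1, j0 + 1]
    return (z00 * (1 - xd) + z10 * xd) * (1 - yd) + (z01 * (1 - xd) + z11 * xd) * yd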
@@ -359,17 +359,17 @@ def flatten_line_matrix(l_matrix):
@njit(cache=True)
def simplify(x, y, precision=0.1):
"""
Will remove all middle/end point which are closer than precision.
Will remove all middle/end points closer than precision.

:param array x:
:param array y:
:param float precision: if two points have distance inferior to precision with remove next point
:param float precision: if two points have distance inferior to precision we remove next point
:return: (x,y)
:rtype: (array,array)
"""
precision2 = precision ** 2
nb = x.shape[0]
# will be True for value keep
# will be True for kept values
mask = ones(nb, dtype=bool_)
for j in range(0, nb):
x_previous, y_previous = x[j], y[j]
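The idea in the docstring can be restated with a small sketch: walk the polyline and drop any point whose squared distance to the last kept point is below precision squared. This is an illustration, not the njit code above:

import numpy as np

def simplify_sketch(x, y, precision=0.1):
    # Keep a point only if it is at least `precision` away from the last kept point.
    keep = [0]
    for i in range(1, x.shape[0]):
        dx, dy = x[i] - x[keep[-1]], y[i] - y[keep[-1]]
        if dx ** 2 + dy ** 2 >= precision ** 2:
            keep.append(i)
    return x[keep], y[keep]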
@@ -423,7 +423,7 @@ def split_line(x, y, i):
:param y: array
:param i: array of int at each i change, we cut x, y

:return: x and y separate by nan at each i jump
:return: x and y separated by nan at each i jump
"""
nb_jump = len(where(i[1:] - i[:-1] != 0)[0])
nb_value = x.shape[0]
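In other words, every jump of `i` becomes a nan separator in the output, which plotting libraries then draw as broken lines. A plain-NumPy sketch of the same idea (illustrative, not the function's code):

import numpy as np

def split_line_sketch(x, y, i):
    # Insert a nan after each position where the id `i` changes.
    cut = np.where(i[1:] != i[:-1])[0] + 1
    return np.insert(x.astype(float), cut, np.nan), np.insert(y.astype(float), cut, np.nan)

split_line_sketch(np.array([0, 1, 2, 3]), np.array([5, 6, 7, 8]), np.array([0, 0, 1, 1]))
# -> (array([0., 1., nan, 2., 3.]), array([5., 6., nan, 7., 8.]))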
@@ -445,11 +445,11 @@
@njit(cache=True)
def wrap_longitude(x, y, ref, cut=False):
"""
Will wrap contiguous longitude with reference as west bound.
Will wrap contiguous longitude with reference as western boundary.

:param array x:
:param array y:
:param float ref: longitude of reference, all the new value will be between ref and ref + 360
:param float ref: longitude of reference, all the new values will be between ref and ref + 360
:param bool cut: if True line will be cut at the bounds
:return: lon,lat
:rtype: (array,array)
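Leaving the contiguity and cut logic aside, the wrapping itself is a one-liner; a sketch of the basic case only:

import numpy as np

def wrap_longitude_sketch(x, ref):
    # Bring every longitude into [ref, ref + 360); contiguity and cutting are ignored here.
    return (np.asarray(x) - ref) % 360 + ref

wrap_longitude_sketch([-170.0, 10.0, 200.0], ref=0.0)
# -> array([190.,  10., 200.])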
@@ -557,7 +557,7 @@ def local_to_coordinates(x, y, lon0, lat0):
@njit(cache=True, fastmath=True)
def nearest_grd_indice(x, y, x0, y0, xstep, ystep):
"""
Get nearest grid indice from a position.
Get nearest grid index from a position.

:param x: longitude
:param y: latitude
@@ -575,7 +575,7 @@ def nearest_grd_indice(x, y, x0, y0, xstep, ystep):
@njit(cache=True)
def bbox_indice_regular(vertices, x0, y0, xstep, ystep, N, circular, x_size):
"""
Get bbox indice of a contour in a regular grid.
Get bbox index of a contour in a regular grid.

:param vertices: vertice of contour
:param float x0: first grid longitude
26 changes: 13 additions & 13 deletions src/py_eddy_tracker/observations/groups.py
@@ -13,13 +13,13 @@
def get_missing_indices(
array_time, array_track, dt=1, flag_untrack=True, indice_untrack=0
):
"""return indices where it misses values
"""Return indexes where values are missing

:param np.array(int) array_time : array of strictly increasing int representing time
:param np.array(int) array_track: N° track where observation belong
:param int,float dt: theorical timedelta between 2 observation
:param np.array(int) array_track: N° track where observations belong
:param int,float dt: theorical timedelta between 2 observations
:param bool flag_untrack: if True, ignore observations where n°track equal `indice_untrack`
:param int indice_untrack: n° representing where observations are untrack
:param int indice_untrack: n° representing where observations are untracked


ex : array_time = np.array([67, 68, 70, 71, 74, 75])
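A simplified version of the idea, ignoring tracks and the untracked flag; the exact return convention of the real function may differ, this only shows the kind of output one would expect for the example above:

import numpy as np

def missing_indices_sketch(array_time, dt=1):
    # For each gap larger than dt, repeat the index of the observation that follows the gap.
    gaps = (array_time[1:] - array_time[:-1]) // dt - 1
    return np.repeat(np.arange(1, array_time.size), gaps)

missing_indices_sketch(np.array([67, 68, 70, 71, 74, 75]))
# -> array([2, 4, 4]): day 69 is missing before index 2, days 72 and 73 before index 4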
@@ -72,11 +72,11 @@ def fix_next_previous_obs(self):

@abstractmethod
def get_missing_indices(self, dt):
"find indices where observations is missing"
"Find indexes where observations are missing"
pass

def filled_by_interpolation(self, mask):
"""Filled selected values by interpolation
"""Fill selected values by interpolation

:param array(bool) mask: True if must be filled by interpolation
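For a single 1-D field, "fill selected values by interpolation" amounts to something like the sketch below (an illustration of the idea for one array, not the method's actual code):

import numpy as np

def fill_field_sketch(values, mask):
    # Replace flagged samples with a linear interpolation over the remaining ones.
    index = np.arange(values.size)
    out = values.astype(float).copy()
    out[mask] = np.interp(index[mask], index[~mask], values[~mask])
    return out

fill_field_sketch(np.array([1.0, 0.0, 3.0]), np.array([False, True, False]))
# -> array([1., 2., 3.])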

@@ -102,20 +102,20 @@ def filled_by_interpolation(self, mask):
)

def insert_virtual(self):
"""insert virtual observation on segments where observations were not found"""
"""insert virtual observations on segments where observations are missing"""

dt_theorical = median(self.time[1:] - self.time[:-1])
indices = self.get_missing_indices(dt_theorical)

logger.info("%d virtual observation will be added", indices.size)

# new observation size
# new observations size
size_obs_corrected = self.time.size + indices.size

# correction of indices for new size
# correction of indexes for new size
indices_corrected = indices + arange(indices.size)

# creating mask with indices
# creating mask with indexes
mask = zeros(size_obs_corrected, dtype=bool)
mask[indices_corrected] = 1

@@ -128,12 +128,12 @@ def filled_by_interpolation(self, mask):

def keep_tracks_by_date(self, date, nb_days):
"""
Find tracks which exist at date `date` and lasted at least `nb_days` after.
Find tracks that exist at date `date` and lasted at least `nb_days` after.

:param int,float date: date where the tracks must exist
:param int,float nb_days: number of time where the tracks must exist. Can be negative
:param int,float nb_days: number of times the tracks must exist. Can be negative

If nb_days is negative, it search a tracks which exist at the date,
If nb_days is negative, it searches a track that exists at the date,
but existed at least `nb_days` before the date
"""
