
Commit 91c97d4

Apply modification from a checker
1 parent 62f80cd commit 91c97d4

9 files changed: +81 −85 lines changed


src/py_eddy_tracker/grid/__init__.py

Lines changed: 6 additions & 4 deletions
@@ -343,7 +343,8 @@ def get_end(theend, shape, pad):
  self.slice_i_unpad = slice(abs(pad), -abs(pad))
  self.slice_i_pad = slice(ip0, ip1)

- def haversine_dist(self, lon1, lat1, lon2, lat2):
+ @staticmethod
+ def haversine_dist(lon1, lat1, lon2, lat2):
  """
  TO DO: change to use f2py version
  Haversine formula to calculate distance between two lon/lat points
@@ -367,7 +368,8 @@ def nearest_point(self, lon, lat):
  """
  return nearest(lon, lat, self._lon[0], self._lat[:, 0])

- def half_interp(self, h_one, h_two):
+ @staticmethod
+ def half_interp(h_one, h_two):
  """
  Speed up frequent operations of type 0.5 * (arr[:-1] + arr[1:])
  """
@@ -418,7 +420,7 @@ def uu2ur(uu_in, m_p, l_p):
  u_out[:, 1:-1] = self.half_interp(uu_in[:, :-1], uu_in[:, 1:])
  u_out[:, 0] = u_out[:, 1]
  u_out[:, -1] = u_out[:, -2]
- return (u_out.squeeze())
+ return u_out.squeeze()
  mshp, lshp = uu_in.shape
  return uu2ur(uu_in, mshp, lshp + 1)

@@ -429,7 +431,7 @@ def vv2vr(vv_in, m_p, l_p):
  v_out[1:-1] = self.half_interp(vv_in[:-1], vv_in[1:])
  v_out[0] = v_out[1]
  v_out[-1] = v_out[-2]
- return (v_out.squeeze())
+ return v_out.squeeze()
  mshp, lshp = vv_in.shape
  return vv2vr(vv_in, mshp + 1, lshp)
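Note: the first two hunks turn haversine_dist and half_interp into static methods, which is safe because neither touches instance state; the last two drop redundant parentheses around return values. A minimal, hedged sketch of the @staticmethod pattern (a toy class, not the project's real grid object):

    import numpy as np


    class Grid(object):
        """Toy stand-in used only to illustrate the change."""

        @staticmethod
        def half_interp(h_one, h_two):
            # No `self` needed: the result depends only on the two inputs.
            return 0.5 * (h_one + h_two)

        @staticmethod
        def haversine_dist(lon1, lat1, lon2, lat2):
            # Plain haversine on a 6371 km sphere (the docstring above notes an
            # f2py version is planned); arguments in degrees, result in metres.
            lon1, lat1, lon2, lat2 = map(np.radians, (lon1, lat1, lon2, lat2))
            a = (np.sin((lat2 - lat1) / 2) ** 2
                 + np.cos(lat1) * np.cos(lat2) * np.sin((lon2 - lon1) / 2) ** 2)
            return 2 * 6371000.0 * np.arcsin(np.sqrt(a))


    # Static methods are callable on the class as well as on an instance.
    print(Grid.haversine_dist(0.0, 0.0, 1.0, 0.0))  # roughly 111 km, in metres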

src/py_eddy_tracker/grid/aviso.py

Lines changed: 2 additions & 2 deletions
@@ -120,8 +120,8 @@ def __init__(self, aviso_file, the_domain,

  # zero_crossing, used for handling a longitude range that
  # crosses zero degree meridian
- if self.lonmin < 0 and self.lonmax >= 0 and 'MedSea' not in self.domain:
- if ((self.lonmax < self._lon.max()) and (self.lonmax > self._lon.min()) and (self.lonmin < self._lon.max()) and (self.lonmin > self._lon.min())):
+ if self.lonmin < 0 <= self.lonmax and 'MedSea' not in self.domain:
+ if (self.lonmax < self._lon.max()) and (self.lonmax > self._lon.min()) and (self.lonmin < self._lon.max()) and (self.lonmin > self._lon.min()):
  pass
  else:
  self.zero_crossing = True
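Note: the rewritten first condition relies on Python's chained comparison: lonmin < 0 <= lonmax evaluates each operand once and is equivalent to lonmin < 0 and lonmax >= 0. A tiny illustrative check with made-up bounds:

    # Values chosen only to exercise both outcomes.
    lonmin, lonmax = -5.0, 3.0
    assert (lonmin < 0 <= lonmax) == (lonmin < 0 and lonmax >= 0)

    lonmin, lonmax = 2.0, 8.0
    assert not (lonmin < 0 <= lonmax)   # domain entirely at positive longitudes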

src/py_eddy_tracker/observations.py

Lines changed: 2 additions & 2 deletions
@@ -247,7 +247,7 @@ def coherence(self, other):
  """Check coherence between two dataset
  """
  test = self.track_extra_variables == other.track_extra_variables
- test = self.track_array_variables == other.track_array_variables
+ test *= self.track_array_variables == other.track_array_variables
  test *= self.array_variables == other.array_variables
  return test

@@ -783,7 +783,7 @@ def create_variable(handler_nc, kwargs_variable, attr_variable,
  var.setncattr('min', var[:].min())
  var.setncattr('max', var[:].max())
  except ValueError:
- logging.warn('Data is empty')
+ logging.warning('Data is empty')

  def write_netcdf(self, path='./', filename='%(path)s/%(sign_type)s.nc'):
  """Write a netcdf with eddy obs

src/py_eddy_tracker/property_functions.py

Lines changed: 2 additions & 4 deletions
@@ -198,8 +198,7 @@ def get_uavg(eddy, contours, centlon_e, centlat_e, poly_eff, grd,

  # 3. Respect size range
  mask_i_sum = poly_i.contains_points(points).sum()
- if not (mask_i_sum >= pixel_min and
- mask_i_sum <= eddy.pixel_threshold[1]):
+ if not (pixel_min <= mask_i_sum <= eddy.pixel_threshold[1]):
  continue
  any_inner_contours = True

@@ -353,8 +352,7 @@ def collection_loop(contours, grd, eddy, x_i=None, c_s_xi=None):
  xilon, xilat = c_s_xi.find_nearest_contour(
  centlon_e, centlat_e, pixel=False)[3:5]
  eddy_radius_e = distance(centlon_e, centlat_e, xilon, xilat)
- if not (eddy_radius_e >= eddy.radmin and
- eddy_radius_e <= eddy.radmax):
+ if not (eddy.radmin <= eddy_radius_e <= eddy.radmax):
  continue

  # Get indices of centroid
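Note: both hunks fold a two-line lower/upper bound test into one inclusive range check. The quantity tested in the first hunk is the number of grid points inside a contour, counted with matplotlib's Path.contains_points; a hedged, self-contained sketch with an invented square contour and made-up thresholds:

    import numpy as np
    from matplotlib.path import Path

    # Hypothetical closed contour (unit square) and a coarse grid of points.
    poly_i = Path([(0, 0), (1, 0), (1, 1), (0, 1), (0, 0)])
    x, y = np.meshgrid(np.linspace(-0.5, 1.5, 9), np.linspace(-0.5, 1.5, 9))
    points = np.column_stack((x.ravel(), y.ravel()))

    # Count the grid points falling inside the contour.
    mask_i_sum = poly_i.contains_points(points).sum()

    # Inclusive size test in its chained form (thresholds invented).
    pixel_min, pixel_max = 5, 50
    print(mask_i_sum, pixel_min <= mask_i_sum <= pixel_max)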

src/py_eddy_tracker/property_objects.py

Lines changed: 11 additions & 14 deletions
@@ -108,8 +108,7 @@ def mle(self):
  def within_amplitude_limits(self):
  """
  """
- return (self.amplitude >= self.eddy.ampmin and
- self.amplitude <= self.eddy.ampmax)
+ return self.eddy.ampmin <= self.amplitude <= self.eddy.ampmax

  def _set_cyc_amplitude(self):
  """Get amplitude for cyclone
@@ -130,8 +129,7 @@ def all_pixels_below_h0(self, level):
  return False # i.e., with self.amplitude == 0
  else:
  self._set_local_extrema(1)
- if (self.local_extrema > 0 and
- self.local_extrema <= self.mle):
+ if 0 < self.local_extrema <= self.mle:
  self._set_cyc_amplitude()
  elif self.local_extrema > self.mle:
  lmi_j, lmi_i = where(self.local_extrema_inds)
@@ -157,8 +155,7 @@ def all_pixels_above_h0(self, level):
  return False
  else:
  self._set_local_extrema(-1)
- if (self.local_extrema > 0 and
- self.local_extrema <= self.mle):
+ if 0 < self.local_extrema <= self.mle:
  self._set_acyc_amplitude()

  elif self.local_extrema > self.mle:
@@ -175,7 +172,7 @@ def all_pixels_above_h0(self, level):
  levnp2 = -1e5
  jmax += self.eddy.jmin
  imax += self.eddy.imin
- return (imax, jmax)
+ return imax, jmax
  return False

  def _set_local_extrema(self, sign):
@@ -260,15 +257,15 @@ def __init__(self, contours):
  self.x_value = empty(nb_pt, dtype=coord_dtype)
  self.y_value = empty(nb_pt, dtype=coord_dtype)

- self.level_index = empty((nb_level), dtype='u4')
- self.nb_contour_per_level = empty((nb_level), dtype='u4')
+ self.level_index = empty(nb_level, dtype='u4')
+ self.nb_contour_per_level = empty(nb_level, dtype='u4')

- self.nb_pt_per_contour = empty((nb_contour), dtype='u4')
+ self.nb_pt_per_contour = empty(nb_contour, dtype='u4')

- self.x_min_per_contour = empty((nb_contour), dtype=coord_dtype)
- self.x_max_per_contour = empty((nb_contour), dtype=coord_dtype)
- self.y_min_per_contour = empty((nb_contour), dtype=coord_dtype)
- self.y_max_per_contour = empty((nb_contour), dtype=coord_dtype)
+ self.x_min_per_contour = empty(nb_contour, dtype=coord_dtype)
+ self.x_max_per_contour = empty(nb_contour, dtype=coord_dtype)
+ self.y_min_per_contour = empty(nb_contour, dtype=coord_dtype)
+ self.y_max_per_contour = empty(nb_contour, dtype=coord_dtype)

  #~ self._is_valid = empty((nb_contour), dtype='bool')
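Note: the last hunk drops the parentheses around the size passed to empty. They were harmless but misleading: in Python (nb_level) is just nb_level, not a one-element tuple, so the call already received a scalar. A short sketch of the distinction (variable names are illustrative):

    import numpy as np

    nb_level = 4

    # (nb_level) is not a tuple, so these two calls are identical.
    a = np.empty((nb_level), dtype='u4')
    b = np.empty(nb_level, dtype='u4')
    assert a.shape == b.shape == (4,)

    # Only a trailing comma actually builds a tuple.
    c = np.empty((nb_level,), dtype='u4')
    assert c.shape == (4,)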

src/py_eddy_tracker/tracking.py

Lines changed: 21 additions & 21 deletions
@@ -108,7 +108,7 @@ def merge_correspondance(self, other):
  i = where(other.datasets == array(self.datasets[-1]))[0]
  if len(i) != 1:
  raise Exception('More than one intersection')
-
+
  # Merge
  # Create a hash table
  translate = empty(other.current_id, dtype='u4')
@@ -194,7 +194,7 @@ def store_correspondance(self, i_previous, i_current, nb_real_obs):
  def append(self, *args, **kwargs):
  self.nb_link_max = max(self.nb_link_max, len(args[0]))
  super(Correspondances, self).append(*args, **kwargs)
-
+
  def id_generator(self, nb_id):
  """Generation id and incrementation
  """
@@ -221,7 +221,7 @@ def recense_dead_id_to_extend(self):
  list_previous_virtual_id.index(i) for i in virtual_dead_id]
  # Virtual obs which can be prolongate
  alive_virtual_obs = self.virtual_obs['segment_size'
- ][i_virtual_dead_id] < self.nb_virtual
+ ][i_virtual_dead_id] < self.nb_virtual
  nb_virtual_extend = alive_virtual_obs.sum()
  logging.debug('%d virtual obs will be prolongate on the '
  'next step', nb_virtual_extend)
@@ -230,7 +230,7 @@ def recense_dead_id_to_extend(self):
  self.previous_virtual_obs = self.virtual_obs
  # Creation of an virtual step for dead one
  self.virtual_obs = VirtualEddiesObservations(
- size=nb_dead + nb_virtual_extend,
+ size=nb_dead + nb_virtual_extend,
  track_extra_variables=self.previous_obs.track_extra_variables,
  track_array_variables=self.previous_obs.track_array_variables,
  array_variables=self.previous_obs.array_variables)
@@ -255,27 +255,27 @@ def recense_dead_id_to_extend(self):
  self.virtual_obs['dlon'][:nb_dead] = obs_b['lon'] - obs_a['lon']
  self.virtual_obs['dlat'][:nb_dead] = obs_b['lat'] - obs_a['lat']
  self.virtual_obs['lon'][:nb_dead
- ] = obs_b['lon'] + self.virtual_obs['dlon'][:nb_dead]
+ ] = obs_b['lon'] + self.virtual_obs['dlon'][:nb_dead]
  self.virtual_obs['lat'][:nb_dead
- ] = obs_b['lat'] + self.virtual_obs['dlat'][:nb_dead]
+ ] = obs_b['lat'] + self.virtual_obs['dlat'][:nb_dead]
  # Id which are extended
  self.virtual_obs['track'][:nb_dead] = dead_id
  # Add previous virtual
  if nb_virtual_extend > 0:
  obs_to_extend = self.previous_virtual_obs.obs[i_virtual_dead_id
- ][alive_virtual_obs]
+ ][alive_virtual_obs]
  for key in obs_b.dtype.fields.keys():
  if key in ['lon', 'lat', 'time', 'track', 'segment_size',
  'dlon', 'dlat'] or 'contour_' in key:
  continue
  self.virtual_obs[key][nb_dead:] = obs_to_extend[key]
  self.virtual_obs['lon'][nb_dead:
- ] = obs_to_extend['lon'] + obs_to_extend['dlon']
+ ] = obs_to_extend['lon'] + obs_to_extend['dlon']
  self.virtual_obs['lat'][nb_dead:
- ] = obs_to_extend['lat'] + obs_to_extend['dlat']
+ ] = obs_to_extend['lat'] + obs_to_extend['dlat']
  self.virtual_obs['track'][nb_dead:] = obs_to_extend['track']
  self.virtual_obs['segment_size'][nb_dead:
- ] = obs_to_extend['segment_size']
+ ] = obs_to_extend['segment_size']
  # Count
  self.virtual_obs['segment_size'][:] += 1

@@ -318,7 +318,7 @@ def save(self, filename):
  # Create dimensions
  logging.debug('Create Dimensions "Nlink" : %d', self.nb_link_max)
  h_nc.createDimension('Nlink', self.nb_link_max)
-
+
  logging.debug('Create Dimensions "Nstep" : %d', nb_step)
  h_nc.createDimension('Nstep', nb_step)
  var_file_in = h_nc.createVariable(
@@ -333,16 +333,16 @@ def save(self, filename):

  var_nb_link = h_nc.createVariable(
  zlib=True, complevel=1,
- varname='nb_link', datatype='u2', dimensions=('Nstep'))
-
+ varname='nb_link', datatype='u2', dimensions='Nstep')
+
  for name, dtype in self.correspondance_dtype:
  if dtype is bool_:
  dtype = 'byte'
  h_nc.createVariable(zlib=True,
  complevel=1,
  varname=name,
  datatype=dtype,
- dimensions=('Nstep','Nlink'))
+ dimensions=('Nstep', 'Nlink'))

  for i, correspondance in enumerate(self):
  nb_elt = correspondance.shape[0]
@@ -389,8 +389,8 @@ def prepare_merging(self):
  # correspondance
  self.nb_obs_by_tracks[
  correspondance['id'][correspondance['virtual']]
- ] += correspondance['virtual_length'][
- correspondance['virtual']]
+ ] += correspondance['virtual_length'][
+ correspondance['virtual']]

  # Compute index of each tracks
  self.i_current_by_tracks = \
@@ -414,7 +414,7 @@ def merge(self, until=-1):
  track_extra_variables=self.current_obs.track_extra_variables,
  track_array_variables=self.current_obs.track_array_variables,
  array_variables=self.current_obs.array_variables,
- )
+ )

  # Calculate the index in each tracks, we compute in u4 and translate
  # in u2 (which are limited to 65535)
@@ -433,7 +433,7 @@ def merge(self, until=-1):

  # To know if the track start
  first_obs_save_in_tracks = zeros(self.i_current_by_tracks.shape,
- dtype=bool_)
+ dtype=bool_)

  for i, file_name in enumerate(self.datasets[1:]):
  if until != -1 and i >= until:
@@ -455,7 +455,7 @@ def merge(self, until=-1):
  for field in fields:
  var = field[0]
  eddies[var][index_final[m_first_obs]
- ] = self.previous_obs[var][index_in]
+ ] = self.previous_obs[var][index_in]
  # Increment
  self.i_current_by_tracks[i_id[m_first_obs]] += 1
  # Active this flag, we have only one first by tracks
@@ -469,7 +469,7 @@ def merge(self, until=-1):
  if m_virtual.any():
  # Incrementing index
  self.i_current_by_tracks[i_id[m_virtual]
- ] += self[i]['virtual_length'][m_virtual]
+ ] += self[i]['virtual_length'][m_virtual]
  # Get new index
  index_final = self.i_current_by_tracks[i_id]

@@ -480,7 +480,7 @@ def merge(self, until=-1):
  for field in fields:
  var = field[0]
  eddies[var][index_final
- ] = self.current_obs[var][index_current]
+ ] = self.current_obs[var][index_current]

  # Add increment for each index used
  self.i_current_by_tracks[i_id] += 1
src/py_eddy_tracker/tracking_objects.py

Lines changed: 17 additions & 17 deletions
@@ -51,13 +51,13 @@ def nearest(lon_pt, lat_pt, lon2d, lat2d):
  """
  try:
  i_x = int_(interp(lon_pt,
- lon2d,
- arange(len(lon2d)),
- left=0, right=-1))
+ lon2d,
+ arange(len(lon2d)),
+ left=0, right=-1))
  i_y = int_(interp(lat_pt,
- lat2d,
- arange(len(lat2d)),
- left=0, right=-1))
+ lat2d,
+ arange(len(lat2d)),
+ left=0, right=-1))
  except ValueError:
  logging.error('%s, %s', lat2d, lat_pt)
  raise ValueError()
@@ -68,6 +68,7 @@ class IdentificationList(object):
  """
  Class that holds list of eddy identify:
  """
+
  def __init__(self, sign_type, grd, date, **kwargs):
  """
  Initialise the list 'tracklist'
@@ -80,10 +81,10 @@ def __init__(self, sign_type, grd, date, **kwargs):
  self.the_domain = kwargs.get('THE_DOMAIN', 'Regional')
  self.track_extra_variables = kwargs.get('TRACK_EXTRA_VARIABLES', [])
  if self.track_extra_variables is None:
- self.track_extra_variables = []
+ self.track_extra_variables = []
  array_properties = kwargs.get('TRACK_ARRAY_VARIABLES', dict())
  if array_properties is None:
- array_properties = dict()
+ array_properties = dict()
  self.track_array_variables_sampling = array_properties.get('NB_SAMPLES', 0)
  self.track_array_variables = array_properties.get('VARIABLES', [])
  self.smoothing = kwargs.get('SMOOTHING', True)
@@ -113,7 +114,7 @@ def __init__(self, sign_type, grd, date, **kwargs):
  track_extra_variables=self.track_extra_variables,
  track_array_variables=self.track_array_variables_sampling,
  array_variables=self.track_array_variables
- )
+ )

  self.index = 0 # counter
  self.pad = 2
@@ -180,14 +181,14 @@ def create_variable(self, handler_nc, kwargs_variable,
  var.setncattr('min', var[:].min())
  var.setncattr('max', var[:].max())
  except ValueError:
- logging.warn('Data is empty')
+ logging.warning('Data is empty')

  def write_netcdf(self, path='./'):
  """Write a netcdf with eddy obs
  """
  eddy_size = len(self.observations)
  filename = '%s/%s_%s.nc' % (
- path,self.sign_type, self.date.strftime('%Y%m%d'))
+ path, self.sign_type, self.date.strftime('%Y%m%d'))
  with Dataset(filename, 'w', format='NETCDF4') as h_nc:
  logging.info('Create intermediary file %s', filename)
  # Create dimensions
@@ -208,7 +209,7 @@ def write_netcdf(self, path='./'):
  self.observations.obs[name],
  scale_factor=VAR_DESCR[name].get('scale_factor', None),
  add_offset=VAR_DESCR[name].get('add_offset', None)
- )
+ )

  # Add cyclonic information
  self.create_variable(
@@ -260,9 +261,9 @@ def set_mask_eff(self, contour, grd):
  mask for effective contour
  """
  self.points = array([grd.lon[self.slice_j,
- self.slice_i].ravel(),
- grd.lat[self.slice_j,
- self.slice_i].ravel()]).T
+ self.slice_i].ravel(),
+ grd.lat[self.slice_j,
+ self.slice_i].ravel()]).T
  # NOTE: Path.contains_points requires matplotlib 1.2 or higher
  self.mask_eff_1d = contour.contains_points(self.points)
  self.mask_eff_sum = self.mask_eff_1d.sum()
@@ -274,5 +275,4 @@ def reshape_mask_eff(self, grd):
  self.mask_eff = self.mask_eff_1d.reshape(shape)

  def check_pixel_count(self, nb_valid_pixel):
- return nb_valid_pixel >= self.pixel_threshold[0] and \
- nb_valid_pixel <= self.pixel_threshold[1]
+ return self.pixel_threshold[0] <= nb_valid_pixel <= self.pixel_threshold[1]
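Note: apart from whitespace realignment, logging.warning and chained-comparison cleanups, this file's nearest() helper is worth a closer look. It finds grid indices by interpolating the target coordinate onto an index array with numpy.interp and truncating to int; left=0 and right=-1 clamp points that fall outside the axis. A minimal sketch on an invented one-dimensional longitude axis:

    import numpy as np

    # Regular longitude axis, one value per degree (illustrative only).
    lon1d = np.arange(0.0, 360.0)

    def nearest_index(lon_pt):
        # Interpolate the point onto positions 0..N-1, clamping outside values.
        return np.int_(np.interp(lon_pt, lon1d, np.arange(len(lon1d)),
                                 left=0, right=-1))

    print(nearest_index(12.3))   # 12 (truncated towards the lower neighbour)
    print(nearest_index(-5.0))   # 0  (clamped below the axis)
    print(nearest_index(400.0))  # -1 (clamped above the axis)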
