forked from AntSimi/py-eddy-tracker
-
Notifications
You must be signed in to change notification settings - Fork 2
Expand file tree
/
Copy pathEddyTracking
More file actions
146 lines (119 loc) · 5.75 KB
/
EddyTracking
File metadata and controls
146 lines (119 loc) · 5.75 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Track eddy with Identification file produce with EddyIdentification
"""
import datetime as dt
import logging
from os import mkdir
from os.path import basename, dirname, exists

from yaml import load as yaml_load
from yaml import safe_load

from py_eddy_tracker import EddyParser
from py_eddy_tracker.grid import browse_dataset_in
from py_eddy_tracker.tracking import Correspondances
def usage():
    """Parse the command line and the yaml configuration file.

    Returns a 5-tuple:
        (config, save_correspondance_and_stop, correspondance_in,
         correspondance_out, blank_period)
    where ``config`` is the dict loaded from the yaml file,
    ``correspondance_in``/``correspondance_out`` are filenames (or None)
    and ``blank_period`` is the number of trailing detections to drop.
    """
    parser = EddyParser(
        "Tool to use identification step to compute tracking")
    parser.add_argument('yaml_file',
                        help='Yaml file to configure py-eddy-tracker')
    parser.add_argument('--correspondance_in',
                        help='Filename of saved correspondance')
    parser.add_argument('--correspondance_out',
                        help='Filename to save correspondance')
    parser.add_argument('--save_correspondance_and_stop',
                        action='store_true',
                        help='Stop tracking after correspondance computation,'
                             ' merging can be done with EddyFinalTracking')
    parser.add_argument('--blank_period',
                        type=int,
                        default=0,
                        help='Nb of detection which will not use at the end of the period')
    args = parser.parse_args()
    # Read yaml configuration file. safe_load only builds plain Python
    # objects; yaml.load without an explicit Loader can construct
    # arbitrary objects and is deprecated since PyYAML 5.1.
    with open(args.yaml_file, 'r') as stream:
        config = safe_load(stream)
    # A previous-correspondance file that does not exist is silently
    # dropped, so the run restarts from scratch instead of failing.
    if args.correspondance_in is not None and not exists(args.correspondance_in):
        args.correspondance_in = None
    return (config, args.save_correspondance_and_stop, args.correspondance_in,
            args.correspondance_out, args.blank_period)
if __name__ == '__main__':
    # Gather configuration from the command line + yaml file.
    CONFIG, SAVE_STOP, CORRESPONDANCES_IN, CORRESPONDANCES_OUT, BLANK_PERIOD = usage()
    # Create output directory
    SAVE_DIR = CONFIG['PATHS'].get('SAVE_DIR', None)
    if SAVE_DIR is not None and not exists(SAVE_DIR):
        mkdir(SAVE_DIR)
    # Command-line values take precedence; yaml PATHS entries are the fallback.
    YAML_CORRESPONDANCES_IN = CONFIG['PATHS'].get('CORRESPONDANCES_IN', None)
    YAML_CORRESPONDANCES_OUT = CONFIG['PATHS'].get('CORRESPONDANCES_OUT', None)
    if CORRESPONDANCES_IN is None:
        CORRESPONDANCES_IN = YAML_CORRESPONDANCES_IN
    if CORRESPONDANCES_OUT is None:
        CORRESPONDANCES_OUT = YAML_CORRESPONDANCES_OUT
    # Default output pattern when neither the CLI nor the yaml set one;
    # 'path' and 'sign_type' are filled from DICT_COMPLETION at save time.
    if YAML_CORRESPONDANCES_OUT is None and CORRESPONDANCES_OUT is None:
        CORRESPONDANCES_OUT = '{path}/{sign_type}_correspondances.nc'
    # Optional CLASS section: dynamically import MODULE and pick CLASS from it
    # (presumably a custom eddy-observation class — confirm against
    # Correspondances' class_method parameter).
    if 'CLASS' in CONFIG:
        CLASS = getattr(
            __import__(CONFIG['CLASS']['MODULE'], globals(), locals(), CONFIG['CLASS']['CLASS']),
            CONFIG['CLASS']['CLASS'])
    else:
        CLASS = None
    # Max number of consecutive virtual (interpolated) observations per segment.
    NB_VIRTUAL_OBS_MAX_BY_SEGMENT = int(CONFIG.get('VIRTUAL_LENGTH_MAX', 0))
    # FILES_PATTERN may be an explicit list of files or a single glob-like
    # pattern; either way, dates are parsed from filenames as '...c_YYYYMMDD.nc'.
    if isinstance(CONFIG['PATHS']['FILES_PATTERN'], list):
        DATASET_LIST = browse_dataset_in(
            data_dir=None,
            files_model=None,
            files=CONFIG['PATHS']['FILES_PATTERN'],
            date_regexp='.*c_([0-9]*?).nc',
            date_model='%Y%m%d')
    else:
        DATASET_LIST = browse_dataset_in(
            data_dir=dirname(CONFIG['PATHS']['FILES_PATTERN']),
            files_model=basename(CONFIG['PATHS']['FILES_PATTERN']),
            date_regexp='.*c_([0-9]*?).nc',
            date_model='%Y%m%d')
    # Drop the last BLANK_PERIOD detections from the end of the period.
    if BLANK_PERIOD > 0:
        DATASET_LIST = DATASET_LIST[:-BLANK_PERIOD]
        logging.info('Last %d files will be pop', BLANK_PERIOD)
    START_TIME = dt.datetime.now()
    logging.info('Start tracking on %d files', len(DATASET_LIST))
    # Build eddy correspondances between consecutive identification files,
    # optionally resuming from a previously saved correspondance file.
    CORRESPONDANCES = Correspondances(
        datasets=DATASET_LIST['filename'],
        virtual=NB_VIRTUAL_OBS_MAX_BY_SEGMENT,
        class_method=CLASS,
        previous_correspondance=CORRESPONDANCES_IN)
    CORRESPONDANCES.track()
    logging.info('Track finish')
    logging.info('Start merging')
    DATE_START, DATE_STOP = CORRESPONDANCES.period
    DICT_COMPLETION = dict(date_start=DATE_START, date_stop=DATE_STOP, date_prod=START_TIME,
                           path=SAVE_DIR, sign_type=CORRESPONDANCES.current_obs.sign_legend)
    CORRESPONDANCES.save(CORRESPONDANCES_OUT, DICT_COMPLETION)
    # --save_correspondance_and_stop: stop here; merging is done later
    # by EddyFinalTracking.
    # NOTE(review): bare exit() relies on the site module; sys.exit() would
    # be the robust spelling.
    if SAVE_STOP:
        exit()
    # Merge correspondance, only do if we stop and store just after compute of correspondance
    NB_OBS_MIN = int(CONFIG.get('TRACK_DURATION_MIN', 14))
    CORRESPONDANCES.prepare_merging()
    logging.info('The longest tracks have %d observations', CORRESPONDANCES.nb_obs_by_tracks.max())
    logging.info('The mean length is %d observations before filtering', CORRESPONDANCES.nb_obs_by_tracks.mean())
    # Observations never linked into any track are saved separately.
    CORRESPONDANCES.get_unused_data().write_netcdf(path=SAVE_DIR, filename='%(path)s/%(sign_type)s_untracked.nc')
    # Split tracks at NB_OBS_MIN: short tracks go to a separate file,
    # long tracks become the final product.
    SHORT_CORRESPONDANCES = CORRESPONDANCES._copy()
    SHORT_CORRESPONDANCES.shorter_than(size_max=NB_OBS_MIN)
    CORRESPONDANCES.longer_than(size_min=NB_OBS_MIN)
    FINAL_EDDIES = CORRESPONDANCES.merge()
    SHORT_TRACK = SHORT_CORRESPONDANCES.merge()
    # We flag obs
    # Virtual observations carry time == 0 after merge; mark them and fill
    # their fields by interpolation from neighbouring real observations.
    if CORRESPONDANCES.virtual:
        FINAL_EDDIES['virtual'][:] = FINAL_EDDIES['time'] == 0
        FINAL_EDDIES.filled_by_interpolation(FINAL_EDDIES['virtual'] == 1)
        SHORT_TRACK['virtual'][:] = SHORT_TRACK['time'] == 0
        SHORT_TRACK.filled_by_interpolation(SHORT_TRACK['virtual'] == 1)
    # Total running time
    FULL_TIME = dt.datetime.now() - START_TIME
    logging.info('Mean duration by loop : %s',
                 FULL_TIME / (len(DATASET_LIST) - 1))
    logging.info('Duration : %s', FULL_TIME)
    logging.info('Longer track saved have %d obs', CORRESPONDANCES.nb_obs_by_tracks.max())
    logging.info('The mean length is %d observations after filtering', CORRESPONDANCES.nb_obs_by_tracks.mean())
    # Write final products: long tracks and too-short tracks.
    FINAL_EDDIES.write_netcdf(path=SAVE_DIR)
    SHORT_TRACK.write_netcdf(filename='%(path)s/%(sign_type)s_track_too_short.nc', path=SAVE_DIR)