55"""Extract translatable strings from tracker templates and detectors/extensions"""
66
77from __future__ import print_function
8+
89import os
9- import sys
10- import tempfile
1110
1211# --- patch sys.path to make sure 'import roundup' finds correct version
1312import os .path as osp
13+ import sys
14+ import tempfile
1415
1516thisdir = osp .dirname (osp .abspath (__file__ ))
1617rootdir = osp .dirname (osp .dirname (thisdir ))
@@ -21,9 +22,9 @@
 # --/
 
 
-from roundup.i18n import _
 from roundup.cgi.TAL import talgettext
-from roundup.pygettext import make_escapes, TokenEater, tokenize
+from roundup.i18n import _
+from roundup.pygettext import TokenEater, make_escapes, tokenize
 
 try:
     import polib
3435 "The 'polib' module can be installed with pip.\n " ))
3536 polib = None
3637
38+
3739# from pygettext's main():
3840class Options :
3941 # constants
@@ -54,6 +56,7 @@ class Options:
     nodocstrings = {}
     toexclude = []  # TODO we should exclude all strings already found in some template
 
+
 tokeneater_options = Options()
 
 # name of message template file.
@@ -104,38 +107,39 @@ def run():
     make_escapes(not tokeneater_options.escape)
 
     pyfiles = []
-    for source in ["detectors", "extensions"] :
-        for root, dirs, files in os.walk(os.path.join("..", source)) :
-            pyfiles.extend([os.path.join(root, f) for f in files if f.endswith(".py")])
+    for source in ["detectors", "extensions"]:
+        for root, _dirs, files in os.walk(os.path.join("..", source)):
+            pyfiles.extend([os.path.join(root, f) for f in files if f.endswith(".py")])
 
-    eater = TokenEater(tokeneater_options)
+    eater = TokenEater(tokeneater_options)
 
-    for filename in pyfiles:
-        eater.set_filename(filename)
-        with open(filename, "r") as f:
+    for filename in pyfiles:
+        eater.set_filename(filename)
+        with open(filename, "r") as f:
             try:
                 for token in tokenize.generate_tokens(f.readline):
                     eater(*token)
             except tokenize.TokenError as e:
                 print('%s: %s, line %d, column %d' % (
                     e[0], filename, e[1][0], e[1][1]), file=sys.stderr)
-
-    with tempfile.NamedTemporaryFile("w") as tf:
+
+    with tempfile.NamedTemporaryFile("w") as tf:
         eater.write(tf)
-        tf.seek(0)
+        tf.seek(0)
         p1 = polib.pofile(TEMPLATE_FILE)
         p2 = polib.pofile(tf.name)
 
-        p2_msg_ids = set([e.msgid for e in p2])
-        for e in p1:
-            if e.msgid in p2_msg_ids:
-                p2_e = p2.find(e.msgid)
-                e.occurrences.extend(p2_e.occurrences)
-                p2_msg_ids.remove(e.msgid)
+        p2_msg_ids = {e.msgid for e in p2}
+        for e in p1:
+            if e.msgid in p2_msg_ids:
+                p2_e = p2.find(e.msgid)
+                e.occurrences.extend(p2_e.occurrences)
+                p2_msg_ids.remove(e.msgid)
+
+        for msgid in p2_msg_ids:
+            p1.append(p2.find(msgid))
+        p1.save()
 
-        for msgid in p2_msg_ids:
-            p1.append(p2.find(msgid))
-        p1.save()
 
 if __name__ == "__main__":
     run()
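
The token loop in the last hunk drives roundup.pygettext's TokenEater over every
detector and extension file. As a rough illustration of what that pass
accomplishes, here is a minimal sketch using only the stdlib tokenize module;
it is not roundup's actual TokenEater (which also handles docstrings, keyword
options, and adjacent string literals), and the find_translatable helper is
hypothetical.

# Minimal sketch, NOT roundup.pygettext's TokenEater: scan one Python
# source file for literal strings passed to the gettext alias _(...).
import ast
import tokenize


def find_translatable(filename):  # hypothetical helper, illustration only
    """Return (lineno, text) pairs for every _("...") call in filename."""
    with open(filename, "r") as f:
        tokens = list(tokenize.generate_tokens(f.readline))
    found = []
    for i, tok in enumerate(tokens):
        # Match the token pattern NAME('_'), OP('('), STRING.
        if (tok.type == tokenize.NAME and tok.string == "_"
                and i + 2 < len(tokens)
                and tokens[i + 1].string == "("
                and tokens[i + 2].type == tokenize.STRING):
            lineno = tokens[i + 2].start[0]
            found.append((lineno, ast.literal_eval(tokens[i + 2].string)))
    return found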
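
The polib hunk deduplicates by msgid, so a string used in both a template and a
detector ends up as one catalog entry carrying the union of its source
locations; entries seen only in the Python sources are appended. Below is a
standalone sketch of that merge, assuming polib is installed; merge_pot and
both file names are hypothetical.

# Standalone sketch of the merge step above; merge_pot and the file
# names are hypothetical examples.
import polib


def merge_pot(template_path, extracted_path):
    """Fold entries from extracted_path into template_path, in place."""
    p1 = polib.pofile(template_path)   # e.g. strings from the HTML templates
    p2 = polib.pofile(extracted_path)  # e.g. strings from detectors/extensions

    pending = {entry.msgid for entry in p2}
    for entry in p1:
        if entry.msgid in pending:
            # Shared msgid: keep one entry, merge the source occurrences.
            entry.occurrences.extend(p2.find(entry.msgid).occurrences)
            pending.remove(entry.msgid)

    # msgids found only in the second file become new entries.
    for msgid in pending:
        p1.append(p2.find(msgid))
    p1.save()


merge_pot("messages.pot", "python.pot")

Keeping a single entry per msgid matters because msgfmt treats duplicate
message definitions as an error; merging the occurrences preserves every
source location without duplicating the entry.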