Commit 85e6001

Handle ResourceWarning in admin.py do_export

ResourceWarning: unclosed file <_io.TextIOWrapper name='_test_export/priority.csv' mode='r' encoding='UTF-8'>

1 parent afce33b · commit 85e6001
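For background, a minimal sketch of how this kind of warning shows up (not part of the commit; the path and data are hypothetical, echoing the warning text above). ResourceWarning is silenced by default, so it has to be enabled first:

    import gc
    import warnings

    # ResourceWarning is ignored by default; enable it so unclosed files are
    # reported (same effect as `python -W default::ResourceWarning` or `python -X dev`).
    warnings.simplefilter("default", ResourceWarning)

    def leak():
        f = open("priority.csv", "w")   # hypothetical path
        f.write("1:critical\n")
        # no f.close() -- the open file object is abandoned here

    leak()
    # On CPython the warning usually appears as soon as leak() returns; the
    # explicit collection only forces finalization on other implementations.
    gc.collect()
    # ResourceWarning: unclosed file <_io.TextIOWrapper name='priority.csv' mode='w' ...>

Switching the export code from bare open()/close() pairs to with-statements, as this commit does, is what makes the warning go away.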

File tree: 1 file changed (+53, -56 lines)

roundup/admin.py

Lines changed: 53 additions & 56 deletions
@@ -1310,67 +1310,64 @@ class colon_separated(csv.excel):
                 sys.stdout.write('Exporting %s WITHOUT the files\r\n' %
                                  classname)
 
-            f = open(os.path.join(dir, classname+'.csv'), 'w')
-            writer = csv.writer(f, colon_separated)
-
-            properties = cl.getprops()
-            propnames = cl.export_propnames()
-            fields = propnames[:]
-            fields.append('is retired')
-            writer.writerow(fields)
-
-            # If a node has a key, sort all nodes by key
-            # with retired nodes first. Retired nodes
-            # must occur before a non-retired node with
-            # the same key. Otherwise you get an
-            # IntegrityError: UNIQUE constraint failed:
-            # _class.__retired__, _<class>._<keyname>
-            # on imports to rdbms.
-            all_nodes = cl.getnodeids()
-
-            classkey = cl.getkey()
-            if classkey: # False sorts before True, so negate is_retired
-                keysort = lambda i: (cl.get(i, classkey),
-                                     not cl.is_retired(i))
-                all_nodes.sort(key=keysort)
-            # if there is no classkey no need to sort
-
-            for nodeid in all_nodes:
-                if self.verbose:
-                    sys.stdout.write('\rExporting %s - %s' %
-                                     (classname, nodeid))
-                    sys.stdout.flush()
-                node = cl.getnode(nodeid)
-                exp = cl.export_list(propnames, nodeid)
-                lensum = sum([len(repr_export(node[p])) for p in propnames])
-                # for a safe upper bound of field length we add
-                # difference between CSV len and sum of all field lengths
-                d = sum([len(x) for x in exp]) - lensum
-                if not d > 0:
-                    raise AssertionError("Bad assertion d > 0")
-                for p in propnames:
-                    ll = len(repr_export(node[p])) + d
-                    if ll > max_len:
-                        max_len = ll
-                writer.writerow(exp)
-                if export_files and hasattr(cl, 'export_files'):
-                    cl.export_files(dir, nodeid)
-
-            # close this file
-            f.close()
+            with open(os.path.join(dir, classname+'.csv'), 'w') as f:
+                writer = csv.writer(f, colon_separated)
+
+                properties = cl.getprops()
+                propnames = cl.export_propnames()
+                fields = propnames[:]
+                fields.append('is retired')
+                writer.writerow(fields)
+
+                # If a node has a key, sort all nodes by key
+                # with retired nodes first. Retired nodes
+                # must occur before a non-retired node with
+                # the same key. Otherwise you get an
+                # IntegrityError: UNIQUE constraint failed:
+                # _class.__retired__, _<class>._<keyname>
+                # on imports to rdbms.
+                all_nodes = cl.getnodeids()
+
+                classkey = cl.getkey()
+                if classkey: # False sorts before True, so negate is_retired
+                    keysort = lambda i: (cl.get(i, classkey),
+                                         not cl.is_retired(i))
+                    all_nodes.sort(key=keysort)
+                # if there is no classkey no need to sort
+
+                for nodeid in all_nodes:
+                    if self.verbose:
+                        sys.stdout.write('\rExporting %s - %s' %
+                                         (classname, nodeid))
+                        sys.stdout.flush()
+                    node = cl.getnode(nodeid)
+                    exp = cl.export_list(propnames, nodeid)
+                    lensum = sum([len(repr_export(node[p])) for p in propnames])
+                    # for a safe upper bound of field length we add
+                    # difference between CSV len and sum of all field lengths
+                    d = sum([len(x) for x in exp]) - lensum
+                    if not d > 0:
+                        raise AssertionError("Bad assertion d > 0")
+                    for p in propnames:
+                        ll = len(repr_export(node[p])) + d
+                        if ll > max_len:
+                            max_len = ll
+                    writer.writerow(exp)
+                    if export_files and hasattr(cl, 'export_files'):
+                        cl.export_files(dir, nodeid)
 
             # export the journals
-            jf = open(os.path.join(dir, classname+'-journals.csv'), 'w')
-            if self.verbose:
-                sys.stdout.write("\nExporting Journal for %s\n" % classname)
-                sys.stdout.flush()
-            journals = csv.writer(jf, colon_separated)
-            for row in cl.export_journals():
-                journals.writerow(row)
-            jf.close()
+            with open(os.path.join(dir, classname+'-journals.csv'), 'w') as jf:
+                if self.verbose:
+                    sys.stdout.write("\nExporting Journal for %s\n" % classname)
+                    sys.stdout.flush()
+                journals = csv.writer(jf, colon_separated)
+                for row in cl.export_journals():
+                    journals.writerow(row)
         if max_len > self.db.config.CSV_FIELD_SIZE:
             print("Warning: config csv_field_size should be at least %s" %
                   max_len, file=sys.stderr)
+        jf.close()
         return 0
 
     def do_exporttables(self, args):
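Both changed blocks make the same move: a manually paired open()/close() becomes a with-statement, so the CSV file is closed even when an exception is raised partway through the export. A minimal sketch of the before/after pattern (the export_rows() helper and its arguments are hypothetical, standing in for the writer loop above):

    import csv
    import os

    def export_rows(dir, classname, rows):
        # Before: if writer.writerow() raises, f.close() is never reached and
        # the interpreter later emits "ResourceWarning: unclosed file ...".
        #
        #     f = open(os.path.join(dir, classname + '.csv'), 'w')
        #     writer = csv.writer(f)
        #     for row in rows:
        #         writer.writerow(row)
        #     f.close()
        #
        # After: the with-block closes the file on normal exit and on error alike.
        with open(os.path.join(dir, classname + '.csv'), 'w') as f:
            writer = csv.writer(f)
            for row in rows:
                writer.writerow(row)

    os.makedirs('_test_export', exist_ok=True)   # directory name from the warning text above
    export_rows('_test_export', 'priority', [['1', 'critical']])

The same guarantee could be had with try/finally, but the with form keeps the close next to the open and needs no extra bookkeeping.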
