Commit d072e0b

Add support for prev/next/self links when returning paginated results.

To do this:

1) Change the "data" envelope from an array to a dict.
2) Move the "data" array to the "collection" property, which is an
   array of elements in the collection.
3) Add an @links dict keyed by link relation: self, next, prev. Each
   relation is an array of dicts with uri and rel keys. In this case
   there is only one element per relation, but nothing prevents a
   relation from having multiple URLs, so this follows the formatting
   needed for the general case. Relations are present only where they
   make sense: the first page has no prev and the last page has no
   next.
4) Add @total_size with the number of elements selected before
   pagination. This replicates the data in the X-Count-Total header.

Changed the index to start at 1, so the first page is page_index 1,
not page_index 0. (So I am no longer surprised when I set page_index
to 1 and am missing a bunch of records. 8-)

Also a small fixup: the JSON response now ends with a newline, so when
printing the data or using curl, anything printed after the JSON
output (like a shell prompt) starts on a new line.

Tests added for all cases.

Parent commit: a1f4472
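For illustration only, a sketch of the response shape this commit
produces for the second page of a seven-element collection with
page_size=2. The ids and tracker URL are hypothetical, borrowed from
the values used in test/rest_common.py:

    {
        "data": {
            "collection": [
                {"id": "3",
                 "link": "http://tracker.example/cgi-bin/roundup.cgi/bugs/rest/data/issue/3"},
                {"id": "4",
                 "link": "http://tracker.example/cgi-bin/roundup.cgi/bugs/rest/data/issue/4"}
            ],
            "@links": {
                "self": [{"rel": "self",
                          "uri": "http://tracker.example/cgi-bin/roundup.cgi/bugs/rest/data/issue?page_index=2&page_size=2"}],
                "next": [{"rel": "next",
                          "uri": "http://tracker.example/cgi-bin/roundup.cgi/bugs/rest/data/issue?page_index=3&page_size=2"}],
                "prev": [{"rel": "prev",
                          "uri": "http://tracker.example/cgi-bin/roundup.cgi/bugs/rest/data/issue?page_index=1&page_size=2"}]
            },
            "@total_size": 7
        }
    }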

File tree: 3 files changed (+154, -41 lines)


CHANGES.txt (4 additions, 0 deletions)

@@ -61,6 +61,10 @@ Features:
 - If dict2xml.py is installed, the rest interface can produce an XML
   format response if the accept header is set to text/xml.
   (See: https://pypi.org/project/dict2xml/)
+- When retrieving collection move list of collection elements to
+  collection property. Add @links property with self, next and prev
+  links (where needed). Add @total_size with size of entire
+  collection (unpaginated). Pagination index starts at 1 not 0.
   (John Rouillard)
 - issue2550833: the export_csv web action now returns labels/names
   rather than id's. Replace calls to export_csv with the export_csv_id

roundup/rest.py (35 additions, 11 deletions)
@@ -484,7 +484,7 @@ def get_collection(self, class_name, input):
         filter_props = {}
         page = {
             'size': None,
-            'index': None
+            'index': 1   # setting just size starts at page 1
         }
         for form_field in input.value:
             key = form_field.name
@@ -506,21 +506,43 @@ def get_collection(self, class_name, input):
         obj_list = class_obj.filter(None, filter_props)

         # extract result from data
-        result = [
+        result = {}
+        result['collection'] = [
             {'id': item_id, 'link': class_path + item_id}
             for item_id in obj_list
             if self.db.security.hasPermission(
                 'View', self.db.getuid(), class_name, itemid=item_id
             )
         ]
-
-        # pagination
-        if page['size'] is not None and page['index'] is not None:
-            page_start = max(page['index'] * page['size'], 0)
-            page_end = min(page_start + page['size'], len(result))
-            result = result[page_start:page_end]
-
-        self.client.setHeader("X-Count-Total", str(len(result)))
+        result_len = len(result['collection'])
+
+        # pagination - page_index from 1...N
+        if page['size'] is not None:
+            page_start = max((page['index'] - 1) * page['size'], 0)
+            page_end = min(page_start + page['size'], result_len)
+            result['collection'] = result['collection'][page_start:page_end]
+            result['@links'] = {}
+            for rel in ('next', 'prev', 'self'):
+                if rel == 'next':
+                    # if current index includes all data, continue
+                    if page['index'] * page['size'] > result_len: continue
+                    index = page['index'] + 1
+                if rel == 'prev':
+                    if page['index'] <= 1: continue
+                    index = page['index'] - 1
+                if rel == 'self': index = page['index']
+
+                result['@links'][rel] = []
+                result['@links'][rel].append({
+                    'rel': rel,
+                    'uri': "%s/%s?page_index=%s&" % (self.data_path,
+                        class_name, index)
+                    + '&'.join(["%s=%s" % (field.name, field.value)
+                                for field in input.value
+                                if field.name != "page_index"])})
+
+        result['@total_size'] = result_len
+        self.client.setHeader("X-Count-Total", str(result_len))
         return 200, result

     @Routing.route("/data/<:class_name>/<:item_id>", 'GET')
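The pagination hunk above is easiest to see from the client side: a
caller pages through a collection by following the next relation until
it disappears. This is only a sketch, assuming a tracker reachable at
the URL used in the tests, the third-party requests library, and no
authentication; none of these names come from the commit itself:

    # Hypothetical pager built on the new @links envelope.
    import requests

    url = ("http://tracker.example/cgi-bin/roundup.cgi/"
           "bugs/rest/data/issue?page_size=50")   # page_index defaults to 1
    items = []
    while url:
        data = requests.get(url).json()["data"]
        items.extend(data["collection"])
        # The last page carries no "next" relation, ending the loop.
        links = data.get("@links", {})
        url = links["next"][0]["uri"] if "next" in links else None

    assert len(items) == data["@total_size"]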
@@ -1356,7 +1378,9 @@ def dispatch(self, method, uri, input):
             self.client.response_code = 406
             output = "Content type is not accepted by client"

-        return output
+        # Make output json end in a newline to
+        # separate from following text in logs etc..
+        return output + "\n"


 class RoundupJSONEncoder(json.JSONEncoder):
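The trailing newline in dispatch() is purely cosmetic but handy on a
terminal; a minimal illustration (the body string here is made up):

    import sys

    # dispatch() now returns the serialized body plus "\n", so whatever
    # is written next (a shell prompt after curl, the next log line)
    # starts on a fresh line instead of running into the JSON.
    body = '{"data": {"@total_size": 3}}' + "\n"
    sys.stdout.write(body)
    sys.stdout.write("$ ")   # stand-in for a following shell prompt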

test/rest_common.py (115 additions, 30 deletions)
@@ -92,7 +92,13 @@ def testGet(self):
         # Retrieve all three users.
         results = self.server.get_collection('user', self.empty_form)
         self.assertEqual(self.dummy_client.response_code, 200)
-        self.assertEqual(len(results['data']), 3)
+        self.assertEqual(len(results['data']['collection']), 3)
+        self.assertEqual(results['data']['@total_size'], 3)
+        print self.dummy_client.additional_headers["X-Count-Total"]
+        self.assertEqual(
+            self.dummy_client.additional_headers["X-Count-Total"],
+            "3"
+        )

         # Obtain data for 'joe'.
         results = self.server.get_element('user', self.joeid, self.empty_form)
@@ -169,10 +175,13 @@ def testFilter(self):
         ]
         results = self.server.get_collection('issue', form)
         self.assertEqual(self.dummy_client.response_code, 200)
-        self.assertIn(get_obj(base_path, issue_open_norm), results['data'])
-        self.assertIn(get_obj(base_path, issue_open_crit), results['data'])
+        self.assertIn(get_obj(base_path, issue_open_norm),
+                      results['data']['collection'])
+        self.assertIn(get_obj(base_path, issue_open_crit),
+                      results['data']['collection'])
         self.assertNotIn(
-            get_obj(base_path, issue_closed_norm), results['data']
+            get_obj(base_path, issue_closed_norm),
+            results['data']['collection']
         )

         # Retrieve all issue status=closed and priority=critical
@@ -183,10 +192,13 @@ def testFilter(self):
         ]
         results = self.server.get_collection('issue', form)
         self.assertEqual(self.dummy_client.response_code, 200)
-        self.assertIn(get_obj(base_path, issue_closed_crit), results['data'])
-        self.assertNotIn(get_obj(base_path, issue_open_crit), results['data'])
+        self.assertIn(get_obj(base_path, issue_closed_crit),
+                      results['data']['collection'])
+        self.assertNotIn(get_obj(base_path, issue_open_crit),
+                         results['data']['collection'])
         self.assertNotIn(
-            get_obj(base_path, issue_closed_norm), results['data']
+            get_obj(base_path, issue_closed_norm),
+            results['data']['collection']
         )

         # Retrieve all issue status=closed and priority=normal,critical
@@ -197,60 +209,133 @@ def testFilter(self):
         ]
         results = self.server.get_collection('issue', form)
         self.assertEqual(self.dummy_client.response_code, 200)
-        self.assertIn(get_obj(base_path, issue_closed_crit), results['data'])
-        self.assertIn(get_obj(base_path, issue_closed_norm), results['data'])
-        self.assertNotIn(get_obj(base_path, issue_open_crit), results['data'])
-        self.assertNotIn(get_obj(base_path, issue_open_norm), results['data'])
+        self.assertIn(get_obj(base_path, issue_closed_crit),
+                      results['data']['collection'])
+        self.assertIn(get_obj(base_path, issue_closed_norm),
+                      results['data']['collection'])
+        self.assertNotIn(get_obj(base_path, issue_open_crit),
+                         results['data']['collection'])
+        self.assertNotIn(get_obj(base_path, issue_open_norm),
+                         results['data']['collection'])

     def testPagination(self):
         """
-        Retrieve all three users
-        obtain data for 'joe'
+        Test pagination. page_size is required and is an integer
+        starting at 1. page_index is optional and is an integer
+        starting at 1. Verify that pagination links are present
+        if paging, @total_size and X-Count-Total header match
+        number of items.
         """
         # create sample data
-        for i in range(0, random.randint(5, 10)):
+        for i in range(0, random.randint(8, 15)):
             self.db.issue.create(title='foo' + str(i))

         # Retrieving all the issues
         results = self.server.get_collection('issue', self.empty_form)
         self.assertEqual(self.dummy_client.response_code, 200)
-        total_length = len(results['data'])
+        total_length = len(results['data']['collection'])
+        # Verify no pagination links if paging not used
+        self.assertFalse('@links' in results['data'])
+        self.assertEqual(results['data']['@total_size'], total_length)
+        self.assertEqual(
+            self.dummy_client.additional_headers["X-Count-Total"],
+            str(total_length)
+        )
+

-        # Pagination will be 70% of the total result
-        page_size = total_length * 70 // 100
-        page_zero_expected = page_size
-        page_one_expected = total_length - page_zero_expected
+        # Pagination will be 45% of the total result
+        # So 2 full pages and 1 partial page.
+        page_size = total_length * 45 // 100
+        page_one_expected = page_size
+        page_two_expected = page_size
+        page_three_expected = total_length - (2*page_one_expected)
+        base_url = "http://tracker.example/cgi-bin/roundup.cgi/" \
+                   "bugs/rest/data/issue"

-        # Retrieve page 0
+        # Retrieve page 1
         form = cgi.FieldStorage()
         form.list = [
             cgi.MiniFieldStorage('page_size', page_size),
-            cgi.MiniFieldStorage('page_index', 0)
+            cgi.MiniFieldStorage('page_index', 1)
         ]
         results = self.server.get_collection('issue', form)
         self.assertEqual(self.dummy_client.response_code, 200)
-        self.assertEqual(len(results['data']), page_zero_expected)
+        self.assertEqual(len(results['data']['collection']),
+                         page_one_expected)
+        self.assertTrue('@links' in results['data'])
+        self.assertTrue('self' in results['data']['@links'])
+        self.assertTrue('next' in results['data']['@links'])
+        self.assertFalse('prev' in results['data']['@links'])
+        self.assertEqual(results['data']['@links']['self'][0]['uri'],
+            "%s?page_index=1&page_size=%s" % (base_url, page_size))
+        self.assertEqual(results['data']['@links']['next'][0]['uri'],
+            "%s?page_index=2&page_size=%s" % (base_url, page_size))

-        # Retrieve page 1
+        page_one_results = results  # save this for later
+
+        # Retrieve page 2
         form = cgi.FieldStorage()
         form.list = [
             cgi.MiniFieldStorage('page_size', page_size),
-            cgi.MiniFieldStorage('page_index', 1)
+            cgi.MiniFieldStorage('page_index', 2)
         ]
         results = self.server.get_collection('issue', form)
         self.assertEqual(self.dummy_client.response_code, 200)
-        self.assertEqual(len(results['data']), page_one_expected)
-
-        # Retrieve page 2
+        self.assertEqual(len(results['data']['collection']), page_two_expected)
+        self.assertTrue('@links' in results['data'])
+        self.assertTrue('self' in results['data']['@links'])
+        self.assertTrue('next' in results['data']['@links'])
+        self.assertTrue('prev' in results['data']['@links'])
+        self.assertEqual(results['data']['@links']['self'][0]['uri'],
+            "http://tracker.example/cgi-bin/roundup.cgi/bugs/rest/data/issue?page_index=2&page_size=%s" % page_size)
+        self.assertEqual(results['data']['@links']['next'][0]['uri'],
+            "http://tracker.example/cgi-bin/roundup.cgi/bugs/rest/data/issue?page_index=3&page_size=%s" % page_size)
+        self.assertEqual(results['data']['@links']['prev'][0]['uri'],
+            "http://tracker.example/cgi-bin/roundup.cgi/bugs/rest/data/issue?page_index=1&page_size=%s" % page_size)
+        self.assertEqual(results['data']['@links']['self'][0]['rel'],
+                         'self')
+        self.assertEqual(results['data']['@links']['next'][0]['rel'],
+                         'next')
+        self.assertEqual(results['data']['@links']['prev'][0]['rel'],
+                         'prev')
+
+        # Retrieve page 3
         form = cgi.FieldStorage()
         form.list = [
             cgi.MiniFieldStorage('page_size', page_size),
-            cgi.MiniFieldStorage('page_index', 2)
+            cgi.MiniFieldStorage('page_index', 3)
         ]
         results = self.server.get_collection('issue', form)
         self.assertEqual(self.dummy_client.response_code, 200)
-        self.assertEqual(len(results['data']), 0)
-
+        self.assertEqual(len(results['data']['collection']), page_three_expected)
+        self.assertTrue('@links' in results['data'])
+        self.assertTrue('self' in results['data']['@links'])
+        self.assertFalse('next' in results['data']['@links'])
+        self.assertTrue('prev' in results['data']['@links'])
+        self.assertEqual(results['data']['@links']['self'][0]['uri'],
+            "http://tracker.example/cgi-bin/roundup.cgi/bugs/rest/data/issue?page_index=3&page_size=%s" % page_size)
+        self.assertEqual(results['data']['@links']['prev'][0]['uri'],
+            "http://tracker.example/cgi-bin/roundup.cgi/bugs/rest/data/issue?page_index=2&page_size=%s" % page_size)
+
+        # Verify that page_index is optional
+        # Should start at page 1
+        form = cgi.FieldStorage()
+        form.list = [
+            cgi.MiniFieldStorage('page_size', page_size),
+        ]
+        results = self.server.get_collection('issue', form)
+        self.assertEqual(self.dummy_client.response_code, 200)
+        self.assertEqual(len(results['data']['collection']), page_size)
+        self.assertTrue('@links' in results['data'])
+        self.assertTrue('self' in results['data']['@links'])
+        self.assertTrue('next' in results['data']['@links'])
+        self.assertFalse('prev' in results['data']['@links'])
+        self.assertEqual(page_one_results, results)
+
+        # FIXME add tests for out of range once we decide what response
+        # is needed to:
+        # page_size < 0
+        # page_index < 0

     def testEtagProcessing(self):
         '''
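As a worked example of the page arithmetic the new test relies on,
take total_length = 10, a hypothetical count inside the randint(8, 15)
range used above:

    # 45% sizing with 10 issues: two full pages and one partial page,
    # matching page_one/two/three_expected in testPagination.
    total_length = 10
    page_size = total_length * 45 // 100                 # 4
    page_three_expected = total_length - 2 * page_size   # 2
    # Pages hold 4, 4 and 2 elements; this split holds for every
    # count from 8 through 15.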
