|
20 | 20 |
|
21 | 21 | try: # python 3 |
22 | 22 | from urllib.request import urlopen, FancyUrlOpener, Request # noqa |
23 | | - from urllib.parse import urlencode, quote |
| 23 | + from urllib.parse import urlparse, urlencode, quote |
24 | 24 | from urllib.error import HTTPError |
25 | 25 | except ImportError: # python 2.7 |
26 | 26 | from urllib import urlencode, FancyURLopener, quote |
@@ -283,20 +283,23 @@ def previous_page(self): |
283 | 283 | return FSRequest.request(self.previous, {}, self.client, Pager) |
def get_page(self, n):
    """Fetch page *n* of a paginated result set.

    Rewrites the ``page`` query parameter of ``self.next`` and issues a
    new request for that URL.

    :param n: page number to request (substituted into the query string).
    :returns: the result of ``FSRequest.request`` for the rebuilt URL
        wrapped in a ``Pager``, or ``None`` when ``self.next`` is empty
        (nothing to derive a page URL from).
    """
    url = self.next
    if not url:
        # No "next" link available -- nothing to paginate from.
        return None

    uri = urlparse(url)
    # Use the parsed query component directly instead of scanning the
    # ParseResult tuple for a member containing the substring "query"
    # (the original scan left its index variable unbound when nothing
    # matched, raising NameError).
    params = uri.query.split("&")
    for index, item in enumerate(params):
        # Match only the exact "page" parameter; a bare substring test
        # would also clobber e.g. "per_page=...".
        if item.startswith("page="):
            params[index] = 'page=' + str(n)

    url = uri.scheme + '://' + uri.netloc + uri.path + '?' + "&".join(params)
    return FSRequest.request(url, {}, self.client, Pager)
300 | 303 |
|
301 | 304 |
|
302 | 305 |
|
|
0 commit comments