Merge pull request #164 from codl/link-flakeid

more robust handling of pagination Link headers
Lorenz Diener 2019-04-27 17:20:20 +02:00 committed by GitHub
commit a264154073
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
5 changed files with 42 additions and 5 deletions
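For context: Mastodon's API paginates list endpoints through an HTTP Link header whose "next" and "prev" URLs carry max_id / since_id query parameters, and this PR makes extraction of those IDs tolerant of non-integer (flake) IDs. A small sketch of the header shape and of the url dicts the changed code iterates over, using requests.utils.parse_header_links; the instance URL and IDs are made-up values, not taken from this PR:

import requests

# A pagination Link header of the shape the changed code handles (made-up host and IDs)
link_header = (
    '<https://example.social/api/v1/timelines/tag/foo?max_id=abc1234>; rel="next", '
    '<https://example.social/api/v1/timelines/tag/foo?since_id=abc1234>; rel="prev"'
)

for url in requests.utils.parse_header_links(link_header):
    # Each entry is a dict such as {'url': 'https://...?max_id=abc1234', 'rel': 'next'}
    print(url['rel'], url['url'])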


@@ -13,3 +13,4 @@ pytest-cov = "*"
 vcrpy = "*"
 pytest-vcr = "<1"
 pytest-mock = "*"
+requests-mock = "*"

Pipfile.lock (generated), 8 changes

@@ -313,6 +313,14 @@
         ],
         "version": "==2.20.1"
     },
+    "requests-mock": {
+        "hashes": [
+            "sha256:7a5fa99db5e3a2a961b6f20ed40ee6baeff73503cf0a553cc4d679409e6170fb",
+            "sha256:8ca0628dc66d3f212878932fd741b02aa197ad53fd2228164800a169a4a826af"
+        ],
+        "index": "pypi",
+        "version": "==1.5.2"
+    },
     "six": {
         "hashes": [
             "sha256:70e8a77beed4562e7f14fe23a786b54f6296e34344c23bc42f07b15018ff98e9",


@@ -2275,13 +2275,17 @@ class Mastodon:
                     if url['rel'] == 'next':
                         # Be paranoid and extract max_id specifically
                         next_url = url['url']
-                        matchgroups = re.search(r"max_id=([0-9]*)", next_url)
+                        matchgroups = re.search(r"[?&]max_id=([^&]+)", next_url)
 
                         if matchgroups:
                             next_params = copy.deepcopy(params)
                             next_params['_pagination_method'] = method
                             next_params['_pagination_endpoint'] = endpoint
-                            next_params['max_id'] = int(matchgroups.group(1))
+                            max_id = matchgroups.group(1)
+                            if max_id.isdigit():
+                                next_params['max_id'] = int(max_id)
+                            else:
+                                next_params['max_id'] = max_id
                             if "since_id" in next_params:
                                 del next_params['since_id']
                             response[-1]._pagination_next = next_params
@@ -2289,13 +2293,17 @@
                     if url['rel'] == 'prev':
                         # Be paranoid and extract since_id specifically
                         prev_url = url['url']
-                        matchgroups = re.search(r"since_id=([0-9]*)", prev_url)
+                        matchgroups = re.search(r"[?&]since_id=([^&]+)", prev_url)
 
                         if matchgroups:
                             prev_params = copy.deepcopy(params)
                             prev_params['_pagination_method'] = method
                             prev_params['_pagination_endpoint'] = endpoint
-                            prev_params['since_id'] = int(matchgroups.group(1))
+                            since_id = matchgroups.group(1)
+                            if since_id.isdigit():
+                                prev_params['since_id'] = int(since_id)
+                            else:
+                                prev_params['since_id'] = since_id
                             if "max_id" in prev_params:
                                 del prev_params['max_id']
                             response[0]._pagination_prev = prev_params
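Why the regex change matters, as a minimal comparison (made-up URL): the old digits-only pattern still produces a match object on a non-numeric ID, but its capture group is empty, so the old int() conversion would fail; the new pattern captures everything up to the next "&" and only converts when the value is purely numeric.

import re

next_url = "https://example.social/api/v1/timelines/tag/foo?max_id=abc1234"

old = re.search(r"max_id=([0-9]*)", next_url)
print(repr(old.group(1)))  # '' -- empty capture, so int('') would raise ValueError

new = re.search(r"[?&]max_id=([^&]+)", next_url)
max_id = new.group(1)
print(int(max_id) if max_id.isdigit() else max_id)  # abc1234, kept as a string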


@@ -1,6 +1,6 @@
 from setuptools import setup
 
-test_deps = ['pytest', 'pytest-runner', 'pytest-cov', 'vcrpy', 'pytest-vcr', 'pytest-mock']
+test_deps = ['pytest', 'pytest-runner', 'pytest-cov', 'vcrpy', 'pytest-vcr', 'pytest-mock', 'requests-mock']
 extras = {
     "test": test_deps
 }


@@ -1,5 +1,10 @@
 import pytest
 from contextlib import contextmanager
+try:
+    from mock import MagicMock
+except ImportError:
+    from unittest.mock import MagicMock
+import requests_mock
 
 UNLIKELY_HASHTAG = "fgiztsshwiaqqiztpmmjbtvmescsculuvmgjgopwoeidbcrixp"
@@ -44,3 +49,18 @@ def test_fetch_remaining(api):
     hashtag_remaining = api.fetch_remaining(hashtag)
     assert hashtag_remaining
     assert len(hashtag_remaining) >= 30
+
+def test_link_headers(api):
+    rmock = requests_mock.Adapter()
+    api.session.mount(api.api_base_url, rmock)
+
+    _id = 'abc1234'
+
+    rmock.register_uri('GET', requests_mock.ANY, json=[{"foo": "bar"}], headers={"link": """
+        <{base}/api/v1/timelines/tag/{tag}?max_id={_id}>; rel="next", <{base}/api/v1/timelines/tag/{tag}?since_id={_id}>; rel="prev"
+    """.format(base=api.api_base_url, tag=UNLIKELY_HASHTAG, _id=_id).strip()
+    })
+
+    resp = api.timeline_hashtag(UNLIKELY_HASHTAG)
+    assert resp[0]._pagination_next['max_id'] == _id
+    assert resp[0]._pagination_prev['since_id'] == _id
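Downstream, the _pagination_next / _pagination_prev attributes set above are what the library's fetch_next / fetch_previous / fetch_remaining helpers read. A rough usage sketch, with a made-up instance and token, and assuming fetch_next returns a falsy value once there is no further page:

from mastodon import Mastodon

# Made-up instance and token, purely illustrative
api = Mastodon(access_token="...", api_base_url="https://example.social")

page = api.timeline_hashtag("python")
while page:
    for status in page:
        pass  # process each status here
    # fetch_next follows page[-1]._pagination_next; after this change the max_id it
    # carries may be an int (numeric IDs) or a str (flake IDs), and either round-trips
    page = api.fetch_next(page)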