# coding: utf-8
import os
import os.path
import mimetypes
import time
import random
import string
import datetime
from contextlib import closing
import pytz
import requests
from requests.models import urlencode
import dateutil
import dateutil.parser
import re
import copy
import threading
import sys
import six
try:
from urllib.parse import urlparse
except ImportError:
from urlparse import urlparse
class Mastodon:
"""
Super basic but thorough and easy-to-use Mastodon API wrapper in Python.

If anything is unclear, check the official API docs at
https://github.com/tootsuite/documentation/blob/master/Using-the-API/API.md
"""
__DEFAULT_BASE_URL = 'https://mastodon.social'
__DEFAULT_TIMEOUT = 300
###
# Registering apps
###
@staticmethod
def create_app(client_name, scopes=['read', 'write', 'follow'], redirect_uris=None, website=None, to_file=None,
api_base_url=__DEFAULT_BASE_URL, request_timeout=__DEFAULT_TIMEOUT):
"""
Create a new app with the given client_name and scopes (read, write, follow).

Specify redirect_uris if you want users to be redirected to a certain page after authenticating.
Specify to_file to persist your app's info to a file so you can use it in the constructor.
Specify api_base_url if you want to register an app on an instance different from the flagship one.

Presently, app registration is open by default, but this is not guaranteed to be the case for
all Mastodon instances in the future, or even for the flagship instance.

Returns client_id and client_secret.
"""
api_base_url = Mastodon.__protocolize(api_base_url)
request_data = {
'client_name': client_name,
'scopes': " ".join(scopes)
}
try:
if redirect_uris is not None:
request_data['redirect_uris'] = redirect_uris
else:
request_data['redirect_uris'] = 'urn:ietf:wg:oauth:2.0:oob'
if website is not None:
request_data['website'] = website
response = requests.post(api_base_url + '/api/v1/apps', data=request_data, timeout=request_timeout)
response = response.json()
except Exception as e:
raise MastodonNetworkError("Could not complete request: %s" % e)
if to_file is not None:
with open(to_file, 'w') as secret_file:
secret_file.write(response['client_id'] + '\n')
secret_file.write(response['client_secret'] + '\n')
return (response['client_id'], response['client_secret'])
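
# Usage sketch (illustrative only, kept as a comment so it is not executed on import;
# the instance URL and file name below are placeholder assumptions):
#
#     Mastodon.create_app(
#         'my_example_app',
#         api_base_url='https://mastodon.example',
#         to_file='example_clientcred.secret'
#     )
#
# The returned (and optionally persisted) client_id and client_secret can then be
# passed to the Mastodon() constructor.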
###
# Authentication, including constructor
###
def __init__(self, client_id, client_secret=None, access_token=None,
api_base_url=__DEFAULT_BASE_URL, debug_requests=False,
ratelimit_method="wait", ratelimit_pacefactor=1.1,
request_timeout=__DEFAULT_TIMEOUT):
"""
Create a new API wrapper instance based on the given client_secret and client_id. If you
give a client_id and it is not a file, you must also give a secret. If a file is given as
client_id, the client ID and secret are read from that file.

You can also specify an access_token, directly or as a file (as written by log_in).

Mastodon.py can try to respect rate limits in several ways, controlled by ratelimit_method.
"throw" makes functions throw a MastodonRatelimitError when the rate limit is hit. "wait"
mode will, once the limit is hit, wait and retry the request as soon as the rate limit
resets, until it succeeds. "pace" works like "throw" but tries to wait in between calls
so that the limit is generally not hit (how hard it tries to avoid hitting the rate limit
can be controlled by ratelimit_pacefactor). The default setting is "wait". Note that even
in "wait" and "pace" mode, requests can still fail due to network or other problems! Also
note that "pace" and "wait" are NOT thread safe.

Specify api_base_url if you wish to talk to an instance other than the flagship one.

By default, a timeout of 300 seconds is used for all requests. If you wish to change this,
pass the desired timeout (in seconds) as request_timeout.
"""
self.api_base_url = Mastodon.__protocolize(api_base_url)
self.client_id = client_id
self.client_secret = client_secret
self.access_token = access_token
self.debug_requests = debug_requests
self.ratelimit_method = ratelimit_method
self._token_expired = datetime.datetime.now()
self._refresh_token = None
self.ratelimit_limit = 300
self.ratelimit_reset = time.time()
self.ratelimit_remaining = 300
self.ratelimit_lastcall = time.time()
self.ratelimit_pacefactor = ratelimit_pacefactor
self.request_timeout = request_timeout
if ratelimit_method not in ["throw", "wait", "pace"]:
raise MastodonIllegalArgumentError("Invalid ratelimit method.")
if os.path.isfile(self.client_id):
with open(self.client_id, 'r') as secret_file:
self.client_id = secret_file.readline().rstrip()
self.client_secret = secret_file.readline().rstrip()
else:
if self.client_secret is None:
raise MastodonIllegalArgumentError('Specified client id directly, but did not supply secret')
if self.access_token is not None and os.path.isfile(self.access_token):
with open(self.access_token, 'r') as token_file:
self.access_token = token_file.readline().rstrip()
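
# Usage sketch (illustrative only, kept as a comment so it is not executed on import;
# file names and the instance URL are placeholder assumptions):
#
#     mastodon = Mastodon(
#         client_id='example_clientcred.secret',
#         access_token='example_usercred.secret',
#         api_base_url='https://mastodon.example',
#         ratelimit_method='pace'
#     )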
def auth_request_url(self, client_id=None, redirect_uris="urn:ietf:wg:oauth:2.0:oob",
scopes=['read', 'write', 'follow']):
"""Returns the url that a client needs to request the grant from the server.
"""
if client_id is None:
client_id = self.client_id
else:
if os.path.isfile(client_id):
with open(client_id, 'r') as secret_file:
client_id = secret_file.readline().rstrip()
params = dict()
params['client_id'] = client_id
params['response_type'] = "code"
params['redirect_uri'] = redirect_uris
params['scope'] = " ".join(scopes)
formatted_params = urlencode(params)
return "".join([self.api_base_url, "/oauth/authorize?", formatted_params])
def log_in(self, username=None, password=None,
code=None, redirect_uri="urn:ietf:wg:oauth:2.0:oob", refresh_token=None,
scopes=['read', 'write', 'follow'], to_file=None):
"""
Your username is the e-mail address you use to log in to Mastodon.

Can persist the access token to a file (via to_file), to be used in the constructor.

Supports refresh_token, but mastodon.social does not implement it at the moment.
Handles password, authorization_code, and refresh_token authentication.

Will throw a MastodonIllegalArgumentError if the username / password are wrong, the
scopes are not valid, or the granted scopes differ from those requested.

For OAuth2 documentation, compare
https://github.com/doorkeeper-gem/doorkeeper/wiki/Interacting-as-an-OAuth-client-with-Doorkeeper

Returns the access token.
"""
if username is not None and password is not None:
params = self.__generate_params(locals(), ['scopes', 'to_file', 'code', 'refresh_token'])
params['grant_type'] = 'password'
elif code is not None:
params = self.__generate_params(locals(), ['scopes', 'to_file', 'username', 'password', 'refresh_token'])
params['grant_type'] = 'authorization_code'
elif refresh_token is not None:
params = self.__generate_params(locals(), ['scopes', 'to_file', 'username', 'password', 'code'])
params['grant_type'] = 'refresh_token'
else:
raise MastodonIllegalArgumentError('Invalid arguments given. username and password or code are required.')
params['client_id'] = self.client_id
params['client_secret'] = self.client_secret
params['scope'] = " ".join(scopes)
try:
response = self.__api_request('POST', '/oauth/token', params, do_ratelimiting=False)
self.access_token = response['access_token']
self.__set_refresh_token(response.get('refresh_token'))
self.__set_token_expired(int(response.get('expires_in', 0)))
except Exception as e:
if username is not None or password is not None:
raise MastodonIllegalArgumentError('Invalid user name, password, or redirect_uris: %s' % e)
elif code is not None:
raise MastodonIllegalArgumentError('Invalid access token or redirect_uris: %s' % e)
else:
raise MastodonIllegalArgumentError('Invalid request: %s' % e)
requested_scopes = " ".join(sorted(scopes))
received_scopes = " ".join(sorted(response["scope"].split(" ")))
if requested_scopes != received_scopes:
raise MastodonAPIError(
'Granted scopes "' + received_scopes + '" differ from requested scopes "' + requested_scopes + '".')
if to_file is not None:
with open(to_file, 'w') as token_file:
token_file.write(response['access_token'] + '\n')
return response['access_token']
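
# Usage sketch (illustrative only, kept as a comment so it is not executed on import;
# credentials, file names and the instance URL are placeholder assumptions):
#
#     mastodon = Mastodon(client_id='example_clientcred.secret',
#                         api_base_url='https://mastodon.example')
#     mastodon.log_in(
#         'user@example.com',
#         'incrediblygoodpassword',
#         to_file='example_usercred.secret'
#     )
#
# Alternatively, send the user to auth_request_url() and exchange the authorization
# code they receive via log_in(code=...).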
###
# Reading data: Instances
###
def instance(self):
"""
Retrieve basic information about the instance, including the URI and administrative contact email.
Returns an instance dict.
"""
return self.__api_request('GET', '/api/v1/instance/')
###
# Reading data: Timelines
##
def timeline(self, timeline="home", max_id=None, since_id=None, limit=None):
"""
Fetch statuses, most recent ones first. Timeline can be 'home', 'local', 'public',
or 'tag/hashtag'. See the following functions' documentation for what those do.
Local hashtag timelines are supported via the timeline_hashtag() function.
The default timeline is the "home" timeline.
Returns a list of toot dicts.
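
Example (an illustrative sketch; assumes an authenticated wrapper instance named
'mastodon'):

    toots = mastodon.timeline('home', limit=10)
    for toot in toots:
        print(toot['account']['acct'], toot['created_at'])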
"""
if max_id != None:
max_id = self.__unpack_id(max_id)
if since_id != None:
since_id = self.__unpack_id(since_id)
params_initial = locals()
if timeline == "local":
timeline = "public"
params_initial['local'] = True
params = self.__generate_params(params_initial, ['timeline'])
url = '/api/v1/timelines/{0}'.format(timeline)
return self.__api_request('GET', url, params)
def timeline_home(self, max_id=None, since_id=None, limit=None):
"""
Fetch the authenticated user's home timeline (i.e. followed users and self).
Returns a list of toot dicts.
"""
return self.timeline('home', max_id=max_id, since_id=since_id,
limit=limit)
def timeline_local(self, max_id=None, since_id=None, limit=None):
"""
Fetches the local / instance-wide timeline, not including replies.
Returns a list of toot dicts.
"""
return self.timeline('local', max_id=max_id, since_id=since_id,
limit=limit)
def timeline_public(self, max_id=None, since_id=None, limit=None):
"""
Fetches the public / visible-network timeline, not including replies.
Returns a list of toot dicts.
"""
return self.timeline('public', max_id=max_id, since_id=since_id,
limit=limit)
def timeline_hashtag(self, hashtag, local=False, max_id=None, since_id=None, limit=None):
"""
Fetch a timeline of toots with a given hashtag. The hashtag parameter
should not contain the leading #.
Set "local" to True to retrieve only instance-local tagged posts.
Returns a list of toot dicts.
"""
if hashtag.startswith("#"):
raise MastodonIllegalArgumentError("Hashtag parameter should omit leading #")
if max_id != None:
max_id = self.__unpack_id(max_id)
if since_id != None:
since_id = self.__unpack_id(since_id)
params_initial = locals()
if local == False:
del params_initial['local']
url = '/api/v1/timelines/tag/{0}'.format(hashtag)
params = self.__generate_params(params_initial, ['hashtag'])
return self.__api_request('GET', url, params)
###
# Reading data: Statuses
###
def status(self, id):
"""
Fetch information about a single toot.
Returns a toot dict.
"""
id = self.__unpack_id(id)
url = '/api/v1/statuses/{0}'.format(str(id))
return self.__api_request('GET', url)
def status_card(self, id):
"""
Fetch a card associated with a status. A card describes an object (such as an
external video or link) embedded into a status.
Returns a card dict.
"""
id = self.__unpack_id(id)
url = '/api/v1/statuses/{0}/card'.format(str(id))
return self.__api_request('GET', url)
def status_context(self, id):
"""
Fetch information about ancestors and descendants of a toot.
Returns a context dict.
"""
id = self.__unpack_id(id)
url = '/api/v1/statuses/{0}/context'.format(str(id))
return self.__api_request('GET', url)
def status_reblogged_by(self, id):
"""
Fetch a list of users that have reblogged a status.
Returns a list of user dicts.
"""
id = self.__unpack_id(id)
url = '/api/v1/statuses/{0}/reblogged_by'.format(str(id))
return self.__api_request('GET', url)
def status_favourited_by(self, id):
"""
Fetch a list of users that have favourited a status.
Returns a list of user dicts.
"""
id = self.__unpack_id(id)
url = '/api/v1/statuses/{0}/favourited_by'.format(str(id))
return self.__api_request('GET', url)
###
# Reading data: Notifications
###
def notifications(self, id=None, max_id=None, since_id=None, limit=None):
"""
Fetch notifications (mentions, favourites, reblogs, follows) for the authenticated
user.
Can be passed an id to fetch a single notification.
Returns a list of notification dicts.
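
Example (an illustrative sketch; assumes an authenticated wrapper instance named
'mastodon'):

    for notification in mastodon.notifications(limit=5):
        print(notification['type'], notification['account']['acct'])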
"""
if max_id != None:
max_id = self.__unpack_id(max_id)
if since_id != None:
since_id = self.__unpack_id(since_id)
if id is None:
params = self.__generate_params(locals(), ['id'])
return self.__api_request('GET', '/api/v1/notifications', params)
else:
id = self.__unpack_id(id)
url = '/api/v1/notifications/{0}'.format(str(id))
return self.__api_request('GET', url)
###
# Reading data: Accounts
###
def account(self, id):
"""
Fetch account information by user id.
Returns a user dict.
"""
id = self.__unpack_id(id)
url = '/api/v1/accounts/{0}'.format(str(id))
return self.__api_request('GET', url)
def account_verify_credentials(self):
"""
Fetch the authenticated user's account information.
Returns a user dict.
"""
return self.__api_request('GET', '/api/v1/accounts/verify_credentials')
def account_statuses(self, id, max_id=None, since_id=None, limit=None):
"""
Fetch statuses by user id. Same options as timeline are permitted.
Returns a list of toot dicts.
"""
id = self.__unpack_id(id)
if max_id != None:
max_id = self.__unpack_id(max_id)
if since_id != None:
since_id = self.__unpack_id(since_id)
params = self.__generate_params(locals(), ['id'])
url = '/api/v1/accounts/{0}/statuses'.format(str(id))
return self.__api_request('GET', url, params)
def account_following(self, id, max_id=None, since_id=None, limit=None):
"""
Fetch users the given user is following.
Returns a list of user dicts.
"""
id = self.__unpack_id(id)
if max_id != None:
max_id = self.__unpack_id(max_id)
if since_id != None:
since_id = self.__unpack_id(since_id)
params = self.__generate_params(locals(), ['id'])
url = '/api/v1/accounts/{0}/following'.format(str(id))
return self.__api_request('GET', url, params)
def account_followers(self, id, max_id=None, since_id=None, limit=None):
"""
Fetch users the given user is followed by.
Returns a list of user dicts.
"""
id = self.__unpack_id(id)
if max_id != None:
max_id = self.__unpack_id(max_id)
if since_id != None:
since_id = self.__unpack_id(since_id)
params = self.__generate_params(locals(), ['id'])
url = '/api/v1/accounts/{0}/followers'.format(str(id))
return self.__api_request('GET', url, params)
def account_relationships(self, id):
"""
Fetch relationships (following, followed_by, blocking) of the logged-in user to
a given account. id can be a list.
Returns a list of relationship dicts.
"""
id = self.__unpack_id(id)
params = self.__generate_params(locals())
return self.__api_request('GET', '/api/v1/accounts/relationships',
params)
def account_search(self, q, limit=None):
"""
Fetch matching accounts. Will look up an account remotely if the search term is
in the username@domain format and not yet in the database.
Returns a list of user dicts.
"""
params = self.__generate_params(locals())
return self.__api_request('GET', '/api/v1/accounts/search', params)
###
# Reading data: Searching
###
def search(self, q, resolve=False):
"""
Fetch matching hashtags, accounts and statuses. Will search federated
instances if resolve is True.
Returns a dict of lists.
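
Example (an illustrative sketch; assumes an authenticated wrapper instance named
'mastodon'; the result keys follow the /api/v1/search response):

    results = mastodon.search("mastodon", resolve=True)
    print(len(results['accounts']), len(results['statuses']), len(results['hashtags']))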
"""
params = self.__generate_params(locals())
return self.__api_request('GET', '/api/v1/search', params)
###
# Reading data: Mutes and Blocks
###
def mutes(self, max_id=None, since_id=None, limit=None):
"""
Fetch a list of users muted by the authenticated user.
Returns a list of user dicts.
"""
if max_id != None:
max_id = self.__unpack_id(max_id)
if since_id != None:
since_id = self.__unpack_id(since_id)
params = self.__generate_params(locals())
return self.__api_request('GET', '/api/v1/mutes', params)
def blocks(self, max_id=None, since_id=None, limit=None):
"""
Fetch a list of users blocked by the authenticated user.
Returns a list of user dicts.
"""
if max_id != None:
max_id = self.__unpack_id(max_id)
if since_id != None:
since_id = self.__unpack_id(since_id)
params = self.__generate_params(locals())
return self.__api_request('GET', '/api/v1/blocks', params)
###
# Reading data: Reports
###
def reports(self):
"""
Fetch a list of reports made by the authenticated user.
Returns a list of report dicts.
Warning: According to the official API documentation, this
method is to be treated as not finalized as of Mastodon 2.0.0.
"""
return self.__api_request('GET', '/api/v1/reports')
###
# Reading data: Favourites
###
def favourites(self, max_id=None, since_id=None, limit=None):
"""
Fetch the authenticated user's favourited statuses.
Returns a list of toot dicts.
"""
if max_id != None:
max_id = self.__unpack_id(max_id)
if since_id != None:
since_id = self.__unpack_id(since_id)
params = self.__generate_params(locals())
return self.__api_request('GET', '/api/v1/favourites', params)
###
# Reading data: Follow requests
###
def follow_requests(self, max_id=None, since_id=None, limit=None):
"""
Fetch the authenticated user's incoming follow requests.
Returns a list of user dicts.
"""
if max_id != None:
max_id = self.__unpack_id(max_id)
if since_id != None:
since_id = self.__unpack_id(since_id)
params = self.__generate_params(locals())
return self.__api_request('GET', '/api/v1/follow_requests', params)
###
# Reading data: Domain blocks
###
def domain_blocks(self, max_id=None, since_id=None, limit=None):
"""
Fetch the authenticated user's blocked domains.

Returns a list of blocked domain URLs (as strings, without protocol specifier).
"""
if max_id != None:
max_id = self.__unpack_id(max_id)
if since_id != None:
since_id = self.__unpack_id(since_id)
params = self.__generate_params(locals())
return self.__api_request('GET', '/api/v1/domain_blocks', params)
###
# Writing data: Statuses
###
def status_post(self, status, in_reply_to_id=None, media_ids=None,
sensitive=False, visibility='', spoiler_text=None):
"""
Post a status. Can optionally be in reply to another status and contain
up to four pieces of media (Uploaded via media_post()). media_ids can
also be the media dicts returned by media_post - they are unpacked
automatically.
The 'sensitive' boolean decides whether or not media attached to the post
should be marked as sensitive, which hides it by default on the Mastodon
web front-end.
The visibility parameter is a string value and matches the visibility
option on the /api/v1/statuses POST API endpoint. It accepts any of:
'direct' - post will be visible only to mentioned users
'private' - post will be visible only to followers
'unlisted' - post will be public but not appear on the public timeline
'public' - post will be public
If not passed in, visibility defaults to match the current account's
default-privacy setting (starting with Mastodon version 1.6) or its
locked setting - private if the account is locked, public otherwise
(for Mastodon versions lower than 1.6).
The spoiler_text parameter is a string to be shown as a warning before
the text of the status. If no text is passed in, no warning will be
displayed.
Returns a toot dict with the new status.
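
Example (an illustrative sketch; assumes an authenticated wrapper instance named
'mastodon'):

    mastodon.status_post(
        "Long day at the volcano lair.",
        visibility='unlisted',
        spoiler_text="work talk"
    )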
"""
if in_reply_to_id != None:
in_reply_to_id = self.__unpack_id(in_reply_to_id)
params_initial = locals()
# Validate visibility parameter
valid_visibilities = ['private', 'public', 'unlisted', 'direct', '']
if params_initial['visibility'].lower() not in valid_visibilities:
raise ValueError('Invalid visibility value! Acceptable '
'values are %s' % valid_visibilities)
if params_initial['sensitive'] is False:
del [params_initial['sensitive']]
if media_ids is not None:
try:
media_ids_proper = []
for media_id in media_ids:
if isinstance(media_id, dict):
media_ids_proper.append(media_id["id"])
else:
media_ids_proper.append(media_id)
except Exception as e:
raise MastodonIllegalArgumentError("Invalid media "
"dict: %s" % e)
params_initial["media_ids"] = media_ids_proper
params = self.__generate_params(params_initial)
return self.__api_request('POST', '/api/v1/statuses', params)
def toot(self, status):
"""
Synonym for status_post that only takes the status text as input.
Usage in production code is not recommended.
Returns a toot dict with the new status.
"""
return self.status_post(status)
def status_delete(self, id):
"""
Delete a status.
Returns an empty dict for good measure.
"""
id = self.__unpack_id(id)
url = '/api/v1/statuses/{0}'.format(str(id))
return self.__api_request('DELETE', url)
def status_reblog(self, id):
"""
Reblog a status.
Returns a toot dict with a new status that wraps around the reblogged one.
"""
id = self.__unpack_id(id)
url = '/api/v1/statuses/{0}/reblog'.format(str(id))
return self.__api_request('POST', url)
def status_unreblog(self, id):
"""
Un-reblog a status.
Returns a toot dict with the status that used to be reblogged.
"""
id = self.__unpack_id(id)
url = '/api/v1/statuses/{0}/unreblog'.format(str(id))
return self.__api_request('POST', url)
def status_favourite(self, id):
"""
Favourite a status.
Returns a toot dict with the favourited status.
"""
id = self.__unpack_id(id)
url = '/api/v1/statuses/{0}/favourite'.format(str(id))
return self.__api_request('POST', url)
def status_unfavourite(self, id):
"""
Un-favourite a status.
Returns a toot dict with the un-favourited status.
"""
id = self.__unpack_id(id)
url = '/api/v1/statuses/{0}/unfavourite'.format(str(id))
return self.__api_request('POST', url)
def status_mute(self, id):
"""
Mute notifications for a status.
Returns a toot dict with the now muted status.
"""
id = self.__unpack_id(id)
url = '/api/v1/statuses/{0}/mute'.format(str(id))
return self.__api_request('POST', url)
def status_unmute(self, id):
"""
Unmute notifications for a status.
Returns a toot dict with the status that used to be muted.
"""
id = self.__unpack_id(id)
url = '/api/v1/statuses/{0}/unmute'.format(str(id))
return self.__api_request('POST', url)
###
# Writing data: Notifications
###
def notifications_clear(self):
"""
Clear out the authenticated user's notifications.
"""
return self.__api_request('POST', '/api/v1/notifications/clear')
def notifications_dismiss(self, id):
"""
Deletes a single notification.
"""
id = self.__unpack_id(id)
params = self.__generate_params(locals())
return self.__api_request('POST', '/api/v1/notifications/dismiss', params)
###
# Writing data: Accounts
###
def account_follow(self, id):
"""
Follow a user.
Returns a relationship dict containing the updated relationship to the user.
"""
id = self.__unpack_id(id)
url = '/api/v1/accounts/{0}/follow'.format(str(id))
return self.__api_request('POST', url)
def follows(self, uri):
"""
Follow a remote user by uri (username@domain).
Returns a user dict.
"""
params = self.__generate_params(locals())
return self.__api_request('POST', '/api/v1/follows', params)
def account_unfollow(self, id):
"""
Unfollow a user.
Returns a relationship dict containing the updated relationship to the user.
"""
id = self.__unpack_id(id)
url = '/api/v1/accounts/{0}/unfollow'.format(str(id))
return self.__api_request('POST', url)
def account_block(self, id):
"""
Block a user.
Returns a relationship dict containing the updated relationship to the user.
"""
id = self.__unpack_id(id)
url = '/api/v1/accounts/{0}/block'.format(str(id))
return self.__api_request('POST', url)
def account_unblock(self, id):
"""
Unblock a user.
Returns a relationship dict containing the updated relationship to the user.
"""
id = self.__unpack_id(id)
url = '/api/v1/accounts/{0}/unblock'.format(str(id))
return self.__api_request('POST', url)
def account_mute(self, id):
"""
Mute a user.
Returns a relationship dict containing the updated relationship to the user.
"""
id = self.__unpack_id(id)
url = '/api/v1/accounts/{0}/mute'.format(str(id))
return self.__api_request('POST', url)
def account_unmute(self, id):
"""
Unmute a user.
Returns a relationship dict containing the updated relationship to the user.
"""
id = self.__unpack_id(id)
url = '/api/v1/accounts/{0}/unmute'.format(str(id))
return self.__api_request('POST', url)
def account_update_credentials(self, display_name=None, note=None,
avatar=None, header=None):
"""
Update the profile for the currently authenticated user.
'note' is the user's bio.
'avatar' and 'header' are images encoded in base64, prepended by a content-type
(for example: 'data:image/png;base64,iVBORw0KGgoAAAA[...]')
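
Example (an illustrative sketch for building the data URI; the file name is a
placeholder and 'mastodon' is assumed to be an authenticated wrapper instance):

    import base64
    with open("avatar.png", "rb") as avatar_file:
        avatar = "data:image/png;base64," + base64.b64encode(avatar_file.read()).decode("ascii")
    mastodon.account_update_credentials(display_name="Lorem Ipsum", avatar=avatar)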
"""
params = self.__generate_params(locals())
return self.__api_request('PATCH', '/api/v1/accounts/update_credentials', params)
###
# Writing data: Reports
###
def report(self, account_id, status_ids, comment):
"""
Report statuses to the instance's administrators.
Accepts a list of toot IDs associated with the report, and a comment.
Returns a report dict.
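
Example (an illustrative sketch; the IDs are placeholders and 'mastodon' is assumed
to be an authenticated wrapper instance):

    mastodon.report(account_id=1234, status_ids=[56789], comment="Spamming links")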
"""
account_id = self.__unpack_id(account_id)
status_ids = list(map(lambda x: self.__unpack_id(x), status_ids))  # list() so __generate_params treats it as a list on Python 3
params = self.__generate_params(locals())
return self.__api_request('POST', '/api/v1/reports/', params)
###
# Writing data: Follow requests
###
def follow_request_authorize(self, id):
"""
Accept an incoming follow request.
Returns an empty dict.
"""
id = self.__unpack_id(id)
url = '/api/v1/follow_requests/{0}/authorize'.format(str(id))
return self.__api_request('POST', url)
def follow_request_reject(self, id):
"""
Reject an incoming follow request.
Returns an empty dict.
"""
id = self.__unpack_id(id)
url = '/api/v1/follow_requests/{0}/reject'.format(str(id))
return self.__api_request('POST', url)
###
# Writing data: Media
###
def media_post(self, media_file, mime_type=None, description=None):
"""
Post an image. media_file can either be image data or
a file name. If image data is passed directly, the mime
type has to be specified manually, otherwise, it is
determined from the file name.

Throws a MastodonIllegalArgumentError if the mime type of the
passed data or file can not be determined properly.
Returns a media dict. This contains the id that can be used in
status_post to attach the media file to a toot.
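
Example (an illustrative sketch; the file name is a placeholder and 'mastodon' is
assumed to be an authenticated wrapper instance):

    media = mastodon.media_post("cat_picture.jpg", description="A cat")
    mastodon.status_post("Cat!", media_ids=[media])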
"""
if mime_type is None and os.path.isfile(media_file):
mime_type = mimetypes.guess_type(media_file)[0]
media_file = open(media_file, 'rb')
if mime_type is None:
raise MastodonIllegalArgumentError('Could not determine mime type'
' or data passed directly '
'without mime type.')
random_suffix = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(10))
file_name = "mastodonpyupload_" + str(time.time()) + "_" + str(random_suffix) + mimetypes.guess_extension(
mime_type)
media_file_description = (file_name, media_file, mime_type)
return self.__api_request('POST', '/api/v1/media',
files={'file': media_file_description},
params={'description': description})
###
# Writing data: Domain blocks
###
def domain_block(self, domain=None):
"""
Add a block for all statuses originating from the specified domain for the logged-in user.
"""
params = self.__generate_params(locals())
return self.__api_request('POST', '/api/v1/domain_blocks', params)
def domain_unblock(self, domain=None):
"""
Remove a domain block for the logged-in user.
"""
params = self.__generate_params(locals())
return self.__api_request('DELETE', '/api/v1/domain_blocks', params)
###
# Pagination
###
def fetch_next(self, previous_page):
"""
Fetches the next page of results of a paginated request. Pass in the
previous page in its entirety, or the pagination information dict
returned as a part of that page's last status ('_pagination_next').

Returns the next page or None if no further data is available.
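
Example (an illustrative sketch; walks a home timeline two pages deep, assuming an
authenticated wrapper instance named 'mastodon'):

    page_one = mastodon.timeline_home(limit=20)
    page_two = mastodon.fetch_next(page_one)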
"""
if isinstance(previous_page, list) and len(previous_page) != 0:
if '_pagination_next' in previous_page[-1]:
params = copy.deepcopy(previous_page[-1]['_pagination_next'])
else:
return None
else:
params = copy.deepcopy(previous_page)
method = params['_pagination_method']
del params['_pagination_method']
endpoint = params['_pagination_endpoint']
del params['_pagination_endpoint']
return self.__api_request(method, endpoint, params)
def fetch_previous(self, next_page):
"""
Fetches the previous page of results of a paginated request. Pass in the
previous page in its entirety, or the pagination information dict
returned as a part of that page's first status ('_pagination_prev').

Returns the previous page or None if no further data is available.
"""
if isinstance(next_page, list) and len(next_page) != 0:
if '_pagination_prev' in next_page[0]:
params = copy.deepcopy(next_page[0]['_pagination_prev'])
else:
return None
else:
params = copy.deepcopy(next_page)
method = params['_pagination_method']
del params['_pagination_method']
endpoint = params['_pagination_endpoint']
del params['_pagination_endpoint']
return self.__api_request(method, endpoint, params)
def fetch_remaining(self, first_page):
"""
Fetches all the remaining pages of a paginated request starting from a
first page and returns the entire set of results (including the first page
that was passed in) as a big list.

Be careful, as this might generate a lot of requests, depending on what you are
fetching, and might cause you to run into rate limits very quickly.
"""
first_page = copy.deepcopy(first_page)
all_pages = []
current_page = first_page
while current_page is not None and len(current_page) > 0:
all_pages.extend(current_page)
current_page = self.fetch_next(current_page)
return all_pages
###
# Streaming
###
def stream_user(self, listener, async=False):
"""
Streams events that are relevant to the authorized user, i.e. home
timeline and notifications. 'listener' should be a subclass of
StreamListener which will receive callbacks for incoming events.
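
Example (an illustrative sketch; assumes the StreamListener base class shipped with
this package is importable as shown, and an authenticated wrapper instance named
'mastodon'):

    from mastodon import StreamListener

    class PrintListener(StreamListener):
        def on_update(self, status):
            print(status['content'])

    mastodon.stream_user(PrintListener())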
"""
return self.__stream('/api/v1/streaming/user', listener, async=async)
def stream_public(self, listener, async=False):
"""
Streams public events. 'listener' should be a subclass of StreamListener
which will receive callbacks for incoming events.
"""
return self.__stream('/api/v1/streaming/public', listener, async=async)
def stream_local(self, listener, async=False):
"""
Streams local events. 'listener' should be a subclass of StreamListener
which will receive callbacks for incoming events.
"""
return self.__stream('/api/v1/streaming/public/local', listener, async=async)
def stream_hashtag(self, tag, listener, async=False):
"""
Returns all public statuses for the hashtag 'tag'. 'listener' should be
a subclass of StreamListener which will receive callbacks for incoming
events.
"""
if tag.startswith("#"):
raise MastodonIllegalArgumentError("Tag parameter should omit leading #")
return self.__stream("/api/v1/streaming/hashtag?tag={}".format(tag), listener)
###
# Internal helpers, dragons probably
###
def __datetime_to_epoch(self, date_time):
"""
Converts a python datetime to unix epoch, accounting for
time zones and such.

Assumes UTC if timezone is not given.
"""
date_time_utc = None
if date_time.tzinfo is None:
date_time_utc = date_time.replace(tzinfo=pytz.utc)
else:
date_time_utc = date_time.astimezone(pytz.utc)
epoch_utc = datetime.datetime.utcfromtimestamp(0).replace(tzinfo=pytz.utc)
return (date_time_utc - epoch_utc).total_seconds()
@staticmethod
def __json_date_parse(json_object):
"""
Parse dates in certain known json fields, if possible.
"""
known_date_fields = ["created_at"]
for k, v in json_object.items():
if k in known_date_fields:
try:
if isinstance(v, int):
json_object[k] = datetime.datetime.fromtimestamp(v, pytz.utc)
else:
json_object[k] = dateutil.parser.parse(v)
except:
raise MastodonAPIError('Encountered invalid date.')
return json_object
@staticmethod
def __json_id_to_bignum(json_object):
"""
Converts json string IDs to native python bignums.
"""
for key in ('id', 'in_reply_to_id', 'in_reply_to_account_id'):
if (key in json_object and
isinstance(json_object[key], six.text_type)):
try:
json_object[key] = int(json_object[key])
except ValueError:
pass
return json_object
@staticmethod
def __json_hooks(json_object):
json_object = Mastodon.__json_date_parse(json_object)
json_object = Mastodon.__json_id_to_bignum(json_object)
return json_object
def __api_request(self, method, endpoint, params={}, files={}, do_ratelimiting=True):
"""
Internal API request helper.
"""
response = None
headers = None
remaining_wait = 0
# "pace" mode ratelimiting: Assume constant rate of requests, sleep a little less long than it
# would take to not hit the rate limit at that request rate.
if do_ratelimiting and self.ratelimit_method == "pace":
if self.ratelimit_remaining == 0:
to_next = self.ratelimit_reset - time.time()
if to_next > 0:
# As a precaution, never sleep longer than 5 minutes
to_next = min(to_next, 5 * 60)
time.sleep(to_next)
else:
time_waited = time.time() - self.ratelimit_lastcall
time_wait = float(self.ratelimit_reset - time.time()) / float(self.ratelimit_remaining)
remaining_wait = time_wait - time_waited
if remaining_wait > 0:
to_next = remaining_wait / self.ratelimit_pacefactor
to_next = min(to_next, 5 * 60)
time.sleep(to_next)
# Generate request headers
if self.access_token is not None:
headers = {'Authorization': 'Bearer ' + self.access_token}
if self.debug_requests:
print('Mastodon: Request to endpoint "' + endpoint + '" using method "' + method + '".')
print('Parameters: ' + str(params))
print('Headers: ' + str(headers))
print('Files: ' + str(files))
# Make request
request_complete = False
while not request_complete:
request_complete = True
response_object = None
try:
kwargs = dict(headers=headers, files=files,
timeout=self.request_timeout)
if method == 'GET':
kwargs['params'] = params
else:
kwargs['data'] = params
response_object = requests.request(
method, self.api_base_url + endpoint, **kwargs)
except Exception as e:
raise MastodonNetworkError("Could not complete request: %s" % e)
if response_object is None:
raise MastodonIllegalArgumentError("Illegal request.")
# Parse rate limiting headers
if 'X-RateLimit-Remaining' in response_object.headers and do_ratelimiting:
self.ratelimit_remaining = int(response_object.headers['X-RateLimit-Remaining'])
self.ratelimit_limit = int(response_object.headers['X-RateLimit-Limit'])
try:
ratelimit_reset_datetime = dateutil.parser.parse(response_object.headers['X-RateLimit-Reset'])
self.ratelimit_reset = self.__datetime_to_epoch(ratelimit_reset_datetime)
# Adjust server time to local clock
if 'Date' in response_object.headers:
server_time_datetime = dateutil.parser.parse(response_object.headers['Date'])
server_time = self.__datetime_to_epoch(server_time_datetime)
server_time_diff = time.time() - server_time
self.ratelimit_reset += server_time_diff
self.ratelimit_lastcall = time.time()
except Exception as e:
raise MastodonRatelimitError("Rate limit time calculations failed: %s" % e)
# Handle response
if self.debug_requests:
print('Mastodon: Response received with code ' + str(response_object.status_code) + '.')
print('response headers: ' + str(response_object.headers))
print('Response text content: ' + str(response_object.text))
if response_object.status_code == 404:
try:
response = response_object.json()
except:
raise MastodonAPIError('Endpoint not found.')
if isinstance(response, dict) and 'error' in response:
raise MastodonAPIError("Mastodon API returned error: " + str(response['error']))
else:
raise MastodonAPIError('Endpoint not found.')
if response_object.status_code == 500:
raise MastodonAPIError('General API problem.')
# Handle rate limiting
if response_object.status_code == 429:
if self.ratelimit_method == 'throw' or not do_ratelimiting:
raise MastodonRatelimitError('Hit rate limit.')
elif self.ratelimit_method in ('wait', 'pace'):
to_next = self.ratelimit_reset - time.time()
if to_next > 0:
# As a precaution, never sleep longer than 5 minutes
to_next = min(to_next, 5 * 60)
time.sleep(to_next)
request_complete = False
continue
try:
response = response_object.json(object_hook=self.__json_hooks)
except:
raise MastodonAPIError(
"Could not parse response as JSON, response code was %s, "
"bad json content was '%s'" % (response_object.status_code,
response_object.content))
# See if the returned dict is an error dict even though status is 200
if isinstance(response, dict) and 'error' in response:
raise MastodonAPIError("Mastodon API returned error: " + str(response['error']))
# Parse link headers
if isinstance(response, list) and \
'Link' in response_object.headers and \
response_object.headers['Link'] != "":
tmp_urls = requests.utils.parse_header_links(
response_object.headers['Link'].rstrip('>').replace('>,<', ',<'))
for url in tmp_urls:
if 'rel' not in url:
continue
if url['rel'] == 'next':
# Be paranoid and extract max_id specifically
next_url = url['url']
matchgroups = re.search(r"max_id=([0-9]*)", next_url)
if matchgroups:
next_params = copy.deepcopy(params)
next_params['_pagination_method'] = method
next_params['_pagination_endpoint'] = endpoint
next_params['max_id'] = int(matchgroups.group(1))
if "since_id" in next_params:
del next_params['since_id']
response[-1]['_pagination_next'] = next_params
if url['rel'] == 'prev':
# Be paranoid and extract since_id specifically
prev_url = url['url']
matchgroups = re.search(r"since_id=([0-9]*)", prev_url)
if matchgroups:
prev_params = copy.deepcopy(params)
prev_params['_pagination_method'] = method
prev_params['_pagination_endpoint'] = endpoint
prev_params['since_id'] = int(matchgroups.group(1))
if "max_id" in prev_params:
del prev_params['max_id']
response[0]['_pagination_prev'] = prev_params
return response
def __stream(self, endpoint, listener, params={}, async=False):
"""
Internal streaming API helper.
Returns a handle to the open connection that the user can close if they
wish to terminate it.
"""
# Check if we have to redirect
instance = self.instance()
if "streaming_api" in instance["urls"] and instance["urls"]["streaming_api"] != self.api_base_url:
# This is probably a websockets URL, which is really for the browser, but requests can't handle it
# So we do this below to turn it into an HTTPS or HTTP URL
parse = urlparse(instance["urls"]["streaming_api"])
if parse.scheme == 'wss':
url = "https://" + parse.netloc
elif parse.scheme == 'ws':
url = "http://" + parse.netloc
else:
raise MastodonAPIError(
"Could not parse streaming api location returned from server: {}.".format(
instance["urls"]["streaming_api"]))
else:
url = self.api_base_url
# The streaming server can't handle two slashes in a path, so remove trailing slashes
if url[-1] == '/':
url = url[:-1]
headers = {"Authorization": "Bearer " + self.access_token}
connection = requests.get(url + endpoint, headers = headers, data = params, stream = True)
if connection.status_code != 200:
raise MastodonNetworkError("Could not connect to streaming server: %s" % connection.reason)
class __stream_handle():
def __init__(self, connection):
self.connection = connection
def close(self):
self.connection.close()
def is_alive(self):
return self._thread.is_alive()
def _threadproc(self):
self._thread = threading.current_thread()
with closing(connection) as r:
try:
listener.handle_stream(r.iter_lines())
except AttributeError as e:
# Eat AttributeError from requests if user closes early
pass
return 0
handle = __stream_handle(connection)
if async:
t = threading.Thread(args=(), target=handle._threadproc)
t.start()
return handle
else:
# Blocking, never returns (can only leave via exception)
with closing(connection) as r:
listener.handle_stream(r.iter_lines())
def __generate_params(self, params, exclude=[]):
"""
Internal named-parameters-to-dict helper.
Note for developers: If called with locals() as params,
as is the usual practice in this code, the __generate_params call
(or at least the locals() call) should generally be the first thing
in your function.
"""
params = dict(params)
del params['self']
param_keys = list(params.keys())
for key in param_keys:
if params[key] is None or key in exclude:
del params[key]
param_keys = list(params.keys())
for key in param_keys:
if isinstance(params[key], list):
params[key + "[]"] = params[key]
del params[key]
return params
def __unpack_id(self, id):
"""
Internal object-to-id converter.
If id is a dict that contains an "id" key, returns the value of
that key; otherwise returns id unchanged.
"""
if isinstance(id, dict) and "id" in id:
return id["id"]
else:
return id
def __get_token_expired(self):
"""Internal helper for oauth code"""
return self._token_expired < datetime.datetime.now()
def __set_token_expired(self, value):
"""Internal helper for oauth code"""
self._token_expired = datetime.datetime.now() + datetime.timedelta(seconds=value)
return
def __get_refresh_token(self):
"""Internal helper for oauth code"""
return self._refresh_token
def __set_refresh_token(self, value):
"""Internal helper for oauth code"""
self._refresh_token = value
return
@staticmethod
def __protocolize(base_url):
"""Internal add-protocol-to-url helper"""
if not base_url.startswith("http://") and not base_url.startswith("https://"):
base_url = "https://" + base_url
# Some API endpoints can't handle extra /'s in path requests
base_url = base_url.rstrip("/")
return base_url
##
# Exceptions
##
class MastodonError(Exception):
"""Base class for Mastodon.py exceptions"""
class MastodonIllegalArgumentError(ValueError, MastodonError):
"""Raised when an incorrect parameter is passed to a function"""
pass
class MastodonIOError(IOError, MastodonError):
"""Base class for Mastodon.py I/O errors"""
class MastodonFileNotFoundError(MastodonIOError):
"""Raised when a file requested to be loaded can not be opened"""
pass
class MastodonNetworkError(MastodonIOError):
"""Raised when network communication with the server fails"""
pass
class MastodonAPIError(MastodonError):
"""Raised when the mastodon API generates a response that cannot be handled"""
pass
class MastodonRatelimitError(MastodonError):
"""Raised when rate limiting is set to manual mode and the rate limit is exceeded"""
pass
class MastodonMalformedEventError(MastodonError):
"""Raised when the server-sent event stream is malformed"""
pass