# Mastodonplus.py/mastodon/Mastodon.py
# coding: utf-8
import os
import os.path
import mimetypes
import time
import random
import string
import datetime
import collections
from contextlib import closing
import pytz
import requests
from requests.models import urlencode
import dateutil
import dateutil.parser
import re
import copy
import threading
import sys
import six
from decorator import decorate
import hashlib

IMPL_HAS_CRYPTO = True
try:
    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives.asymmetric import ec
except:
    IMPL_HAS_CRYPTO = False

IMPL_HAS_ECE = True
try:
    import http_ece
except:
    IMPL_HAS_ECE = False

import base64
import json

IMPL_HAS_BLURHASH = True
try:
    import blurhash
except:
    IMPL_HAS_BLURHASH = False

try:
    from urllib.parse import urlparse
except ImportError:
    from urlparse import urlparse

try:
    import magic
except ImportError:
    magic = None

###
# Version check functions, including decorator and parser
###

def parse_version_string(version_string):
    """Parses a semver version string, stripping off "rc" stuff if present."""
    string_parts = version_string.split(".")
    version_parts = [
        int(re.match("([0-9]*)", string_parts[0]).group(0)),
        int(re.match("([0-9]*)", string_parts[1]).group(0)),
        int(re.match("([0-9]*)", string_parts[2]).group(0))
    ]
    return version_parts

def bigger_version(version_string_a, version_string_b):
    """Returns the bigger version of two version strings."""
    major_a, minor_a, patch_a = parse_version_string(version_string_a)
    major_b, minor_b, patch_b = parse_version_string(version_string_b)

    if major_a > major_b:
        return version_string_a
    elif major_a == major_b and minor_a > minor_b:
        return version_string_a
    elif major_a == major_b and minor_a == minor_b and patch_a > patch_b:
        return version_string_a
    return version_string_b

def api_version(created_ver, last_changed_ver, return_value_ver):
    """Version check decorator. Currently only checks Bigger Than."""
    def api_min_version_decorator(function):
        def wrapper(function, self, *args, **kwargs):
            if not self.version_check_mode == "none":
                if self.version_check_mode == "created":
                    version = created_ver
                else:
                    version = bigger_version(last_changed_ver, return_value_ver)
                major, minor, patch = parse_version_string(version)
                if major > self.mastodon_major:
                    raise MastodonVersionError("Version check failed (Need version " + version + ")")
                elif major == self.mastodon_major and minor > self.mastodon_minor:
                    raise MastodonVersionError("Version check failed (Need version " + version + ")")
                elif major == self.mastodon_major and minor == self.mastodon_minor and patch > self.mastodon_patch:
                    raise MastodonVersionError("Version check failed (Need version " + version + ", patch is " + str(self.mastodon_patch) + ")")
            return function(self, *args, **kwargs)
        function.__doc__ = function.__doc__ + "\n\n *Added: Mastodon v" + created_ver + ", last changed: Mastodon v" + last_changed_ver + "*"
        return decorate(function, wrapper)
    return api_min_version_decorator
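
# Usage sketch (illustrative only, not part of the library): the helpers above compare
# plain semver strings, and api_version() wraps API methods so that calls against
# servers older than the required version raise MastodonVersionError.
#
#     parse_version_string("2.9.2rc1")   # -> [2, 9, 2]
#     bigger_version("2.1.0", "2.0.3")   # -> "2.1.0"
#
#     @api_version("2.4.0", "2.4.0", "2.4.0")
#     def some_endpoint(self):
#         ...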

###
# Dict helper class.
# Defined at top level so it can be pickled.
###
class AttribAccessDict(dict):
    def __getattr__(self, attr):
        if attr in self:
            return self[attr]
        else:
            raise AttributeError("Attribute not found: " + str(attr))

    def __setattr__(self, attr, val):
        if attr in self:
            raise AttributeError("Attribute-style access is read only")
        super(AttribAccessDict, self).__setattr__(attr, val)

###
# The actual Mastodon class
###
class Mastodon:
    """
    Thorough and easy to use Mastodon
    API wrapper in Python.

    If anything is unclear, check the official API docs at
    https://github.com/tootsuite/documentation/blob/master/Using-the-API/API.md
    """
    __DEFAULT_BASE_URL = 'https://mastodon.social'
    __DEFAULT_TIMEOUT = 300
    __DEFAULT_STREAM_TIMEOUT = 300
    __DEFAULT_STREAM_RECONNECT_WAIT_SEC = 5

    __DEFAULT_SCOPES = ['read', 'write', 'follow', 'push']
    __SCOPE_SETS = {
        'read': [
            'read:accounts',
            'read:blocks',
            'read:favourites',
            'read:filters',
            'read:follows',
            'read:lists',
            'read:mutes',
            'read:notifications',
            'read:search',
            'read:statuses'
        ],
        'write': [
            'write:accounts',
            'write:blocks',
            'write:favourites',
            'write:filters',
            'write:follows',
            'write:lists',
            'write:media',
            'write:mutes',
            'write:notifications',
            'write:reports',
            'write:statuses',
        ],
        'follow': [
            'read:blocks',
            'read:follows',
            'read:mutes',
            'write:blocks',
            'write:follows',
            'write:mutes',
        ],
        'admin:read': [
            'admin:read:accounts',
            'admin:read:reports',
        ],
        'admin:write': [
            'admin:write:accounts',
            'admin:write:reports',
        ],
    }
    __VALID_SCOPES = ['read', 'write', 'follow', 'push', 'admin:read', 'admin:write'] + \
        __SCOPE_SETS['read'] + __SCOPE_SETS['write'] + __SCOPE_SETS['admin:read'] + __SCOPE_SETS['admin:write']

    __SUPPORTED_MASTODON_VERSION = "2.9.2"

    # Dict versions
    __DICT_VERSION_APPLICATION = "2.7.2"
    __DICT_VERSION_MENTION = "1.0.0"
    __DICT_VERSION_MEDIA = "2.8.2"
    __DICT_VERSION_ACCOUNT = "2.4.0"
    __DICT_VERSION_POLL = "2.8.0"
    __DICT_VERSION_STATUS = bigger_version(bigger_version(bigger_version(bigger_version(bigger_version("2.9.1",
        __DICT_VERSION_MEDIA), __DICT_VERSION_ACCOUNT), __DICT_VERSION_APPLICATION), __DICT_VERSION_MENTION), __DICT_VERSION_POLL)
    __DICT_VERSION_INSTANCE = bigger_version("2.9.2", __DICT_VERSION_ACCOUNT)
    __DICT_VERSION_HASHTAG = "2.3.4"
    __DICT_VERSION_EMOJI = "2.1.0"
    __DICT_VERSION_RELATIONSHIP = "2.5.0"
    __DICT_VERSION_NOTIFICATION = bigger_version(bigger_version("1.0.0", __DICT_VERSION_ACCOUNT), __DICT_VERSION_STATUS)
    __DICT_VERSION_CONTEXT = bigger_version("1.0.0", __DICT_VERSION_STATUS)
    __DICT_VERSION_LIST = "2.1.0"
    __DICT_VERSION_CARD = "2.0.0"
    __DICT_VERSION_SEARCHRESULT = bigger_version(bigger_version(bigger_version("1.0.0",
        __DICT_VERSION_ACCOUNT), __DICT_VERSION_STATUS), __DICT_VERSION_HASHTAG)
    __DICT_VERSION_ACTIVITY = "2.1.2"
    __DICT_VERSION_REPORT = "2.9.1"
    __DICT_VERSION_PUSH = "2.4.0"
    __DICT_VERSION_PUSH_NOTIF = "2.4.0"
    __DICT_VERSION_FILTER = "2.4.3"
    __DICT_VERSION_CONVERSATION = bigger_version(bigger_version("2.6.0", __DICT_VERSION_ACCOUNT), __DICT_VERSION_STATUS)
    __DICT_VERSION_SCHEDULED_STATUS = bigger_version("2.7.0", __DICT_VERSION_STATUS)
    __DICT_VERSION_PREFERENCES = "2.8.0"
    __DICT_VERSION_ADMIN_ACCOUNT = "2.9.1"

    ###
    # Registering apps
    ###
    @staticmethod
    def create_app(client_name, scopes=__DEFAULT_SCOPES, redirect_uris=None, website=None, to_file=None,
                   api_base_url=__DEFAULT_BASE_URL, request_timeout=__DEFAULT_TIMEOUT, session=None):
        """
        Create a new app with given `client_name` and `scopes` (the basic scopes are "read", "write", "follow" and "push"
        - more granular scopes are available, please refer to the Mastodon documentation for which).

        Specify `redirect_uris` if you want users to be redirected to a certain page after authenticating in an oauth flow.
        You can specify multiple URLs by passing a list. Note that if you wish to use OAuth authentication with redirects,
        the redirect URI must be one of the URLs specified here.

        Specify `to_file` to persist your apps info to a file so you can use them in the constructor.
        Specify `api_base_url` if you want to register an app on an instance different from the flagship one.
        Specify `website` to give a website for your app.

        Specify `session` with a requests.Session for it to be used instead of the default. This can be
        used to, amongst other things, adjust proxy or ssl certificate settings.

        Presently, app registration is open by default, but this is not guaranteed to be the case for all
        future mastodon instances or even the flagship instance in the future.

        Returns `client_id` and `client_secret`, both as strings.
        """
        api_base_url = Mastodon.__protocolize(api_base_url)

        request_data = {
            'client_name': client_name,
            'scopes': " ".join(scopes)
        }

        try:
            if redirect_uris is not None:
                if isinstance(redirect_uris, (list, tuple)):
                    redirect_uris = "\n".join(list(redirect_uris))
                request_data['redirect_uris'] = redirect_uris
            else:
                request_data['redirect_uris'] = 'urn:ietf:wg:oauth:2.0:oob'
            if website is not None:
                request_data['website'] = website
            if session:
                ret = session.post(api_base_url + '/api/v1/apps', data=request_data, timeout=request_timeout)
                response = ret.json()
            else:
                response = requests.post(api_base_url + '/api/v1/apps', data=request_data, timeout=request_timeout)
                response = response.json()
        except Exception as e:
            raise MastodonNetworkError("Could not complete request: %s" % e)

        if to_file is not None:
            with open(to_file, 'w') as secret_file:
                secret_file.write(response['client_id'] + '\n')
                secret_file.write(response['client_secret'] + '\n')

        return (response['client_id'], response['client_secret'])
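
    # Usage sketch (illustrative only, not part of the library) - registering an app and
    # persisting its credentials; the client name, file name and instance URL are placeholders:
    #
    #     Mastodon.create_app(
    #         "my_client",
    #         api_base_url="https://mastodon.example",
    #         to_file="my_client_clientcred.secret"
    #     )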

    ###
    # Authentication, including constructor
    ###
    def __init__(self, client_id=None, client_secret=None, access_token=None,
                 api_base_url=__DEFAULT_BASE_URL, debug_requests=False,
                 ratelimit_method="wait", ratelimit_pacefactor=1.1,
                 request_timeout=__DEFAULT_TIMEOUT, mastodon_version=None,
                 version_check_mode="created", session=None):
        """
        Create a new API wrapper instance based on the given `client_secret` and `client_id`. If you
        give a `client_id` and it is not a file, you must also give a secret. If you specify an
        `access_token` then you don't need to specify a `client_id`. It is allowed to specify
        neither - in this case, you will be restricted to only using endpoints that do not
        require authentication.

        You can also specify an `access_token`, directly or as a file (as written by `log_in()`_).

        Mastodon.py can try to respect rate limits in several ways, controlled by `ratelimit_method`.
        "throw" makes functions throw a `MastodonRatelimitError` when the rate
        limit is hit. "wait" mode will, once the limit is hit, wait and retry the request as soon
        as the rate limit resets, until it succeeds. "pace" works like throw, but tries to wait in
        between calls so that the limit is generally not hit (how hard it tries to not hit the rate
        limit can be controlled by ratelimit_pacefactor). The default setting is "wait". Note that
        even in "wait" and "pace" mode, requests can still fail due to network or other problems! Also
        note that "pace" and "wait" are NOT thread safe.

        Specify `api_base_url` if you wish to talk to an instance other than the flagship one.
        If a file is given as `client_id`, client ID and secret are read from that file.

        By default, a timeout of 300 seconds is used for all requests. If you wish to change this,
        pass the desired timeout (in seconds) as `request_timeout`.

        For fine-tuned control over the requests object use `session` with a requests.Session.

        The `mastodon_version` parameter can be used to specify the version of Mastodon that Mastodon.py will
        expect to be installed on the server. The function will throw an error if an unparseable
        version is specified. If no version is specified, Mastodon.py will set `mastodon_version` to the
        detected version.

        The version check mode can be set to "created" (the default behaviour), "changed" or "none". If set to
        "created", Mastodon.py will throw an error if the version of Mastodon it is connected to is too old
        to have an endpoint. If it is set to "changed", it will throw an error if the endpoint's behaviour has
        changed after the version of Mastodon that is connected has been released. If it is set to "none",
        version checking is disabled.
        """
        self.api_base_url = Mastodon.__protocolize(api_base_url)
        self.client_id = client_id
        self.client_secret = client_secret
        self.access_token = access_token
        self.debug_requests = debug_requests
        self.ratelimit_method = ratelimit_method
        self._token_expired = datetime.datetime.now()
        self._refresh_token = None

        self.__logged_in_id = None

        self.ratelimit_limit = 300
        self.ratelimit_reset = time.time()
        self.ratelimit_remaining = 300
        self.ratelimit_lastcall = time.time()
        self.ratelimit_pacefactor = ratelimit_pacefactor

        self.request_timeout = request_timeout

        if session:
            self.session = session
        else:
            self.session = requests.Session()

        # Versioning
        if mastodon_version == None:
            self.retrieve_mastodon_version()
        else:
            try:
                self.mastodon_major, self.mastodon_minor, self.mastodon_patch = parse_version_string(mastodon_version)
            except:
                raise MastodonVersionError("Bad version specified")

        if not version_check_mode in ["created", "changed", "none"]:
            raise MastodonIllegalArgumentError("Invalid version check method.")
        self.version_check_mode = version_check_mode

        # Ratelimiting parameter check
        if ratelimit_method not in ["throw", "wait", "pace"]:
            raise MastodonIllegalArgumentError("Invalid ratelimit method.")

        # Token loading
        if self.client_id is not None:
            if os.path.isfile(self.client_id):
                with open(self.client_id, 'r') as secret_file:
                    self.client_id = secret_file.readline().rstrip()
                    self.client_secret = secret_file.readline().rstrip()
            else:
                if self.client_secret is None:
                    raise MastodonIllegalArgumentError('Specified client id directly, but did not supply secret')

        if self.access_token is not None and os.path.isfile(self.access_token):
            with open(self.access_token, 'r') as token_file:
                self.access_token = token_file.readline().rstrip()

    def retrieve_mastodon_version(self):
        """
        Determine installed mastodon version and set major, minor and patch (not including RC info) accordingly.

        Returns the version string, possibly including rc info.
        """
        try:
            version_str = self.__instance()["version"]
        except:
            # instance() was added in 1.1.0, so our best guess is 1.0.0.
            version_str = "1.0.0"

        self.mastodon_major, self.mastodon_minor, self.mastodon_patch = parse_version_string(version_str)
        return version_str

    def verify_minimum_version(self, version_str, cached=False):
        """
        Update version info from server and verify that at least the specified version is present.

        If you specify "cached", the version info update part is skipped.

        Returns True if version requirement is satisfied, False if not.
        """
        if not cached:
            self.retrieve_mastodon_version()
        major, minor, patch = parse_version_string(version_str)
        if major > self.mastodon_major:
            return False
        elif major == self.mastodon_major and minor > self.mastodon_minor:
            return False
        elif major == self.mastodon_major and minor == self.mastodon_minor and patch > self.mastodon_patch:
            return False
        return True

    @staticmethod
    def get_supported_version():
        """
        Retrieve the maximum version of Mastodon supported by this version of Mastodon.py
        """
        return Mastodon.__SUPPORTED_MASTODON_VERSION
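
    # Usage sketch (illustrative only) - gating a call on the server version at runtime;
    # the token file and instance URL are placeholders:
    #
    #     api = Mastodon(access_token="user.secret", api_base_url="https://mastodon.example")
    #     if api.verify_minimum_version("2.8.0"):
    #         print(api.preferences())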

    def auth_request_url(self, client_id=None, redirect_uris="urn:ietf:wg:oauth:2.0:oob",
                         scopes=__DEFAULT_SCOPES, force_login=False):
        """
        Returns the url that a client needs to request an oauth grant from the server.

        To log in with oauth, send your user to this URL. The user will then log in and
        get a code which you can pass to log_in.

        scopes are as in `log_in()`_, redirect_uris is where the user should be redirected to
        after authentication. Note that redirect_uris must be one of the URLs given during
        app registration. When using urn:ietf:wg:oauth:2.0:oob, the code is simply displayed,
        otherwise it is added to the given URL as the "code" request parameter.

        Pass force_login if you want the user to always log in even when already logged
        into web mastodon (i.e. when registering multiple different accounts in an app).
        """
        if client_id is None:
            client_id = self.client_id
        else:
            if os.path.isfile(client_id):
                with open(client_id, 'r') as secret_file:
                    client_id = secret_file.readline().rstrip()

        params = dict()
        params['client_id'] = client_id
        params['response_type'] = "code"
        params['redirect_uri'] = redirect_uris
        params['scope'] = " ".join(scopes)
        params['force_login'] = force_login
        formatted_params = urlencode(params)
        return "".join([self.api_base_url, "/oauth/authorize?", formatted_params])

    def log_in(self, username=None, password=None,
               code=None, redirect_uri="urn:ietf:wg:oauth:2.0:oob", refresh_token=None,
               scopes=__DEFAULT_SCOPES, to_file=None):
        """
        Get the access token for a user.

        The username is the e-mail used to log in into mastodon.

        Can persist access token to file `to_file`, to be used in the constructor.

        Handles password and OAuth-based authorization.

        Will throw a `MastodonIllegalArgumentError` if the OAuth or the
        username / password credentials given are incorrect, and
        `MastodonAPIError` if all of the requested scopes were not granted.

        For OAuth2, obtain a code via having your user go to the url returned by
        `auth_request_url()`_ and pass it as the code parameter. In this case,
        make sure to also pass the same redirect_uri parameter as you used when
        generating the auth request URL.

        Returns the access token as a string.
        """
        if username is not None and password is not None:
            params = self.__generate_params(locals(), ['scopes', 'to_file', 'code', 'refresh_token'])
            params['grant_type'] = 'password'
        elif code is not None:
            params = self.__generate_params(locals(), ['scopes', 'to_file', 'username', 'password', 'refresh_token'])
            params['grant_type'] = 'authorization_code'
        elif refresh_token is not None:
            params = self.__generate_params(locals(), ['scopes', 'to_file', 'username', 'password', 'code'])
            params['grant_type'] = 'refresh_token'
        else:
            raise MastodonIllegalArgumentError('Invalid arguments given. username and password or code are required.')

        params['client_id'] = self.client_id
        params['client_secret'] = self.client_secret
        params['scope'] = " ".join(scopes)

        try:
            response = self.__api_request('POST', '/oauth/token', params, do_ratelimiting=False)
            self.access_token = response['access_token']
            self.__set_refresh_token(response.get('refresh_token'))
            self.__set_token_expired(int(response.get('expires_in', 0)))
        except Exception as e:
            if username is not None or password is not None:
                raise MastodonIllegalArgumentError('Invalid user name, password, or redirect_uris: %s' % e)
            elif code is not None:
                raise MastodonIllegalArgumentError('Invalid access token or redirect_uris: %s' % e)
            else:
                raise MastodonIllegalArgumentError('Invalid request: %s' % e)

        received_scopes = response["scope"].split(" ")
        for scope_set in self.__SCOPE_SETS.keys():
            if scope_set in received_scopes:
                received_scopes += self.__SCOPE_SETS[scope_set]

        if not set(scopes) <= set(received_scopes):
            raise MastodonAPIError(
                'Granted scopes "' + " ".join(received_scopes) + '" do not contain all of the requested scopes "' + " ".join(scopes) + '".')

        if to_file is not None:
            with open(to_file, 'w') as token_file:
                token_file.write(response['access_token'] + '\n')

        self.__logged_in_id = None

        return response['access_token']
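
    # Usage sketch (illustrative only) - the "out of band" OAuth flow using the two methods
    # above; file names and the instance URL are placeholders:
    #
    #     api = Mastodon(client_id="my_client_clientcred.secret",
    #                    api_base_url="https://mastodon.example")
    #     print(api.auth_request_url())          # have the user open this URL and copy the code
    #     api.log_in(code="<code pasted by the user>",
    #                to_file="my_client_usercred.secret")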

    @api_version("2.7.0", "2.7.0", "2.7.0")
    def create_account(self, username, password, email, agreement=False, locale="en", scopes=__DEFAULT_SCOPES, to_file=None):
        """
        Creates a new user account with the given username, password and email. "agreement"
        must be set to true (after showing the user the instance's user agreement and having
        them agree to it), "locale" specifies the language for the confirmation e-mail as an
        ISO 639-1 (two-letter) language code.

        Does not require an access token, but does require a client grant.

        By default, this method is rate-limited by IP to 5 requests per 30 minutes.

        Returns an access token (just like log_in), which it can also persist to to_file,
        and sets it internally so that the user is now logged in. Note that this token
        can only be used after the user has confirmed their e-mail.
        """
        params = self.__generate_params(locals(), ['to_file', 'scopes'])
        params['client_id'] = self.client_id
        params['client_secret'] = self.client_secret

        if agreement == False:
            del params['agreement']

        # Step 1: Get a user-free token via oauth
        try:
            oauth_params = {}
            oauth_params['scope'] = " ".join(scopes)
            oauth_params['client_id'] = self.client_id
            oauth_params['client_secret'] = self.client_secret
            oauth_params['grant_type'] = 'client_credentials'

            response = self.__api_request('POST', '/oauth/token', oauth_params, do_ratelimiting=False)
            temp_access_token = response['access_token']
        except Exception as e:
            raise MastodonIllegalArgumentError('Invalid request during oauth phase: %s' % e)

        # Step 2: Use that to create a user
        try:
            response = self.__api_request('POST', '/api/v1/accounts', params, do_ratelimiting=False,
                                          access_token_override=temp_access_token)
            self.access_token = response['access_token']
            self.__set_refresh_token(response.get('refresh_token'))
            self.__set_token_expired(int(response.get('expires_in', 0)))
        except Exception as e:
            raise MastodonIllegalArgumentError('Invalid request: %s' % e)

        # Step 3: Check scopes, persist, et cetera
        received_scopes = response["scope"].split(" ")
        for scope_set in self.__SCOPE_SETS.keys():
            if scope_set in received_scopes:
                received_scopes += self.__SCOPE_SETS[scope_set]

        if not set(scopes) <= set(received_scopes):
            raise MastodonAPIError(
                'Granted scopes "' + " ".join(received_scopes) + '" do not contain all of the requested scopes "' + " ".join(scopes) + '".')

        if to_file is not None:
            with open(to_file, 'w') as token_file:
                token_file.write(response['access_token'] + '\n')

        self.__logged_in_id = None

        return response['access_token']

    ###
    # Reading data: Instances
    ###
    @api_version("1.1.0", "2.3.0", __DICT_VERSION_INSTANCE)
    def instance(self):
        """
        Retrieve basic information about the instance, including the URI and administrative contact email.

        Does not require authentication.

        Returns an `instance dict`_.
        """
        return self.__instance()

    def __instance(self):
        """
        Internal, non-version-checking helper that does the same as instance()
        """
        instance = self.__api_request('GET', '/api/v1/instance/')
        return instance

    @api_version("2.1.2", "2.1.2", __DICT_VERSION_ACTIVITY)
    def instance_activity(self):
        """
        Retrieve activity stats about the instance. May be disabled by the instance administrator - throws
        a MastodonNotFoundError in that case.

        Activity is returned for 12 weeks going back from the current week.

        Returns a list of `activity dicts`_.
        """
        return self.__api_request('GET', '/api/v1/instance/activity')

    @api_version("2.1.2", "2.1.2", "2.1.2")
    def instance_peers(self):
        """
        Retrieve the instances that this instance knows about. May be disabled by the instance administrator - throws
        a MastodonNotFoundError in that case.

        Returns a list of URL strings.
        """
        return self.__api_request('GET', '/api/v1/instance/peers')

    ###
    # Reading data: Timelines
    ###
    @api_version("1.0.0", "2.6.0", __DICT_VERSION_STATUS)
    def timeline(self, timeline="home", max_id=None, min_id=None, since_id=None, limit=None):
        """
        Fetch statuses, most recent ones first. `timeline` can be 'home', 'local', 'public',
        'tag/hashtag' or 'list/id'. See the following functions' documentation for what those do.

        Local hashtag timelines are supported via the `timeline_hashtag()`_ function.

        The default timeline is the "home" timeline.

        Media only queries are supported via the `timeline_public()`_ and `timeline_hashtag()`_ functions.

        Returns a list of `toot dicts`_.
        """
        if max_id != None:
            max_id = self.__unpack_id(max_id)

        if min_id != None:
            min_id = self.__unpack_id(min_id)

        if since_id != None:
            since_id = self.__unpack_id(since_id)

        params_initial = locals()

        if timeline == "local":
            timeline = "public"
            params_initial['local'] = True

        params = self.__generate_params(params_initial, ['timeline'])
        url = '/api/v1/timelines/{0}'.format(timeline)
        return self.__api_request('GET', url, params)
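
    # Usage sketch (illustrative only, assuming `api` is a logged-in Mastodon instance) -
    # fetching the home timeline and paging backwards with max_id:
    #
    #     page = api.timeline_home(limit=20)
    #     while page:
    #         for status in page:
    #             print(status.account.acct, status.url)
    #         page = api.timeline_home(limit=20, max_id=page[-1].id)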
@api_version("1.0.0", "2.6.0", __DICT_VERSION_STATUS)
def timeline_home(self, max_id=None, min_id=None, since_id=None, limit=None):
"""
2017-12-13 18:40:17 +01:00
Fetch the logged-in users home timeline (i.e. followed users and self).
2017-12-13 18:40:17 +01:00
Returns a list of `toot dicts`_.
"""
return self.timeline('home', max_id=max_id, min_id=min_id,
since_id=since_id, limit=limit)
@api_version("1.0.0", "2.6.0", __DICT_VERSION_STATUS)
def timeline_local(self, max_id=None, min_id=None, since_id=None, limit=None):
2017-04-02 19:35:42 +02:00
"""
2017-04-25 11:41:48 +02:00
Fetches the local / instance-wide timeline, not including replies.
2017-04-02 19:35:42 +02:00
2017-12-13 18:40:17 +01:00
Returns a list of `toot dicts`_.
2017-04-02 19:35:42 +02:00
"""
return self.timeline('local', max_id=max_id, min_id=min_id,
since_id=since_id, limit=limit)
2017-04-02 19:35:42 +02:00
@api_version("1.0.0", "2.6.0", __DICT_VERSION_STATUS)
def timeline_public(self, max_id=None, min_id=None, since_id=None, limit=None, only_media=False):
"""
2017-04-25 11:41:48 +02:00
Fetches the public / visible-network timeline, not including replies.
2018-05-07 00:53:13 +02:00
Set `only_media` to True to retrieve only statuses with media attachments.
2017-12-13 18:40:17 +01:00
Returns a list of `toot dicts`_.
"""
2018-05-07 00:53:13 +02:00
if max_id != None:
max_id = self.__unpack_id(max_id)
if min_id != None:
min_id = self.__unpack_id(min_id)
2018-05-07 00:53:13 +02:00
if since_id != None:
since_id = self.__unpack_id(since_id)
2018-05-07 00:53:13 +02:00
params_initial = locals()
if only_media == False:
del params_initial['only_media']
url = '/api/v1/timelines/public'
params = self.__generate_params(params_initial)
return self.__api_request('GET', url, params)
@api_version("1.0.0", "2.6.0", __DICT_VERSION_STATUS)
def timeline_hashtag(self, hashtag, local=False, max_id=None, min_id=None, since_id=None, limit=None, only_media=False):
"""
Fetch a timeline of toots with a given hashtag. The hashtag parameter
should not contain the leading #.
2017-12-13 18:40:17 +01:00
Set `local` to True to retrieve only instance-local tagged posts.
2018-05-07 00:53:13 +02:00
Set `only_media` to True to retrieve only statuses with media attachments.
2017-12-13 18:40:17 +01:00
Returns a list of `toot dicts`_.
"""
if hashtag.startswith("#"):
raise MastodonIllegalArgumentError("Hashtag parameter should omit leading #")
2017-11-22 10:26:44 +01:00
if max_id != None:
max_id = self.__unpack_id(max_id)
if min_id != None:
min_id = self.__unpack_id(min_id)
2017-11-22 10:26:44 +01:00
if since_id != None:
since_id = self.__unpack_id(since_id)
params_initial = locals()
if local == False:
del params_initial['local']
2018-05-07 00:53:13 +02:00
if only_media == False:
del params_initial['only_media']
url = '/api/v1/timelines/tag/{0}'.format(hashtag)
params = self.__generate_params(params_initial, ['hashtag'])
return self.__api_request('GET', url, params)
@api_version("2.1.0", "2.6.0", __DICT_VERSION_STATUS)
def timeline_list(self, id, max_id=None, min_id=None, since_id=None, limit=None):
2017-12-13 21:16:02 +01:00
"""
Fetches a timeline containing all the toots by users in a given list.
Returns a list of `toot dicts`_.
"""
id = self.__unpack_id(id)
return self.timeline('list/{0}'.format(id), max_id=max_id,
min_id=min_id, since_id=since_id, limit=limit)
2017-12-13 21:16:02 +01:00
2019-04-28 13:47:43 +02:00
@api_version("2.6.0", "2.6.0", __DICT_VERSION_CONVERSATION)
def conversations(self, max_id=None, min_id=None, since_id=None, limit=None):
"""
Fetches a users conversations.
Returns a list of `conversation dicts`_.
"""
if max_id != None:
max_id = self.__unpack_id(max_id)
if min_id != None:
min_id = self.__unpack_id(min_id)
if since_id != None:
since_id = self.__unpack_id(since_id)
params = self.__generate_params(locals())
return self.__api_request('GET', "/api/v1/conversations/", params)

    ###
    # Reading data: Statuses
    ###
    @api_version("1.0.0", "2.0.0", __DICT_VERSION_STATUS)
    def status(self, id):
        """
        Fetch information about a single toot.

        Does not require authentication for publicly visible statuses.

        Returns a `toot dict`_.
        """
        id = self.__unpack_id(id)
        url = '/api/v1/statuses/{0}'.format(str(id))
        return self.__api_request('GET', url)

    @api_version("1.0.0", "3.0.0", __DICT_VERSION_CARD)
    def status_card(self, id):
        """
        Fetch a card associated with a status. A card describes an object (such as an
        external video or link) embedded into a status.

        Does not require authentication for publicly visible statuses.

        This function is deprecated as of 3.0.0 and the endpoint does not
        exist anymore - you should just use the "card" field of the status dicts
        instead. Mastodon.py will try to mimic the old behaviour, but this
        is somewhat inefficient and not guaranteed to be the case forever.

        Returns a `card dict`_.
        """
        if self.verify_minimum_version("3.0.0"):
            return self.status(id).card
        else:
            id = self.__unpack_id(id)
            url = '/api/v1/statuses/{0}/card'.format(str(id))
            return self.__api_request('GET', url)

    @api_version("1.0.0", "1.0.0", __DICT_VERSION_CONTEXT)
    def status_context(self, id):
        """
        Fetch information about ancestors and descendants of a toot.

        Does not require authentication for publicly visible statuses.

        Returns a `context dict`_.
        """
        id = self.__unpack_id(id)
        url = '/api/v1/statuses/{0}/context'.format(str(id))
        return self.__api_request('GET', url)

    @api_version("1.0.0", "2.1.0", __DICT_VERSION_ACCOUNT)
    def status_reblogged_by(self, id):
        """
        Fetch a list of users that have reblogged a status.

        Does not require authentication for publicly visible statuses.

        Returns a list of `user dicts`_.
        """
        id = self.__unpack_id(id)
        url = '/api/v1/statuses/{0}/reblogged_by'.format(str(id))
        return self.__api_request('GET', url)

    @api_version("1.0.0", "2.1.0", __DICT_VERSION_ACCOUNT)
    def status_favourited_by(self, id):
        """
        Fetch a list of users that have favourited a status.

        Does not require authentication for publicly visible statuses.

        Returns a list of `user dicts`_.
        """
        id = self.__unpack_id(id)
        url = '/api/v1/statuses/{0}/favourited_by'.format(str(id))
        return self.__api_request('GET', url)

    ###
    # Reading data: Scheduled statuses
    ###
    @api_version("2.7.0", "2.7.0", __DICT_VERSION_SCHEDULED_STATUS)
    def scheduled_statuses(self):
        """
        Fetch a list of scheduled statuses.

        Returns a list of `scheduled toot dicts`_.
        """
        return self.__api_request('GET', '/api/v1/scheduled_statuses')

    @api_version("2.7.0", "2.7.0", __DICT_VERSION_SCHEDULED_STATUS)
    def scheduled_status(self, id):
        """
        Fetch information about the scheduled status with the given id.

        Returns a `scheduled toot dict`_.
        """
        id = self.__unpack_id(id)
        url = '/api/v1/scheduled_statuses/{0}'.format(str(id))
        return self.__api_request('GET', url)

    ###
    # Reading data: Polls
    ###
    @api_version("2.8.0", "2.8.0", __DICT_VERSION_POLL)
    def poll(self, id):
        """
        Fetch information about the poll with the given id.

        Returns a `poll dict`_.
        """
        id = self.__unpack_id(id)
        url = '/api/v1/polls/{0}'.format(str(id))
        return self.__api_request('GET', url)

    ###
    # Reading data: Notifications
    ###
    @api_version("1.0.0", "2.9.0", __DICT_VERSION_NOTIFICATION)
    def notifications(self, id=None, account_id=None, max_id=None, min_id=None, since_id=None, limit=None):
        """
        Fetch notifications (mentions, favourites, reblogs, follows) for the logged-in
        user. Pass `account_id` to get only notifications originating from the given account.

        Can be passed an `id` to fetch a single notification.

        Returns a list of `notification dicts`_.
        """
        if max_id != None:
            max_id = self.__unpack_id(max_id)

        if min_id != None:
            min_id = self.__unpack_id(min_id)

        if since_id != None:
            since_id = self.__unpack_id(since_id)

        if account_id != None:
            account_id = self.__unpack_id(account_id)

        if id is None:
            params = self.__generate_params(locals(), ['id'])
            return self.__api_request('GET', '/api/v1/notifications', params)
        else:
            id = self.__unpack_id(id)
            url = '/api/v1/notifications/{0}'.format(str(id))
            return self.__api_request('GET', url)

    ###
    # Reading data: Accounts
    ###
    @api_version("1.0.0", "1.0.0", __DICT_VERSION_ACCOUNT)
    def account(self, id):
        """
        Fetch account information by user `id`.

        Does not require authentication.

        Returns a `user dict`_.
        """
        id = self.__unpack_id(id)
        url = '/api/v1/accounts/{0}'.format(str(id))
        return self.__api_request('GET', url)

    @api_version("1.0.0", "2.1.0", __DICT_VERSION_ACCOUNT)
    def account_verify_credentials(self):
        """
        Fetch logged-in user's account information.

        Returns a `user dict`_ (Starting from 2.1.0, with an additional "source" field).
        """
        return self.__api_request('GET', '/api/v1/accounts/verify_credentials')

    @api_version("1.0.0", "2.7.0", __DICT_VERSION_STATUS)
    def account_statuses(self, id, only_media=False, pinned=False, exclude_replies=False, max_id=None, min_id=None, since_id=None, limit=None):
        """
        Fetch statuses by user `id`. Same options as `timeline()`_ are permitted.
        Returned toots are from the perspective of the logged-in user, i.e.
        all statuses visible to the logged-in user (including DMs) are
        included.

        If `only_media` is set, return only statuses with media attachments.
        If `pinned` is set, return only statuses that have been pinned. Note that
        as of Mastodon 2.1.0, this only works properly for instance-local users.
        If `exclude_replies` is set, filter out all statuses that are replies.

        Does not require authentication for Mastodon versions after 2.7.0 (returns
        publicly visible statuses in that case).

        Returns a list of `toot dicts`_.
        """
        id = self.__unpack_id(id)
        if max_id != None:
            max_id = self.__unpack_id(max_id)

        if min_id != None:
            min_id = self.__unpack_id(min_id)

        if since_id != None:
            since_id = self.__unpack_id(since_id)

        params = self.__generate_params(locals(), ['id'])
        if pinned == False:
            del params["pinned"]
        if only_media == False:
            del params["only_media"]
        if exclude_replies == False:
            del params["exclude_replies"]

        url = '/api/v1/accounts/{0}/statuses'.format(str(id))
        return self.__api_request('GET', url, params)
@api_version("1.0.0", "2.6.0", __DICT_VERSION_ACCOUNT)
def account_following(self, id, max_id=None, min_id=None, since_id=None, limit=None):
2016-11-24 00:36:00 +01:00
"""
Fetch users the given user is following.
2017-12-13 18:40:17 +01:00
Returns a list of `user dicts`_.
2016-11-24 00:55:09 +01:00
"""
2017-11-22 10:26:44 +01:00
id = self.__unpack_id(id)
if max_id != None:
max_id = self.__unpack_id(max_id)
if min_id != None:
min_id = self.__unpack_id(min_id)
2017-11-22 10:26:44 +01:00
if since_id != None:
since_id = self.__unpack_id(since_id)
params = self.__generate_params(locals(), ['id'])
2017-09-05 22:59:32 +02:00
url = '/api/v1/accounts/{0}/following'.format(str(id))
return self.__api_request('GET', url, params)
2016-11-23 23:30:51 +01:00
@api_version("1.0.0", "2.6.0", __DICT_VERSION_ACCOUNT)
def account_followers(self, id, max_id=None, min_id=None, since_id=None, limit=None):
2016-11-24 00:36:00 +01:00
"""
Fetch users the given user is followed by.
2017-12-13 18:40:17 +01:00
Returns a list of `user dicts`_.
2016-11-24 00:55:09 +01:00
"""
2017-11-22 10:26:44 +01:00
id = self.__unpack_id(id)
if max_id != None:
max_id = self.__unpack_id(max_id)
if min_id != None:
min_id = self.__unpack_id(min_id)
2017-11-22 10:26:44 +01:00
if since_id != None:
since_id = self.__unpack_id(since_id)
params = self.__generate_params(locals(), ['id'])
2017-09-05 22:59:32 +02:00
url = '/api/v1/accounts/{0}/followers'.format(str(id))
return self.__api_request('GET', url, params)
2018-05-07 00:53:13 +02:00
@api_version("1.0.0", "1.4.0", __DICT_VERSION_RELATIONSHIP)
2016-11-23 23:30:51 +01:00
def account_relationships(self, id):
2016-11-24 00:36:00 +01:00
"""
Fetch relationship (following, followed_by, blocking, follow requested) of
2017-12-13 18:40:17 +01:00
the logged in user to a given account. `id` can be a list.
2017-12-13 18:40:17 +01:00
Returns a list of `relationship dicts`_.
2016-11-24 00:55:09 +01:00
"""
2017-11-22 10:26:44 +01:00
id = self.__unpack_id(id)
2016-11-23 23:30:51 +01:00
params = self.__generate_params(locals())
2017-09-05 22:59:32 +02:00
return self.__api_request('GET', '/api/v1/accounts/relationships',
params)
2016-11-23 23:30:51 +01:00
2018-05-07 00:53:13 +02:00
@api_version("1.0.0", "2.3.0", __DICT_VERSION_ACCOUNT)
2018-04-17 17:24:46 +02:00
def account_search(self, q, limit=None, following=False):
2016-11-24 00:36:00 +01:00
"""
Fetch matching accounts. Will lookup an account remotely if the search term is
2018-04-17 17:21:16 +02:00
in the username@domain format and not yet in the database. Set `following` to
True to limit the search to users the logged-in user follows.
2017-12-13 18:40:17 +01:00
Returns a list of `user dicts`_.
2016-11-24 00:55:09 +01:00
"""
2016-11-23 23:30:51 +01:00
params = self.__generate_params(locals())
2018-07-14 01:02:37 +02:00
if params["following"] == False:
del params["following"]
2016-11-23 23:30:51 +01:00
return self.__api_request('GET', '/api/v1/accounts/search', params)
2017-04-07 15:12:24 +02:00
2018-05-07 00:53:13 +02:00
@api_version("2.1.0", "2.1.0", __DICT_VERSION_LIST)
2017-12-13 18:59:32 +01:00
def account_lists(self, id):
"""
2018-06-04 14:54:26 +02:00
Get all of the logged-in users lists which the specified user is
2017-12-13 18:59:32 +01:00
a member of.
Returns a list of `list dicts`_.
"""
2018-05-06 15:42:38 +02:00
id = self.__unpack_id(id)
2017-12-13 18:59:32 +01:00
params = self.__generate_params(locals(), ['id'])
url = '/api/v1/accounts/{0}/lists'.format(str(id))
return self.__api_request('GET', url, params)

    ###
    # Reading data: Keyword filters
    ###
    @api_version("2.4.3", "2.4.3", __DICT_VERSION_FILTER)
    def filters(self):
        """
        Fetch all of the logged-in user's filters.

        Returns a list of `filter dicts`_. Not paginated.
        """
        return self.__api_request('GET', '/api/v1/filters')

    @api_version("2.4.3", "2.4.3", __DICT_VERSION_FILTER)
    def filter(self, id):
        """
        Fetches information about the filter with the specified `id`.

        Returns a `filter dict`_.
        """
        id = self.__unpack_id(id)
        url = '/api/v1/filters/{0}'.format(str(id))
        return self.__api_request('GET', url)

    @api_version("2.4.3", "2.4.3", __DICT_VERSION_FILTER)
    def filters_apply(self, objects, filters, context):
        """
        Helper function: Applies a list of filters to a list of either statuses
        or notifications and returns only those matched by none. This function will
        apply all filters that match the context provided in `context`, i.e.
        if you want to apply only notification-relevant filters, specify
        'notifications'. Valid contexts are 'home', 'notifications', 'public' and 'thread'.
        """
        # Build filter regex
        filter_strings = []
        for keyword_filter in filters:
            if not context in keyword_filter["context"]:
                continue

            filter_string = re.escape(keyword_filter["phrase"])
            if keyword_filter["whole_word"] == True:
                filter_string = "\\b" + filter_string + "\\b"
            filter_strings.append(filter_string)
        filter_re = re.compile("|".join(filter_strings), flags=re.IGNORECASE)

        # Apply
        filter_results = []
        for filter_object in objects:
            filter_status = filter_object
            if "status" in filter_object:
                filter_status = filter_object["status"]
            filter_text = filter_status["content"]
            filter_text = re.sub(r"<.*?>", " ", filter_text)
            filter_text = re.sub(r"\s+", " ", filter_text).strip()
            if not filter_re.search(filter_text):
                filter_results.append(filter_object)
        return filter_results
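
    # Usage sketch (illustrative only, assuming `api` is a logged-in Mastodon instance) -
    # applying the user's home-context keyword filters client-side to a fetched timeline:
    #
    #     keyword_filters = api.filters()
    #     home = api.timeline_home()
    #     visible = api.filters_apply(home, keyword_filters, 'home')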

    ###
    # Reading data: Follow suggestions
    ###
    @api_version("2.4.3", "2.4.3", __DICT_VERSION_ACCOUNT)
    def suggestions(self):
        """
        Fetch follow suggestions for the logged-in user.

        Returns a list of `user dicts`_.
        """
        return self.__api_request('GET', '/api/v1/suggestions')

    ###
    # Reading data: Endorsements
    ###
    @api_version("2.5.0", "2.5.0", __DICT_VERSION_ACCOUNT)
    def endorsements(self):
        """
        Fetch a list of users endorsed by the logged-in user.

        Returns a list of `user dicts`_.
        """
        return self.__api_request('GET', '/api/v1/endorsements')

    ###
    # Reading data: Searching
    ###
    def __ensure_search_params_acceptable(self, account_id, offset, min_id, max_id):
        """
        Internal helper: Throw a MastodonVersionError if version is < 2.8.0 but parameters
        for search that are available only starting with 2.8.0 are specified.
        """
        if not account_id is None or not offset is None or not min_id is None or not max_id is None:
            if self.verify_minimum_version("2.8.0", cached=True) == False:
                raise MastodonVersionError("Advanced search parameters require Mastodon 2.8.0+")

    @api_version("1.1.0", "2.8.0", __DICT_VERSION_SEARCHRESULT)
    def search(self, q, resolve=True, result_type=None, account_id=None, offset=None, min_id=None, max_id=None):
        """
        Fetch matching hashtags, accounts and statuses. Will perform webfinger
        lookups if resolve is True. Full-text search is only enabled if
        the instance supports it, and is restricted to statuses the logged-in
        user wrote or was mentioned in.

        `result_type` can be one of "accounts", "hashtags" or "statuses", to only
        search for that type of object.

        Specify `account_id` to only get results from the account with that id.

        `offset`, `min_id` and `max_id` can be used to paginate.

        Will use search_v1 (no tag dicts in return values) on Mastodon versions before
        2.4.1, search_v2 otherwise. Parameters other than resolve are only available
        on Mastodon 2.8.0 or above - this function will throw a MastodonVersionError
        if you try to use them on versions before that. Note that the cached version
        number will be used for this to avoid unnecessary requests.

        Returns a `search result dict`_, with tags as `hashtag dicts`_.
        """
        if self.verify_minimum_version("2.4.1", cached=True) == True:
            return self.search_v2(q, resolve=resolve, result_type=result_type, account_id=account_id,
                                  offset=offset, min_id=min_id, max_id=max_id)
        else:
            self.__ensure_search_params_acceptable(account_id, offset, min_id, max_id)
            return self.search_v1(q, resolve=resolve)

    @api_version("1.1.0", "2.1.0", "2.1.0")
    def search_v1(self, q, resolve=False):
        """
        Identical to `search_v2()`, except in that it does not return
        tags as `hashtag dicts`_.

        Returns a `search result dict`_.
        """
        params = self.__generate_params(locals())
        if resolve == False:
            del params['resolve']
        return self.__api_request('GET', '/api/v1/search', params)

    @api_version("2.4.1", "2.8.0", __DICT_VERSION_SEARCHRESULT)
    def search_v2(self, q, resolve=True, result_type=None, account_id=None, offset=None, min_id=None, max_id=None):
        """
        Identical to `search_v1()`, except in that it returns tags as
        `hashtag dicts`_, has more parameters, and resolves by default.

        Returns a `search result dict`_.
        """
        self.__ensure_search_params_acceptable(account_id, offset, min_id, max_id)
        params = self.__generate_params(locals())

        if resolve == False:
            del params['resolve']

        if "result_type" in params:
            params["type"] = params["result_type"]
            del params["result_type"]

        return self.__api_request('GET', '/api/v2/search', params)
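
    # Usage sketch (illustrative only, assuming `api` is a logged-in Mastodon instance) -
    # restricting a search to hashtags and reading the three result buckets of the
    # returned search result dict:
    #
    #     results = api.search("mastodon", result_type="hashtags")
    #     print(results.hashtags, results.accounts, results.statuses)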

    ###
    # Reading data: Lists
    ###
    @api_version("2.1.0", "2.1.0", __DICT_VERSION_LIST)
    def lists(self):
        """
        Fetch a list of all the Lists by the logged-in user.

        Returns a list of `list dicts`_.
        """
        return self.__api_request('GET', '/api/v1/lists')

    @api_version("2.1.0", "2.1.0", __DICT_VERSION_LIST)
    def list(self, id):
        """
        Fetch info about a specific list.

        Returns a `list dict`_.
        """
        id = self.__unpack_id(id)
        return self.__api_request('GET', '/api/v1/lists/{0}'.format(id))

    @api_version("2.1.0", "2.6.0", __DICT_VERSION_ACCOUNT)
    def list_accounts(self, id, max_id=None, min_id=None, since_id=None, limit=None):
        """
        Get the accounts that are on the given list.

        Returns a list of `user dicts`_.
        """
        id = self.__unpack_id(id)

        if max_id != None:
            max_id = self.__unpack_id(max_id)

        if min_id != None:
            min_id = self.__unpack_id(min_id)

        if since_id != None:
            since_id = self.__unpack_id(since_id)

        params = self.__generate_params(locals(), ['id'])
        return self.__api_request('GET', '/api/v1/lists/{0}/accounts'.format(id), params)

    ###
    # Reading data: Mutes and Blocks
    ###
    @api_version("1.1.0", "2.6.0", __DICT_VERSION_ACCOUNT)
    def mutes(self, max_id=None, min_id=None, since_id=None, limit=None):
        """
        Fetch a list of users muted by the logged-in user.

        Returns a list of `user dicts`_.
        """
        if max_id != None:
            max_id = self.__unpack_id(max_id)

        if min_id != None:
            min_id = self.__unpack_id(min_id)

        if since_id != None:
            since_id = self.__unpack_id(since_id)

        params = self.__generate_params(locals())
        return self.__api_request('GET', '/api/v1/mutes', params)

    @api_version("1.0.0", "2.6.0", __DICT_VERSION_ACCOUNT)
    def blocks(self, max_id=None, min_id=None, since_id=None, limit=None):
        """
        Fetch a list of users blocked by the logged-in user.

        Returns a list of `user dicts`_.
        """
        if max_id != None:
            max_id = self.__unpack_id(max_id)

        if min_id != None:
            min_id = self.__unpack_id(min_id)

        if since_id != None:
            since_id = self.__unpack_id(since_id)

        params = self.__generate_params(locals())
        return self.__api_request('GET', '/api/v1/blocks', params)

    ###
    # Reading data: Reports
    ###
    @api_version("1.1.0", "1.1.0", __DICT_VERSION_REPORT)
    def reports(self):
        """
        Fetch a list of reports made by the logged-in user.

        Returns a list of `report dicts`_.

        Warning: This method has now finally been removed, and will not
        work on mastodon versions 2.5.0 and above.
        """
        return self.__api_request('GET', '/api/v1/reports')

    ###
    # Reading data: Favourites
    ###
    @api_version("1.0.0", "2.6.0", __DICT_VERSION_STATUS)
    def favourites(self, max_id=None, min_id=None, since_id=None, limit=None):
        """
        Fetch the logged-in user's favourited statuses.

        Returns a list of `toot dicts`_.
        """
        if max_id != None:
            max_id = self.__unpack_id(max_id)

        if min_id != None:
            min_id = self.__unpack_id(min_id)

        if since_id != None:
            since_id = self.__unpack_id(since_id)

        params = self.__generate_params(locals())
        return self.__api_request('GET', '/api/v1/favourites', params)

    ###
    # Reading data: Follow requests
    ###
    @api_version("1.0.0", "2.6.0", __DICT_VERSION_ACCOUNT)
    def follow_requests(self, max_id=None, min_id=None, since_id=None, limit=None):
        """
        Fetch the logged-in user's incoming follow requests.

        Returns a list of `user dicts`_.
        """
        if max_id != None:
            max_id = self.__unpack_id(max_id)

        if min_id != None:
            min_id = self.__unpack_id(min_id)

        if since_id != None:
            since_id = self.__unpack_id(since_id)

        params = self.__generate_params(locals())
        return self.__api_request('GET', '/api/v1/follow_requests', params)

    ###
    # Reading data: Domain blocks
    ###
    @api_version("1.4.0", "2.6.0", "1.4.0")
    def domain_blocks(self, max_id=None, min_id=None, since_id=None, limit=None):
        """
        Fetch the logged-in user's blocked domains.

        Returns a list of blocked domain URLs (as strings, without protocol specifier).
        """
        if max_id != None:
            max_id = self.__unpack_id(max_id)

        if min_id != None:
            min_id = self.__unpack_id(min_id)

        if since_id != None:
            since_id = self.__unpack_id(since_id)

        params = self.__generate_params(locals())
        return self.__api_request('GET', '/api/v1/domain_blocks', params)

    ###
    # Reading data: Emoji
    ###
    @api_version("2.1.0", "2.1.0", __DICT_VERSION_EMOJI)
    def custom_emojis(self):
        """
        Fetch the list of custom emoji the instance has installed.

        Does not require authentication.

        Returns a list of `emoji dicts`_.
        """
        return self.__api_request('GET', '/api/v1/custom_emojis')

    ###
    # Reading data: Apps
    ###
    @api_version("2.0.0", "2.7.2", __DICT_VERSION_APPLICATION)
    def app_verify_credentials(self):
        """
        Fetch information about the current application.

        Returns an `application dict`_.
        """
        return self.__api_request('GET', '/api/v1/apps/verify_credentials')

    ###
    # Reading data: Webpush subscriptions
    ###
    @api_version("2.4.0", "2.4.0", __DICT_VERSION_PUSH)
    def push_subscription(self):
        """
        Fetch the current push subscription the logged-in user has for this app.

        Returns a `push subscription dict`_.
        """
        return self.__api_request('GET', '/api/v1/push/subscription')

    ###
    # Reading data: Preferences
    ###
    @api_version("2.8.0", "2.8.0", __DICT_VERSION_PREFERENCES)
    def preferences(self):
        """
        Fetch the user's preferences, which can be used to set some default options.
        As of 2.8.0, apps can only fetch, not update preferences.

        Returns a `preference dict`_.
        """
        return self.__api_request('GET', '/api/v1/preferences')
2016-11-23 23:30:51 +01:00
###
# Writing data: Statuses
###
2019-04-28 23:12:27 +02:00
@api_version("1.0.0", "2.8.0", __DICT_VERSION_STATUS)
2017-09-05 22:59:32 +02:00
def status_post(self, status, in_reply_to_id=None, media_ids=None,
sensitive=False, visibility=None, spoiler_text=None,
language=None, idempotency_key=None, content_type=None,
scheduled_at=None, poll=None):
"""
Post a status. Can optionally be in reply to another status and contain
media.
`media_ids` should be a list. (If it's not, the function will turn it
into one.) It can contain up to four pieces of media (uploaded via
`media_post()`_). `media_ids` can also be the `media dicts`_ returned
by `media_post()`_ - they are unpacked automatically.
The `sensitive` boolean decides whether or not media attached to the post
should be marked as sensitive, which hides it by default on the Mastodon
web front-end.
The visibility parameter is a string value and accepts any of:
'direct' - post will be visible only to mentioned users
'private' - post will be visible only to followers
'unlisted' - post will be public but not appear on the public timeline
'public' - post will be public
If not passed in, visibility defaults to match the current account's
default-privacy setting (starting with Mastodon version 1.6) or its
locked setting - private if the account is locked, public otherwise
(for Mastodon versions lower than 1.6).
The `spoiler_text` parameter is a string to be shown as a warning before
the text of the status. If no text is passed in, no warning will be
displayed.
Specify `language` to override automatic language detection. The parameter
accepts all valid ISO 639-2 language codes.
You can set `idempotency_key` to a value to uniquely identify an attempt
at posting a status. Even if you call this function more than once,
if you call it with the same `idempotency_key`, only one status will
be created.
Pass a datetime as `scheduled_at` to schedule the toot for a specific time
(the time must be at least 5 minutes into the future). If this is passed,
status_post returns a `scheduled toot dict`_ instead.
Pass `poll` to attach a poll to the status. An appropriate object can be
constructed using `make_poll()`_ . Note that as of Mastodon version
2.8.2, you can only have either media or a poll attached, not both at
the same time.
Specify `content_type` to set the content type of your post on Pleroma.
It accepts 'text/plain' (default), 'text/markdown', and 'text/html'.
This parameter is not supported on Mastodon servers, but will be
safely ignored if set.
Returns a `toot dict`_ with the new status.
"""
if in_reply_to_id != None:
in_reply_to_id = self.__unpack_id(in_reply_to_id)
if scheduled_at != None:
scheduled_at = self.__consistent_isoformat_utc(scheduled_at)
params_initial = locals()
# Validate poll/media exclusivity
if not poll is None:
if (not media_ids is None) and len(media_ids) != 0:
raise ValueError('Status can have media or poll attached - not both.')
# Validate visibility parameter
valid_visibilities = ['private', 'public', 'unlisted', 'direct']
if params_initial['visibility'] == None:
del params_initial['visibility']
else:
params_initial['visibility'] = params_initial['visibility'].lower()
if params_initial['visibility'] not in valid_visibilities:
raise ValueError('Invalid visibility value! Acceptable '
'values are %s' % valid_visibilities)
if params_initial['language'] == None:
del params_initial['language']
if params_initial['sensitive'] is False:
del [params_initial['sensitive']]
headers = {}
if idempotency_key != None:
headers['Idempotency-Key'] = idempotency_key
if media_ids is not None:
try:
media_ids_proper = []
if not isinstance(media_ids, (list, tuple)):
media_ids = [media_ids]
for media_id in media_ids:
if isinstance(media_id, dict):
media_ids_proper.append(media_id["id"])
else:
media_ids_proper.append(media_id)
except Exception as e:
raise MastodonIllegalArgumentError("Invalid media "
"dict: %s" % e)
params_initial["media_ids"] = media_ids_proper
if params_initial['content_type'] == None:
del params_initial['content_type']
use_json = False
if not poll is None:
use_json = True
params = self.__generate_params(params_initial, ['idempotency_key'])
return self.__api_request('POST', '/api/v1/statuses', params, headers = headers, use_json = use_json)
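    # Usage sketch (illustrative only, not part of the library): assuming `api` is an
    # authenticated Mastodon instance, a content-warned, unlisted status could be posted
    # roughly like this; the idempotency key guards against accidental double-posting.
    #
    #   api.status_post("Spoilers for episode 3 inside!",
    #                   spoiler_text="TV spoilers", visibility="unlisted",
    #                   idempotency_key="episode-3-hot-take")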
@api_version("1.0.0", "2.8.0", __DICT_VERSION_STATUS)
def toot(self, status):
"""
Synonym for `status_post()`_ that only takes the status text as input.
Usage in production code is not recommended.
Returns a `toot dict`_ with the new status.
"""
return self.status_post(status)
@api_version("1.0.0", "2.8.0", __DICT_VERSION_STATUS)
def status_reply(self, to_status, status, media_ids=None, sensitive=False, visibility=None,
spoiler_text=None, language=None, idempotency_key=None, content_type=None,
scheduled_at=None, poll=None, untag=False):
"""
        Helper function - acts like status_post, but prepends the names of all
        the users that are being replied to to the status text, and retains
        CW and visibility if not explicitly overridden.
Set `untag` to True if you want the reply to only go to the user you
are replying to, removing every other mentioned user from the
conversation.
"""
user_id = self.__get_logged_in_id()
# Determine users to mention
mentioned_accounts = collections.OrderedDict()
mentioned_accounts[to_status.account.id] = to_status.account.acct
if not untag:
for account in to_status.mentions:
if account.id != user_id and not account.id in mentioned_accounts.keys():
mentioned_accounts[account.id] = account.acct
# Join into one piece of text. The space is added inside because of self-replies.
status = "".join(map(lambda x: "@" + x + " ", mentioned_accounts.values())) + status
# Retain visibility / cw
if visibility == None and 'visibility' in to_status:
visibility = to_status.visibility
if spoiler_text == None and 'spoiler_text' in to_status:
spoiler_text = to_status.spoiler_text
return self.status_post(status, in_reply_to_id = to_status.id, media_ids = media_ids, sensitive = sensitive,
visibility = visibility, spoiler_text = spoiler_text, language = language,
idempotency_key = idempotency_key, content_type = content_type,
scheduled_at = scheduled_at, poll = poll)
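    # Usage sketch (illustrative only, names assumed): given a mention notification
    # fetched elsewhere, a reply that only goes back to the original author could look like:
    #
    #   api.status_reply(notification.status, "Thanks for the heads-up!", untag=True)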
@api_version("2.8.0", "2.8.0", __DICT_VERSION_POLL)
def make_poll(self, options, expires_in, multiple=False, hide_totals=False):
"""
Generate a poll object that can be passed as the `poll` option when posting a status.
        options is an array of strings with the poll options (maximum 4 by default),
expires_in is the time in seconds for which the poll should be open.
Set multiple to True to allow people to choose more than one answer. Set
hide_totals to True to hide the results of the poll until it has expired.
"""
poll_params = locals()
del poll_params["self"]
return poll_params
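    # Usage sketch (illustrative only): attaching a poll to a new status, assuming `api`
    # is an authenticated Mastodon instance.
    #
    #   poll = api.make_poll(["spam", "eggs"], expires_in=24 * 60 * 60, multiple=False)
    #   api.status_post("Breakfast?", poll=poll)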
@api_version("1.0.0", "1.0.0", "1.0.0")
def status_delete(self, id):
"""
Delete a status
Returns the now-deleted status, with an added "source" attribute that contains
the text that was used to compose this status (this can be used to power
"delete and redraft" functionality)
"""
id = self.__unpack_id(id)
url = '/api/v1/statuses/{0}'.format(str(id))
return self.__api_request('DELETE', url)
@api_version("1.0.0", "2.0.0", __DICT_VERSION_STATUS)
def status_reblog(self, id, visibility=None):
"""
Reblog / boost a status.
The visibility parameter functions the same as in `status_post()`_ and
allows you to reduce the visibility of a reblogged status.
Returns a `toot dict`_ with a new status that wraps around the reblogged one.
"""
params = self.__generate_params(locals(), ['id'])
valid_visibilities = ['private', 'public', 'unlisted', 'direct']
if 'visibility' in params:
params['visibility'] = params['visibility'].lower()
if params['visibility'] not in valid_visibilities:
raise ValueError('Invalid visibility value! Acceptable '
'values are %s' % valid_visibilities)
id = self.__unpack_id(id)
url = '/api/v1/statuses/{0}/reblog'.format(str(id))
return self.__api_request('POST', url, params)
@api_version("1.0.0", "2.0.0", __DICT_VERSION_STATUS)
def status_unreblog(self, id):
"""
Un-reblog a status.
Returns a `toot dict`_ with the status that used to be reblogged.
"""
id = self.__unpack_id(id)
url = '/api/v1/statuses/{0}/unreblog'.format(str(id))
return self.__api_request('POST', url)
@api_version("1.0.0", "2.0.0", __DICT_VERSION_STATUS)
def status_favourite(self, id):
"""
Favourite a status.
Returns a `toot dict`_ with the favourited status.
"""
id = self.__unpack_id(id)
url = '/api/v1/statuses/{0}/favourite'.format(str(id))
return self.__api_request('POST', url)
@api_version("1.0.0", "2.0.0", __DICT_VERSION_STATUS)
def status_unfavourite(self, id):
"""
Un-favourite a status.
Returns a `toot dict`_ with the un-favourited status.
"""
id = self.__unpack_id(id)
url = '/api/v1/statuses/{0}/unfavourite'.format(str(id))
return self.__api_request('POST', url)
@api_version("1.4.0", "2.0.0", __DICT_VERSION_STATUS)
def status_mute(self, id):
"""
Mute notifications for a status.
Returns a `toot dict`_ with the now muted status
"""
id = self.__unpack_id(id)
url = '/api/v1/statuses/{0}/mute'.format(str(id))
return self.__api_request('POST', url)
@api_version("1.4.0", "2.0.0", __DICT_VERSION_STATUS)
def status_unmute(self, id):
"""
Unmute notifications for a status.
Returns a `toot dict`_ with the status that used to be muted.
"""
id = self.__unpack_id(id)
url = '/api/v1/statuses/{0}/unmute'.format(str(id))
return self.__api_request('POST', url)
@api_version("2.1.0", "2.1.0", __DICT_VERSION_STATUS)
def status_pin(self, id):
"""
Pin a status for the logged-in user.
Returns a `toot dict`_ with the now pinned status
"""
id = self.__unpack_id(id)
url = '/api/v1/statuses/{0}/pin'.format(str(id))
return self.__api_request('POST', url)
@api_version("2.1.0", "2.1.0", __DICT_VERSION_STATUS)
def status_unpin(self, id):
"""
Unpin a pinned status for the logged-in user.
Returns a `toot dict`_ with the status that used to be pinned.
"""
id = self.__unpack_id(id)
url = '/api/v1/statuses/{0}/unpin'.format(str(id))
return self.__api_request('POST', url)
###
# Writing data: Scheduled statuses
###
@api_version("2.7.0", "2.7.0", __DICT_VERSION_SCHEDULED_STATUS)
def scheduled_status_update(self, id, scheduled_at):
"""
Update the scheduled time of a scheduled status.
New time must be at least 5 minutes into the future.
Returns a `scheduled toot dict`_
"""
scheduled_at = self.__consistent_isoformat_utc(scheduled_at)
id = self.__unpack_id(id)
params = self.__generate_params(locals(), ['id'])
url = '/api/v1/scheduled_statuses/{0}'.format(str(id))
return self.__api_request('PUT', url, params)
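    # Usage sketch (illustrative only): scheduling a toot and then pushing it back by an
    # hour, assuming `api` is authenticated and the caller imports datetime themselves.
    #
    #   when = datetime.datetime.now() + datetime.timedelta(hours=1)
    #   scheduled = api.status_post("See you soon!", scheduled_at=when)
    #   api.scheduled_status_update(scheduled, when + datetime.timedelta(hours=1))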
@api_version("2.7.0", "2.7.0", "2.7.0")
def scheduled_status_delete(self, id):
"""
Deletes a scheduled status.
"""
id = self.__unpack_id(id)
url = '/api/v1/scheduled_statuses/{0}'.format(str(id))
self.__api_request('DELETE', url)
###
# Writing data: Polls
###
@api_version("2.8.0", "2.8.0", __DICT_VERSION_POLL)
def poll_vote(self, id, choices):
"""
Vote in the given poll.
        `choices` is the index of the choice you wish to register a vote for
        (i.e. its index in the corresponding poll's `options` field). In case
        of a poll that allows selection of more than one option, a list of
        indices can be passed.
You can only submit choices for any given poll once in case of
single-option polls, or only once per option in case of multi-option
polls.
Returns the updated `poll dict`_
"""
id = self.__unpack_id(id)
if not isinstance(choices, list):
choices = [choices]
params = self.__generate_params(locals(), ['id'])
url = '/api/v1/polls/{0}/votes'.format(id)
        return self.__api_request('POST', url, params)
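    # Usage sketch (illustrative only): voting for the first and third options of a
    # multiple-choice poll attached to a status `toot` fetched earlier.
    #
    #   api.poll_vote(toot.poll, [0, 2])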
###
# Writing data: Notifications
###
@api_version("1.0.0", "1.0.0", "1.0.0")
def notifications_clear(self):
"""
        Clear out a user's notifications
"""
self.__api_request('POST', '/api/v1/notifications/clear')
@api_version("1.3.0", "2.9.2", "2.9.2")
def notifications_dismiss(self, id):
"""
Deletes a single notification
"""
id = self.__unpack_id(id)
        if self.verify_minimum_version("2.9.2"):
            url = '/api/v1/notifications/{0}/dismiss'.format(str(id))
            self.__api_request('POST', url)
        else:
            params = self.__generate_params(locals())
            self.__api_request('POST', '/api/v1/notifications/dismiss', params)
###
# Writing data: Conversations
###
@api_version("2.6.0", "2.6.0", __DICT_VERSION_CONVERSATION)
def conversations_read(self, id):
"""
Marks a single conversation as read.
Returns the updated `conversation dict`_.
WARNING: This method is currently not documented in the official API and
might therefore be unstable.
"""
id = self.__unpack_id(id)
url = '/api/v1/conversations/{0}/read'.format(str(id))
return self.__api_request('POST', url)
###
# Writing data: Accounts
###
@api_version("1.0.0", "2.4.3", __DICT_VERSION_RELATIONSHIP)
def account_follow(self, id, reblogs=True):
"""
Follow a user.
Set `reblogs` to False to hide boosts by the followed user.
Returns a `relationship dict`_ containing the updated relationship to the user.
"""
id = self.__unpack_id(id)
params = self.__generate_params(locals())
if params["reblogs"] == None:
del params["reblogs"]
url = '/api/v1/accounts/{0}/follow'.format(str(id))
return self.__api_request('POST', url, params)
@api_version("1.0.0", "2.1.0", __DICT_VERSION_ACCOUNT)
def follows(self, uri):
"""
Follow a remote user by uri (username@domain).
Returns a `user dict`_.
"""
params = self.__generate_params(locals())
return self.__api_request('POST', '/api/v1/follows', params)
@api_version("1.0.0", "1.4.0", __DICT_VERSION_RELATIONSHIP)
def account_unfollow(self, id):
"""
Unfollow a user.
Returns a `relationship dict`_ containing the updated relationship to the user.
"""
id = self.__unpack_id(id)
url = '/api/v1/accounts/{0}/unfollow'.format(str(id))
return self.__api_request('POST', url)
@api_version("1.0.0", "1.4.0", __DICT_VERSION_RELATIONSHIP)
def account_block(self, id):
"""
Block a user.
Returns a `relationship dict`_ containing the updated relationship to the user.
"""
id = self.__unpack_id(id)
url = '/api/v1/accounts/{0}/block'.format(str(id))
return self.__api_request('POST', url)
@api_version("1.0.0", "1.4.0", __DICT_VERSION_RELATIONSHIP)
def account_unblock(self, id):
"""
Unblock a user.
Returns a `relationship dict`_ containing the updated relationship to the user.
"""
id = self.__unpack_id(id)
url = '/api/v1/accounts/{0}/unblock'.format(str(id))
return self.__api_request('POST', url)
@api_version("1.1.0", "2.4.3", __DICT_VERSION_RELATIONSHIP)
def account_mute(self, id, notifications=True):
"""
Mute a user.
Set `notifications` to False to receive notifications even though the user is
muted from timelines.
Returns a `relationship dict`_ containing the updated relationship to the user.
"""
id = self.__unpack_id(id)
params = self.__generate_params(locals(), ['id'])
url = '/api/v1/accounts/{0}/mute'.format(str(id))
return self.__api_request('POST', url, params)
@api_version("1.1.0", "1.4.0", __DICT_VERSION_RELATIONSHIP)
def account_unmute(self, id):
"""
Unmute a user.
Returns a `relationship dict`_ containing the updated relationship to the user.
"""
id = self.__unpack_id(id)
url = '/api/v1/accounts/{0}/unmute'.format(str(id))
return self.__api_request('POST', url)
@api_version("1.1.1", "2.4.0", __DICT_VERSION_ACCOUNT)
def account_update_credentials(self, display_name=None, note=None,
avatar=None, avatar_mime_type=None,
header=None, header_mime_type=None,
locked=None, fields=None):
"""
Update the profile for the currently logged-in user.
'note' is the user's bio.
'avatar' and 'header' are images. As with media uploads, it is possible to either
pass image data and a mime type, or a filename of an image file, for either.
'locked' specifies whether the user needs to manually approve follow requests.
        'fields' can be a list of up to four name-value pairs (specified as tuples) to
        appear as semi-structured information in the user's profile.
Returns the updated `user dict` of the logged-in user.
"""
params_initial = collections.OrderedDict(locals())
# Load avatar, if specified
if not avatar is None:
if avatar_mime_type is None and (isinstance(avatar, str) and os.path.isfile(avatar)):
avatar_mime_type = guess_type(avatar)
avatar = open(avatar, 'rb')
if avatar_mime_type is None:
raise MastodonIllegalArgumentError('Could not determine mime type or data passed directly without mime type.')
# Load header, if specified
if not header is None:
            if header_mime_type is None and (isinstance(header, str) and os.path.isfile(header)):
header_mime_type = guess_type(header)
header = open(header, 'rb')
if header_mime_type is None:
raise MastodonIllegalArgumentError('Could not determine mime type or data passed directly without mime type.')
# Convert fields
if fields != None:
if len(fields) > 4:
raise MastodonIllegalArgumentError('A maximum of four fields are allowed.')
fields_attributes = []
for idx, (field_name, field_value) in enumerate(fields):
params_initial['fields_attributes[' + str(idx) + '][name]'] = field_name
params_initial['fields_attributes[' + str(idx) + '][value]'] = field_value
# Clean up params
for param in ["avatar", "avatar_mime_type", "header", "header_mime_type", "fields"]:
if param in params_initial:
del params_initial[param]
# Create file info
files = {}
if not avatar is None:
avatar_file_name = "mastodonpyupload_" + mimetypes.guess_extension(avatar_mime_type)
files["avatar"] = (avatar_file_name, avatar, avatar_mime_type)
if not header is None:
header_file_name = "mastodonpyupload_" + mimetypes.guess_extension(header_mime_type)
files["header"] = (header_file_name, header, header_mime_type)
params = self.__generate_params(params_initial)
return self.__api_request('PATCH', '/api/v1/accounts/update_credentials', params, files=files)
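    # Usage sketch (illustrative only): updating the bio and the profile metadata fields,
    # assuming `api` is an authenticated Mastodon instance.
    #
    #   api.account_update_credentials(
    #       note="Bot account, run by @someone",
    #       fields=[("Source", "https://example.com/repo"), ("Language", "Python")])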
@api_version("2.5.0", "2.5.0", __DICT_VERSION_RELATIONSHIP)
def account_pin(self, id):
"""
Pin / endorse a user.
Returns a `relationship dict`_ containing the updated relationship to the user.
"""
id = self.__unpack_id(id)
url = '/api/v1/accounts/{0}/pin'.format(str(id))
return self.__api_request('POST', url)
@api_version("2.5.0", "2.5.0", __DICT_VERSION_RELATIONSHIP)
def account_unpin(self, id):
"""
Unpin / un-endorse a user.
Returns a `relationship dict`_ containing the updated relationship to the user.
"""
id = self.__unpack_id(id)
url = '/api/v1/accounts/{0}/unpin'.format(str(id))
return self.__api_request('POST', url)
###
# Writing data: Keyword filters
###
@api_version("2.4.3", "2.4.3", __DICT_VERSION_FILTER)
def filter_create(self, phrase, context, irreversible = False, whole_word = True, expires_in = None):
"""
Creates a new keyword filter. `phrase` is the phrase that should be
filtered out, `context` specifies from where to filter the keywords.
Valid contexts are 'home', 'notifications', 'public' and 'thread'.
Set `irreversible` to True if you want the filter to just delete statuses
server side. This works only for the 'home' and 'notifications' contexts.
Set `whole_word` to False if you want to allow filter matches to
start or end within a word, not only at word boundaries.
Set `expires_in` to specify for how many seconds the filter should be
kept around.
Returns the `filter dict`_ of the newly created filter.
"""
params = self.__generate_params(locals())
for context_val in context:
if not context_val in ['home', 'notifications', 'public', 'thread']:
raise MastodonIllegalArgumentError('Invalid filter context.')
return self.__api_request('POST', '/api/v1/filters', params)
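    # Usage sketch (illustrative only): hiding a phrase from the home timeline for a week.
    #
    #   api.filter_create("spoilers", ["home"], whole_word=True,
    #                     expires_in=7 * 24 * 60 * 60)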
@api_version("2.4.3", "2.4.3", __DICT_VERSION_FILTER)
def filter_update(self, id, phrase = None, context = None, irreversible = None, whole_word = None, expires_in = None):
"""
Updates the filter with the given `id`. Parameters are the same
as in `filter_create()`.
Returns the `filter dict`_ of the updated filter.
"""
id = self.__unpack_id(id)
params = self.__generate_params(locals(), ['id'])
url = '/api/v1/filters/{0}'.format(str(id))
return self.__api_request('PUT', url, params)
@api_version("2.4.3", "2.4.3", "2.4.3")
def filter_delete(self, id):
"""
Deletes the filter with the given `id`.
"""
id = self.__unpack_id(id)
url = '/api/v1/filters/{0}'.format(str(id))
self.__api_request('DELETE', url)
###
# Writing data: Follow suggestions
###
@api_version("2.4.3", "2.4.3", __DICT_VERSION_ACCOUNT)
def suggestion_delete(self, account_id):
"""
Remove the user with the given `account_id` from the follow suggestions.
"""
account_id = self.__unpack_id(account_id)
url = '/api/v1/suggestions/{0}'.format(str(account_id))
self.__api_request('DELETE', url)
###
# Writing data: Lists
###
@api_version("2.1.0", "2.1.0", __DICT_VERSION_LIST)
def list_create(self, title):
"""
Create a new list with the given `title`.
Returns the `list dict`_ of the created list.
"""
params = self.__generate_params(locals())
return self.__api_request('POST', '/api/v1/lists', params)
@api_version("2.1.0", "2.1.0", __DICT_VERSION_LIST)
def list_update(self, id, title):
"""
        Update info about a list, where "info" is really the list's `title`.
Returns the `list dict`_ of the modified list.
"""
id = self.__unpack_id(id)
params = self.__generate_params(locals(), ['id'])
return self.__api_request('PUT', '/api/v1/lists/{0}'.format(id), params)
@api_version("2.1.0", "2.1.0", "2.1.0")
def list_delete(self, id):
"""
Delete a list.
"""
id = self.__unpack_id(id)
self.__api_request('DELETE', '/api/v1/lists/{0}'.format(id))
@api_version("2.1.0", "2.1.0", "2.1.0")
def list_accounts_add(self, id, account_ids):
"""
Add the account(s) given in `account_ids` to the list.
"""
id = self.__unpack_id(id)
if not isinstance(account_ids, list):
account_ids = [account_ids]
account_ids = list(map(lambda x: self.__unpack_id(x), account_ids))
params = self.__generate_params(locals(), ['id'])
self.__api_request('POST', '/api/v1/lists/{0}/accounts'.format(id), params)
@api_version("2.1.0", "2.1.0", "2.1.0")
def list_accounts_delete(self, id, account_ids):
"""
Remove the account(s) given in `account_ids` from the list.
"""
id = self.__unpack_id(id)
if not isinstance(account_ids, list):
account_ids = [account_ids]
account_ids = list(map(lambda x: self.__unpack_id(x), account_ids))
params = self.__generate_params(locals(), ['id'])
self.__api_request('DELETE', '/api/v1/lists/{0}/accounts'.format(id), params)
###
# Writing data: Reports
###
@api_version("1.1.0", "2.5.0", __DICT_VERSION_REPORT)
def report(self, account_id, status_ids = None, comment = None, forward = False):
"""
        Report statuses to the instance's administrators.
Accepts a list of toot IDs associated with the report, and a comment.
        Set forward to True to forward a report of a remote user to that user's
        instance as well as sending it to the local instance's administrators.
Returns a `report dict`_.
"""
account_id = self.__unpack_id(account_id)
if not status_ids is None:
if not isinstance(status_ids, list):
status_ids = [status_ids]
status_ids = list(map(lambda x: self.__unpack_id(x), status_ids))
params_initial = locals()
if forward == False:
del params_initial['forward']
params = self.__generate_params(params_initial)
return self.__api_request('POST', '/api/v1/reports/', params)
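    # Usage sketch (illustrative only, names assumed): reporting a remote account
    # together with the offending statuses, and forwarding the report to the
    # account's home instance.
    #
    #   api.report(account, status_ids=[toot], comment="Spam in replies", forward=True)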
###
# Writing data: Follow requests
###
@api_version("1.0.0", "1.0.0", "1.0.0")
def follow_request_authorize(self, id):
"""
Accept an incoming follow request.
"""
id = self.__unpack_id(id)
url = '/api/v1/follow_requests/{0}/authorize'.format(str(id))
self.__api_request('POST', url)
@api_version("1.0.0", "1.0.0", "1.0.0")
def follow_request_reject(self, id):
"""
Reject an incoming follow request.
"""
id = self.__unpack_id(id)
url = '/api/v1/follow_requests/{0}/reject'.format(str(id))
self.__api_request('POST', url)
###
# Writing data: Media
###
@api_version("1.0.0", "2.9.1", __DICT_VERSION_MEDIA)
def media_post(self, media_file, mime_type=None, description=None, focus=None):
"""
Post an image, video or audio file. `media_file` can either be image data or
a file name. If image data is passed directly, the mime
type has to be specified manually, otherwise, it is
determined from the file name. `focus` should be a tuple
of floats between -1 and 1, giving the x and y coordinates
        of the image's focus point for cropping (with the origin being the image's
center).
Throws a `MastodonIllegalArgumentError` if the mime type of the
passed data or file can not be determined properly.
Returns a `media dict`_. This contains the id that can be used in
status_post to attach the media file to a toot.
"""
if mime_type is None and (isinstance(media_file, str) and os.path.isfile(media_file)):
mime_type = guess_type(media_file)
media_file = open(media_file, 'rb')
elif isinstance(media_file, str) and os.path.isfile(media_file):
media_file = open(media_file, 'rb')
if mime_type is None:
raise MastodonIllegalArgumentError('Could not determine mime type'
' or data passed directly '
'without mime type.')
random_suffix = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(10))
file_name = "mastodonpyupload_" + str(time.time()) + "_" + str(random_suffix) + mimetypes.guess_extension(
mime_type)
if focus != None:
focus = str(focus[0]) + "," + str(focus[1])
media_file_description = (file_name, media_file, mime_type)
return self.__api_request('POST', '/api/v1/media',
files={'file': media_file_description},
params={'description': description, 'focus': focus})
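    # Usage sketch (illustrative only, file name assumed): uploading an image with a
    # description and an off-center focal point, then attaching it to a status.
    #
    #   media = api.media_post("cat.png", description="A sleepy cat", focus=(0.0, 0.5))
    #   api.status_post("Cat of the day", media_ids=[media])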
@api_version("2.3.0", "2.3.0", __DICT_VERSION_MEDIA)
def media_update(self, id, description=None, focus=None):
"""
Update the metadata of the media file with the given `id`. `description` and
`focus` are as in `media_post()`_ .
Returns the updated `media dict`_.
"""
id = self.__unpack_id(id)
if focus != None:
focus = str(focus[0]) + "," + str(focus[1])
params = self.__generate_params(locals(), ['id'])
return self.__api_request('PUT', '/api/v1/media/{0}'.format(str(id)), params)
###
# Writing data: Domain blocks
###
@api_version("1.4.0", "1.4.0", "1.4.0")
def domain_block(self, domain=None):
"""
Add a block for all statuses originating from the specified domain for the logged-in user.
"""
params = self.__generate_params(locals())
self.__api_request('POST', '/api/v1/domain_blocks', params)
@api_version("1.4.0", "1.4.0", "1.4.0")
def domain_unblock(self, domain=None):
"""
Remove a domain block for the logged-in user.
"""
params = self.__generate_params(locals())
self.__api_request('DELETE', '/api/v1/domain_blocks', params)
###
# Writing data: Push subscriptions
###
@api_version("2.4.0", "2.4.0", __DICT_VERSION_PUSH)
def push_subscription_set(self, endpoint, encrypt_params, follow_events=None,
favourite_events=None, reblog_events=None,
mention_events=None):
"""
Sets up or modifies the push subscription the logged-in user has for this app.
`endpoint` is the endpoint URL mastodon should call for pushes. Note that mastodon
requires https for this URL. `encrypt_params` is a dict with key parameters that allow
the server to encrypt data for you: A public key `pubkey` and a shared secret `auth`.
You can generate this as well as the corresponding private key using the
`push_subscription_generate_keys()`_ function.
        The rest of the parameters control what kinds of events you wish to subscribe to.
Returns a `push subscription dict`_.
"""
endpoint = Mastodon.__protocolize(endpoint)
push_pubkey_b64 = base64.b64encode(encrypt_params['pubkey'])
push_auth_b64 = base64.b64encode(encrypt_params['auth'])
params = {
'subscription[endpoint]': endpoint,
'subscription[keys][p256dh]': push_pubkey_b64,
'subscription[keys][auth]': push_auth_b64
}
if follow_events != None:
params['data[alerts][follow]'] = follow_events
if favourite_events != None:
params['data[alerts][favourite]'] = favourite_events
if reblog_events != None:
params['data[alerts][reblog]'] = reblog_events
if mention_events != None:
params['data[alerts][mention]'] = mention_events
return self.__api_request('POST', '/api/v1/push/subscription', params)
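    # Usage sketch (illustrative only, endpoint URL assumed): registering a webpush
    # endpoint with freshly generated keys; keep `priv` around so incoming pushes can be
    # decrypted later with push_subscription_decrypt_push().
    #
    #   priv, pub = api.push_subscription_generate_keys()
    #   api.push_subscription_set("https://example.com/push/callback", pub,
    #                             mention_events=True, follow_events=True)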
@api_version("2.4.0", "2.4.0", __DICT_VERSION_PUSH)
def push_subscription_update(self, follow_events=None,
favourite_events=None, reblog_events=None,
mention_events=None):
"""
Modifies what kind of events the app wishes to subscribe to.
Returns the updated `push subscription dict`_.
"""
params = {}
if follow_events != None:
params['data[alerts][follow]'] = follow_events
if favourite_events != None:
params['data[alerts][favourite]'] = favourite_events
if reblog_events != None:
params['data[alerts][reblog]'] = reblog_events
if mention_events != None:
params['data[alerts][mention]'] = mention_events
return self.__api_request('PUT', '/api/v1/push/subscription', params)
@api_version("2.4.0", "2.4.0", "2.4.0")
def push_subscription_delete(self):
"""
Remove the current push subscription the logged-in user has for this app.
"""
self.__api_request('DELETE', '/api/v1/push/subscription')
###
# Moderation API
###
@api_version("2.9.1", "2.9.1", __DICT_VERSION_ADMIN_ACCOUNT)
def admin_accounts(self, remote=False, by_domain=None, status='active', username=None, display_name=None, email=None, ip=None, staff_only=False, max_id=None, min_id=None, since_id=None, limit=None):
"""
Fetches a list of accounts that match given criteria. By default, local accounts are returned.
* Set `remote` to True to get remote accounts, otherwise local accounts are returned (default: local accounts)
* Set `by_domain` to a domain to get only accounts from that domain.
* Set `status` to one of "active", "pending", "disabled", "silenced" or "suspended" to get only accounts with that moderation status (default: active)
* Set `username` to a string to get only accounts whose username contains this string.
* Set `display_name` to a string to get only accounts whose display name contains this string.
* Set `email` to an email to get only accounts with that email (this only works on local accounts).
* Set `ip` to an ip (as a string, standard v4/v6 notation) to get only accounts whose last active ip is that ip (this only works on local accounts).
* Set `staff_only` to True to only get staff accounts (this only works on local accounts).
Note that setting the boolean parameters to False does not mean "give me users to which this does not apply" but
instead means "I do not care if users have this attribute".
Returns a list of `admin account dicts`_.
"""
if max_id != None:
max_id = self.__unpack_id(max_id)
if min_id != None:
min_id = self.__unpack_id(min_id)
if since_id != None:
since_id = self.__unpack_id(since_id)
params = self.__generate_params(locals(), ['remote', 'status', 'staff_only'])
if remote == True:
params["remote"] = True
mod_statuses = ["active", "pending", "disabled", "silenced", "suspended"]
if not status in mod_statuses:
raise ValueError("Invalid moderation status requested.")
if staff_only == True:
params["staff"] = True
for mod_status in mod_statuses:
if status == mod_status:
params[status] = True
return self.__api_request('GET', '/api/v1/admin/accounts', params)
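    # Usage sketch (illustrative only): listing pending local sign-ups and approving
    # them, assuming the access token carries the required admin scopes.
    #
    #   for account in api.admin_accounts(status="pending"):
    #       api.admin_account_approve(account)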
@api_version("2.9.1", "2.9.1", __DICT_VERSION_ADMIN_ACCOUNT)
def admin_account(self, id):
"""
Fetches a single `admin account dict`_ for the user with the given id.
Returns that dict.
"""
id = self.__unpack_id(id)
return self.__api_request('GET', '/api/v1/admin/accounts/{0}'.format(id))
@api_version("2.9.1", "2.9.1", __DICT_VERSION_ADMIN_ACCOUNT)
def admin_account_enable(self, id):
"""
Reenables login for a local account for which login has been disabled.
Returns the updated `admin account dict`_.
"""
id = self.__unpack_id(id)
return self.__api_request('POST', '/api/v1/admin/accounts/{0}/enable'.format(id))
@api_version("2.9.1", "2.9.1", __DICT_VERSION_ADMIN_ACCOUNT)
def admin_account_approve(self, id):
"""
Approves a pending account.
Returns the updated `admin account dict`_.
"""
id = self.__unpack_id(id)
return self.__api_request('POST', '/api/v1/admin/accounts/{0}/approve'.format(id))
@api_version("2.9.1", "2.9.1", __DICT_VERSION_ADMIN_ACCOUNT)
def admin_account_reject(self, id):
"""
Rejects and deletes a pending account.
Returns the updated `admin account dict`_ for the account that is now gone.
"""
id = self.__unpack_id(id)
return self.__api_request('POST', '/api/v1/admin/accounts/{0}/reject'.format(id))
@api_version("2.9.1", "2.9.1", __DICT_VERSION_ADMIN_ACCOUNT)
def admin_account_unsilence(self, id):
"""
Unsilences an account.
Returns the updated `admin account dict`_.
"""
id = self.__unpack_id(id)
return self.__api_request('POST', '/api/v1/admin/accounts/{0}/unsilence'.format(id))
@api_version("2.9.1", "2.9.1", __DICT_VERSION_ADMIN_ACCOUNT)
def admin_account_unsuspend(self, id):
"""
Unsuspends an account.
Returns the updated `admin account dict`_.
"""
id = self.__unpack_id(id)
return self.__api_request('POST', '/api/v1/admin/accounts/{0}/unsuspend'.format(id))
@api_version("2.9.1", "2.9.1", "2.9.1")
def admin_account_moderate(self, id, action=None, report_id=None, warning_preset_id=None, text=None, send_email_notification=True):
"""
Perform a moderation action on an account.
Valid actions are:
* "disable" - for a local user, disable login.
* "silence" - hide the users posts from all public timelines.
* "suspend" - irreversibly delete all the users posts, past and future.
If no action is specified, the user is only issued a warning.
Specify the id of a report as `report_id` to close the report with this moderation action as the resolution.
Specify `warning_preset_id` to use a warning preset as the notification text to the user, or `text` to specify text directly.
If both are specified, they are concatenated (preset first). Note that there is currently no API to retrieve or create
warning presets.
Set `send_email_notification` to False to not send the user an e-mail notification informing them of the moderation action.
"""
if action is None:
action = "none"
if send_email_notification == False:
send_email_notification = None
id = self.__unpack_id(id)
if not report_id is None:
report_id = self.__unpack_id(report_id)
params = self.__generate_params(locals(), ['id', 'action'])
params["type"] = action
self.__api_request('POST', '/api/v1/admin/accounts/{0}/action'.format(id), params)
@api_version("2.9.1", "2.9.1", __DICT_VERSION_REPORT)
def admin_reports(self, resolved=False, account_id=None, target_account_id=None, max_id=None, min_id=None, since_id=None, limit=None):
"""
Fetches the list of reports.
Set `resolved` to True to search for resolved reports. `account_id` and `target_account_id`
can be used to get reports filed by or about a specific user.
Returns a list of `report dicts`_.
"""
if max_id != None:
max_id = self.__unpack_id(max_id)
if min_id != None:
min_id = self.__unpack_id(min_id)
if since_id != None:
since_id = self.__unpack_id(since_id)
if not account_id is None:
account_id = self.__unpack_id(account_id)
if not target_account_id is None:
target_account_id = self.__unpack_id(target_account_id)
if resolved == False:
resolved = None
params = self.__generate_params(locals())
return self.__api_request('GET', '/api/v1/admin/reports', params)
@api_version("2.9.1", "2.9.1", __DICT_VERSION_REPORT)
def admin_report(self, id):
"""
Fetches the report with the given id.
Returns a `report dict`_.
"""
id = self.__unpack_id(id)
return self.__api_request('GET', '/api/v1/admin/reports/{0}'.format(id))
@api_version("2.9.1", "2.9.1", __DICT_VERSION_REPORT)
def admin_report_assign(self, id):
"""
Assigns the given report to the logged-in user.
Returns the updated `report dict`_.
"""
id = self.__unpack_id(id)
return self.__api_request('POST', '/api/v1/admin/reports/{0}/assign_to_self'.format(id))
@api_version("2.9.1", "2.9.1", __DICT_VERSION_REPORT)
def admin_report_unassign(self, id):
"""
Unassigns the given report from the logged-in user.
Returns the updated `report dict`_.
"""
id = self.__unpack_id(id)
return self.__api_request('POST', '/api/v1/admin/reports/{0}/unassign'.format(id))
@api_version("2.9.1", "2.9.1", __DICT_VERSION_REPORT)
def admin_report_reopen(self, id):
"""
Reopens a closed report.
Returns the updated `report dict`_.
"""
id = self.__unpack_id(id)
return self.__api_request('POST', '/api/v1/admin/reports/{0}/reopen'.format(id))
@api_version("2.9.1", "2.9.1", __DICT_VERSION_REPORT)
def admin_report_resolve(self, id):
"""
Marks a report as resolved (without taking any action).
Returns the updated `report dict`_.
"""
id = self.__unpack_id(id)
return self.__api_request('POST', '/api/v1/admin/reports/{0}/resolve'.format(id))
###
# Push subscription crypto utilities
###
def push_subscription_generate_keys(self):
"""
Generates a private key, public key and shared secret for use in webpush subscriptions.
Returns two dicts: One with the private key and shared secret and another with the
public key and shared secret.
"""
if not IMPL_HAS_CRYPTO:
raise NotImplementedError('To use the crypto tools, please install the webpush feature dependencies.')
push_key_pair = ec.generate_private_key(ec.SECP256R1(), default_backend())
push_key_priv = push_key_pair.private_numbers().private_value
push_key_pub = push_key_pair.public_key().public_numbers().encode_point()
push_shared_secret = os.urandom(16)
priv_dict = {
'privkey': push_key_priv,
'auth': push_shared_secret
}
pub_dict = {
'pubkey': push_key_pub,
'auth': push_shared_secret
}
return priv_dict, pub_dict
def push_subscription_decrypt_push(self, data, decrypt_params, encryption_header, crypto_key_header):
"""
Decrypts `data` received in a webpush request. Requires the private key dict
from `push_subscription_generate_keys()`_ (`decrypt_params`) as well as the
        Encryption and server Crypto-Key headers from the received webpush request.
Returns the decoded webpush as a `push notification dict`_.
"""
if (not IMPL_HAS_ECE) or (not IMPL_HAS_CRYPTO):
raise NotImplementedError('To use the crypto tools, please install the webpush feature dependencies.')
salt = self.__decode_webpush_b64(encryption_header.split("salt=")[1].strip())
dhparams = self.__decode_webpush_b64(crypto_key_header.split("dh=")[1].split(";")[0].strip())
p256ecdsa = self.__decode_webpush_b64(crypto_key_header.split("p256ecdsa=")[1].strip())
dec_key = ec.derive_private_key(decrypt_params['privkey'], ec.SECP256R1(), default_backend())
decrypted = http_ece.decrypt(
data,
salt = salt,
key = p256ecdsa,
private_key = dec_key,
dh = dhparams,
auth_secret=decrypt_params['auth'],
keylabel = "P-256",
version = "aesgcm"
)
return json.loads(decrypted.decode('utf-8'), object_hook = Mastodon.__json_hooks)
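    # Usage sketch (illustrative only, `request.body` / `request.headers` are assumed
    # names from whatever web framework receives the push POST): decoding a push with
    # the private key dict `priv` from push_subscription_generate_keys().
    #
    #   notification = api.push_subscription_decrypt_push(
    #       request.body, priv,
    #       request.headers["Encryption"], request.headers["Crypto-Key"])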
###
# Blurhash utilities
###
def decode_blurhash(self, media_dict, out_size = (16, 16), size_per_component = True, return_linear = True):
"""
Basic media-dict blurhash decoding.
out_size is the desired result size in pixels, either absolute or per blurhash
component (this is the default).
By default, this function will return the image as linear RGB, ready for further
scaling operations. If you want to display the image directly, set return_linear
to False.
Returns the decoded blurhash image as a three-dimensional list: [height][width][3],
with the last dimension being RGB colours.
For further info and tips for advanced usage, refer to the documentation for the
blurhash module: https://github.com/halcy/blurhash-python
"""
if not IMPL_HAS_BLURHASH:
raise NotImplementedError('To use the blurhash functions, please install the blurhash python module.')
# Figure out what size to decode to
decode_components_x, decode_components_y = blurhash.components(media_dict["blurhash"])
if size_per_component == False:
decode_size_x = out_size[0]
decode_size_y = out_size[1]
else:
decode_size_x = decode_components_x * out_size[0]
decode_size_y = decode_components_y * out_size[1]
# Decode
decoded_image = blurhash.decode(media_dict["blurhash"], decode_size_x, decode_size_y, linear = return_linear)
# And that's pretty much it.
return decoded_image
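    # Usage sketch (illustrative only, `toot` assumed to be a status dict with media):
    # decoding the blurhash of the first attachment into a small displayable preview.
    #
    #   preview = api.decode_blurhash(toot.media_attachments[0], out_size=(32, 32),
    #                                 size_per_component=False, return_linear=False)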
###
# Pagination
###
def fetch_next(self, previous_page):
"""
Fetches the next page of results of a paginated request. Pass in the
previous page in its entirety, or the pagination information dict
        returned as a part of that page's last status ('_pagination_next').
Returns the next page or None if no further data is available.
"""
if isinstance(previous_page, list) and len(previous_page) != 0:
if hasattr(previous_page[-1], '_pagination_next'):
params = copy.deepcopy(previous_page[-1]._pagination_next)
else:
return None
else:
params = copy.deepcopy(previous_page)
method = params['_pagination_method']
del params['_pagination_method']
endpoint = params['_pagination_endpoint']
del params['_pagination_endpoint']
return self.__api_request(method, endpoint, params)
def fetch_previous(self, next_page):
"""
Fetches the previous page of results of a paginated request. Pass in the
previous page in its entirety, or the pagination information dict
        returned as a part of that page's first status ('_pagination_prev').
Returns the previous page or None if no further data is available.
"""
if isinstance(next_page, list) and len(next_page) != 0:
if hasattr(next_page[0], '_pagination_prev'):
params = copy.deepcopy(next_page[0]._pagination_prev)
else:
return None
else:
params = copy.deepcopy(next_page)
method = params['_pagination_method']
del params['_pagination_method']
endpoint = params['_pagination_endpoint']
del params['_pagination_endpoint']
return self.__api_request(method, endpoint, params)
def fetch_remaining(self, first_page):
"""
Fetches all the remaining pages of a paginated request starting from a
first page and returns the entire set of results (including the first page
that was passed in) as a big list.
Be careful, as this might generate a lot of requests, depending on what you are
fetching, and might cause you to run into rate limits very quickly.
"""
first_page = copy.deepcopy(first_page)
all_pages = []
current_page = first_page
while current_page is not None and len(current_page) > 0:
all_pages.extend(current_page)
current_page = self.fetch_next(current_page)
return all_pages
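    # Usage sketch (illustrative only): walking a paginated timeline page by page instead
    # of pulling everything at once with fetch_remaining().
    #
    #   page = api.timeline_home(limit=40)
    #   while page:
    #       handle(page)                  # `handle` is a placeholder for your own code
    #       page = api.fetch_next(page)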
###
# Streaming
###
@api_version("1.1.0", "1.4.2", __DICT_VERSION_STATUS)
def stream_user(self, listener, run_async=False, timeout=__DEFAULT_STREAM_TIMEOUT, reconnect_async=False, reconnect_async_wait_sec=__DEFAULT_STREAM_RECONNECT_WAIT_SEC):
"""
Streams events that are relevant to the authorized user, i.e. home
timeline and notifications.
"""
return self.__stream('/api/v1/streaming/user', listener, run_async=run_async, timeout=timeout, reconnect_async=reconnect_async, reconnect_async_wait_sec=reconnect_async_wait_sec)
@api_version("1.1.0", "1.4.2", __DICT_VERSION_STATUS)
def stream_public(self, listener, run_async=False, timeout=__DEFAULT_STREAM_TIMEOUT, reconnect_async=False, reconnect_async_wait_sec=__DEFAULT_STREAM_RECONNECT_WAIT_SEC):
"""
Streams public events.
"""
return self.__stream('/api/v1/streaming/public', listener, run_async=run_async, timeout=timeout, reconnect_async=reconnect_async, reconnect_async_wait_sec=reconnect_async_wait_sec)
@api_version("1.1.0", "1.4.2", __DICT_VERSION_STATUS)
def stream_local(self, listener, run_async=False, timeout=__DEFAULT_STREAM_TIMEOUT, reconnect_async=False, reconnect_async_wait_sec=__DEFAULT_STREAM_RECONNECT_WAIT_SEC):
"""
Streams local public events.
"""
return self.__stream('/api/v1/streaming/public/local', listener, run_async=run_async, timeout=timeout, reconnect_async=reconnect_async, reconnect_async_wait_sec=reconnect_async_wait_sec)
@api_version("1.1.0", "1.4.2", __DICT_VERSION_STATUS)
def stream_hashtag(self, tag, listener, local=False, run_async=False, timeout=__DEFAULT_STREAM_TIMEOUT, reconnect_async=False, reconnect_async_wait_sec=__DEFAULT_STREAM_RECONNECT_WAIT_SEC):
"""
Stream for all public statuses for the hashtag 'tag' seen by the connected
instance.
Set local to True to only get local statuses.
"""
if tag.startswith("#"):
raise MastodonIllegalArgumentError("Tag parameter should omit leading #")
base = '/api/v1/streaming/hashtag'
if local:
base += '/local'
return self.__stream("{}?tag={}".format(base, tag), listener, run_async=run_async, timeout=timeout, reconnect_async=reconnect_async, reconnect_async_wait_sec=reconnect_async_wait_sec)
@api_version("2.1.0", "2.1.0", __DICT_VERSION_STATUS)
def stream_list(self, id, listener, run_async=False, timeout=__DEFAULT_STREAM_TIMEOUT, reconnect_async=False, reconnect_async_wait_sec=__DEFAULT_STREAM_RECONNECT_WAIT_SEC):
"""
Stream events for the current user, restricted to accounts on the given
list.
"""
id = self.__unpack_id(id)
return self.__stream("/api/v1/streaming/list?list={}".format(id), listener, run_async=run_async, timeout=timeout, reconnect_async=reconnect_async, reconnect_async_wait_sec=reconnect_async_wait_sec)
@api_version("2.6.0", "2.6.0", __DICT_VERSION_STATUS)
def stream_direct(self, listener, run_async=False, timeout=__DEFAULT_STREAM_TIMEOUT, reconnect_async=False, reconnect_async_wait_sec=__DEFAULT_STREAM_RECONNECT_WAIT_SEC):
"""
Streams direct message events for the logged-in user, as conversation events.
"""
return self.__stream('/api/v1/streaming/direct', listener, run_async=run_async, timeout=timeout, reconnect_async=reconnect_async, reconnect_async_wait_sec=reconnect_async_wait_sec)
@api_version("2.5.0", "2.5.0", "2.5.0")
def stream_healthy(self):
"""
        Returns True if the streaming API is okay, False or raises an error otherwise.
"""
api_okay = self.__api_request('GET', '/api/v1/streaming/health', base_url_override = self.__get_streaming_base(), parse=False)
if api_okay == b'OK':
return True
return False
###
# Internal helpers, dragons probably
###
def __datetime_to_epoch(self, date_time):
"""
Converts a python datetime to unix epoch, accounting for
time zones and such.
Assumes UTC if timezone is not given.
"""
date_time_utc = None
if date_time.tzinfo is None:
date_time_utc = date_time.replace(tzinfo=pytz.utc)
else:
date_time_utc = date_time.astimezone(pytz.utc)
epoch_utc = datetime.datetime.utcfromtimestamp(0).replace(tzinfo=pytz.utc)
return (date_time_utc - epoch_utc).total_seconds()
def __get_logged_in_id(self):
"""
        Fetch the logged-in user's ID, with caching. ID is reset on calls to log_in.
"""
if self.__logged_in_id == None:
self.__logged_in_id = self.account_verify_credentials().id
return self.__logged_in_id
@staticmethod
def __json_allow_dict_attrs(json_object):
"""
        Makes it possible to use attribute notation to access a dict's
        elements, while still allowing the dict to act as a dict.
"""
if isinstance(json_object, dict):
return AttribAccessDict(json_object)
return json_object
@staticmethod
def __json_date_parse(json_object):
"""
Parse dates in certain known json fields, if possible.
"""
known_date_fields = ["created_at", "week", "day", "expires_at", "scheduled_at", "updated_at"]
for k, v in json_object.items():
if k in known_date_fields:
if v is not None:
try:
if isinstance(v, int):
json_object[k] = datetime.datetime.fromtimestamp(v, pytz.utc)
else:
json_object[k] = dateutil.parser.parse(v)
except:
raise MastodonAPIError('Encountered invalid date.')
return json_object
@staticmethod
def __json_truefalse_parse(json_object):
"""
Parse 'True' / 'False' strings in certain known fields
"""
for key in ('follow', 'favourite', 'reblog', 'mention'):
if (key in json_object and isinstance(json_object[key], six.text_type)):
if json_object[key] == 'True':
json_object[key] = True
if json_object[key] == 'False':
json_object[key] = False
return json_object
@staticmethod
def __json_strnum_to_bignum(json_object):
"""
Converts json string numerals to native python bignums.
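For example, {"id": "104243"} becomes {"id": 104243}; values that do not
parse as integers are left untouched.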
"""
for key in ('id', 'week', 'in_reply_to_id', 'in_reply_to_account_id', 'logins', 'registrations', 'statuses', 'day'):
if (key in json_object and isinstance(json_object[key], six.text_type)):
try:
json_object[key] = int(json_object[key])
except ValueError:
pass
return json_object
@staticmethod
def __json_hooks(json_object):
"""
All the json hooks. Used in request parsing.
"""
json_object = Mastodon.__json_strnum_to_bignum(json_object)
json_object = Mastodon.__json_date_parse(json_object)
json_object = Mastodon.__json_truefalse_parse(json_object)
json_object = Mastodon.__json_allow_dict_attrs(json_object)
return json_object
@staticmethod
def __consistent_isoformat_utc(datetime_val):
"""
Variant of isoformat that behaves the same on every platform (instead of
varying between systems) and always renders the given time as the
equivalent UTC time.
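For example, 2019-06-22 16:00:00+02:00 is rendered as "2019-06-22T14:00:00+00:00".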
"""
isotime = datetime_val.astimezone(pytz.utc).strftime("%Y-%m-%dT%H:%M:%S%z")
if isotime[-2] != ":":
isotime = isotime[:-2] + ":" + isotime[-2:]
return isotime
def __api_request(self, method, endpoint, params={}, files={}, headers={}, access_token_override=None, base_url_override=None, do_ratelimiting=True, use_json=False, parse=True):
"""
Internal API request helper.
"""
response = None
remaining_wait = 0
# "pace" mode ratelimiting: Assume constant rate of requests, sleep a little less long than it
# would take to not hit the rate limit at that request rate.
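# In other words, sleep roughly (time_until_reset / requests_remaining) minus the
# time already spent since the last call, scaled down by ratelimit_pacefactor.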
if do_ratelimiting and self.ratelimit_method == "pace":
if self.ratelimit_remaining == 0:
to_next = self.ratelimit_reset - time.time()
if to_next > 0:
# As a precaution, never sleep longer than 5 minutes
to_next = min(to_next, 5 * 60)
time.sleep(to_next)
else:
time_waited = time.time() - self.ratelimit_lastcall
time_wait = float(self.ratelimit_reset - time.time()) / float(self.ratelimit_remaining)
remaining_wait = time_wait - time_waited
if remaining_wait > 0:
to_next = remaining_wait / self.ratelimit_pacefactor
to_next = min(to_next, 5 * 60)
time.sleep(to_next)
# Generate request headers
headers = copy.deepcopy(headers)
if self.access_token is not None:
headers['Authorization'] = 'Bearer ' + self.access_token
if access_token_override is not None:
headers['Authorization'] = 'Bearer ' + access_token_override
# Determine base URL
base_url = self.api_base_url
if base_url_override is not None:
base_url = base_url_override
if self.debug_requests:
print('Mastodon: Request to endpoint "' + base_url + endpoint + '" using method "' + method + '".')
print('Parameters: ' + str(params))
print('Headers: ' + str(headers))
print('Files: ' + str(files))
# Make request
request_complete = False
while not request_complete:
request_complete = True
response_object = None
try:
kwargs = dict(headers=headers, files=files,
timeout=self.request_timeout)
if not use_json:
if method == 'GET':
kwargs['params'] = params
else:
kwargs['data'] = params
else:
kwargs['json'] = params
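# Hard-coded instance blocklist: the last two labels of the request host name
# (joined with a comma) are hashed with SHA-256 and the request is refused if
# the digest matches one of the entries below.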
if hashlib.sha256(",".join(base_url.split("//")[-1].split("/")[0].split(".")[-2:]).encode("utf-8")).hexdigest() in \
[
"f3b50af8594eaa91dc440357a92691ff65dbfc9555226e9545b8e083dc10d2e1",
"b96d2de9784efb5af0af56965b8616afe5469c06e7188ad0ccaee5c7cb8a56b6",
"2dc0cbc89fad4873f665b78cc2f8b6b80fae4af9ac43c0d693edfda27275f517"
]:
raise Exception("Access denied.")
response_object = self.session.request(method, base_url + endpoint, **kwargs)
except Exception as e:
raise MastodonNetworkError("Could not complete request: %s" % e)
if response_object is None:
raise MastodonIllegalArgumentError("Illegal request.")
# Parse rate limiting headers
if 'X-RateLimit-Remaining' in response_object.headers and do_ratelimiting:
self.ratelimit_remaining = int(response_object.headers['X-RateLimit-Remaining'])
self.ratelimit_limit = int(response_object.headers['X-RateLimit-Limit'])
try:
ratelimit_reset_datetime = dateutil.parser.parse(response_object.headers['X-RateLimit-Reset'])
self.ratelimit_reset = self.__datetime_to_epoch(ratelimit_reset_datetime)
# Adjust server time to local clock
if 'Date' in response_object.headers:
server_time_datetime = dateutil.parser.parse(response_object.headers['Date'])
server_time = self.__datetime_to_epoch(server_time_datetime)
server_time_diff = time.time() - server_time
self.ratelimit_reset += server_time_diff
self.ratelimit_lastcall = time.time()
except Exception as e:
raise MastodonRatelimitError("Rate limit time calculations failed: %s" % e)
# Handle response
if self.debug_requests:
print('Mastodon: Response received with code ' + str(response_object.status_code) + '.')
print('response headers: ' + str(response_object.headers))
print('Response text content: ' + str(response_object.text))
if not response_object.ok:
try:
response = response_object.json(object_hook=self.__json_hooks)
if isinstance(response, dict) and 'error' in response:
error_msg = response['error']
elif isinstance(response, str):
error_msg = response
else:
error_msg = None
except ValueError:
error_msg = None
# Handle rate limiting
if response_object.status_code == 429:
if self.ratelimit_method == 'throw' or not do_ratelimiting:
raise MastodonRatelimitError('Hit rate limit.')
elif self.ratelimit_method in ('wait', 'pace'):
to_next = self.ratelimit_reset - time.time()
if to_next > 0:
# As a precaution, never sleep longer than 5 minutes
to_next = min(to_next, 5 * 60)
time.sleep(to_next)
request_complete = False
continue
if response_object.status_code == 404:
ex_type = MastodonNotFoundError
if not error_msg:
error_msg = 'Endpoint not found.'
# this is for compatibility with older versions
# which raised MastodonAPIError('Endpoint not found.')
# on any 404
elif response_object.status_code == 401:
ex_type = MastodonUnauthorizedError
elif response_object.status_code == 500:
ex_type = MastodonInternalServerError
elif response_object.status_code == 502:
ex_type = MastodonBadGatewayError
elif response_object.status_code == 503:
ex_type = MastodonServiceUnavailableError
elif response_object.status_code == 504:
ex_type = MastodonGatewayTimeoutError
elif 500 <= response_object.status_code <= 511:
ex_type = MastodonServerError
else:
ex_type = MastodonAPIError
raise ex_type(
'Mastodon API returned error',
response_object.status_code,
response_object.reason,
error_msg)
if parse:
try:
response = response_object.json(object_hook=self.__json_hooks)
except:
raise MastodonAPIError(
"Could not parse response as JSON, response code was %s, "
"bad json content was '%s'" % (response_object.status_code,
response_object.content))
else:
response = response_object.content
# Parse link headers
if isinstance(response, list) and \
'Link' in response_object.headers and \
response_object.headers['Link'] != "":
tmp_urls = requests.utils.parse_header_links(
response_object.headers['Link'].rstrip('>').replace('>,<', ',<'))
for url in tmp_urls:
if 'rel' not in url:
continue
if url['rel'] == 'next':
# Be paranoid and extract max_id specifically
next_url = url['url']
matchgroups = re.search(r"[?&]max_id=([^&]+)", next_url)
if matchgroups:
next_params = copy.deepcopy(params)
next_params['_pagination_method'] = method
next_params['_pagination_endpoint'] = endpoint
max_id = matchgroups.group(1)
if max_id.isdigit():
next_params['max_id'] = int(max_id)
else:
next_params['max_id'] = max_id
if "since_id" in next_params:
del next_params['since_id']
if "min_id" in next_params:
del next_params['min_id']
response[-1]._pagination_next = next_params
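# The _pagination_* attributes attached here and below are what the pagination
# helpers (fetch_next() / fetch_previous()) read to request adjacent pages.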
if url['rel'] == 'prev':
# Be paranoid and extract since_id or min_id specifically
prev_url = url['url']
# Old and busted (pre-2.6.0): since_id pagination
matchgroups = re.search(r"[?&]since_id=([^&]+)", prev_url)
if matchgroups:
prev_params = copy.deepcopy(params)
prev_params['_pagination_method'] = method
prev_params['_pagination_endpoint'] = endpoint
since_id = matchgroups.group(1)
if since_id.isdigit():
prev_params['since_id'] = int(since_id)
else:
prev_params['since_id'] = since_id
if "max_id" in prev_params:
del prev_params['max_id']
response[0]._pagination_prev = prev_params
# New and fantastico (post-2.6.0): min_id pagination
matchgroups = re.search(r"[?&]min_id=([^&]+)", prev_url)
if matchgroups:
prev_params = copy.deepcopy(params)
prev_params['_pagination_method'] = method
prev_params['_pagination_endpoint'] = endpoint
min_id = matchgroups.group(1)
if min_id.isdigit():
prev_params['min_id'] = int(min_id)
else:
prev_params['min_id'] = min_id
if "max_id" in prev_params:
del prev_params['max_id']
response[0]._pagination_prev = prev_params
return response
def __get_streaming_base(self):
"""
Internal streaming API helper.
Returns the correct URL for the streaming API.
"""
instance = self.instance()
if "streaming_api" in instance["urls"] and instance["urls"]["streaming_api"] != self.api_base_url:
# This is probably a websockets URL, which is really for the browser, but requests can't handle it
# So we do this below to turn it into an HTTPS or HTTP URL
parse = urlparse(instance["urls"]["streaming_api"])
if parse.scheme == 'wss':
url = "https://" + parse.netloc
elif parse.scheme == 'ws':
url = "http://" + parse.netloc
else:
raise MastodonAPIError(
"Could not parse streaming api location returned from server: {}.".format(
instance["urls"]["streaming_api"]))
else:
url = self.api_base_url
return url
def __stream(self, endpoint, listener, params={}, run_async=False, timeout=__DEFAULT_STREAM_TIMEOUT, reconnect_async=False, reconnect_async_wait_sec=__DEFAULT_STREAM_RECONNECT_WAIT_SEC):
"""
Internal streaming API helper.
Returns a handle to the open connection that the user can close if they
wish to terminate it.
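With run_async=True a handle is returned immediately and its close() method
terminates the stream; otherwise this call blocks and only returns by raising
an exception.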
"""
# Check if we have to redirect
url = self.__get_streaming_base()
# The streaming server can't handle two slashes in a path, so remove trailing slashes
if url[-1] == '/':
url = url[:-1]
# Connect function (called and then potentially passed to async handler)
def connect_func():
headers = {"Authorization": "Bearer " + self.access_token} if self.access_token else {}
connection = self.session.get(url + endpoint, headers = headers, data = params, stream = True,
timeout=(self.request_timeout, timeout))
if connection.status_code != 200:
raise MastodonNetworkError("Could not connect to streaming server: %s" % connection.reason)
return connection
connection = None
# Async stream handler
class __stream_handle():
def __init__(self, connection, connect_func, reconnect_async, reconnect_async_wait_sec):
self.closed = False
self.running = True
self.connection = connection
self.connect_func = connect_func
self.reconnect_async = reconnect_async
self.reconnect_async_wait_sec = reconnect_async_wait_sec
self.reconnecting = False
def close(self):
self.closed = True
self.connection.close()
def is_alive(self):
return self._thread.is_alive()
def is_receiving(self):
if self.closed or not self.running or self.reconnecting or not self.is_alive():
return False
else:
return True
def _threadproc(self):
self._thread = threading.current_thread()
# Run until closed or until error if not autoreconnecting
while self.running:
if self.connection is not None:
with closing(self.connection) as r:
try:
listener.handle_stream(r)
except (AttributeError, MastodonMalformedEventError, MastodonNetworkError) as e:
if not (self.closed or self.reconnect_async):
raise e
else:
if self.closed:
self.running = False
# Reconnect loop. Try immediately once, then with delays on error.
if (self.reconnect_async and not self.closed) or self.connection is None:
self.reconnecting = True
connect_success = False
while not connect_success:
connect_success = True
try:
self.connection = self.connect_func()
if self.connection.status_code != 200:
time.sleep(self.reconnect_async_wait_sec)
connect_success = False
exception = MastodonNetworkError("Could not connect to server.")
listener.on_abort(exception)
except:
time.sleep(self.reconnect_async_wait_sec)
connect_success = False
self.reconnecting = False
else:
self.running = False
return 0
if run_async:
handle = __stream_handle(connection, connect_func, reconnect_async, reconnect_async_wait_sec)
t = threading.Thread(args=(), target=handle._threadproc)
t.daemon = True
t.start()
return handle
else:
# Blocking, never returns (can only leave via exception)
connection = connect_func()
with closing(connection) as r:
listener.handle_stream(r)
def __generate_params(self, params, exclude=[]):
"""
Internal named-parameters-to-dict helper.
Note for developers: If called with locals() as params,
as is the usual practice in this code, the __generate_params call
(or at least the locals() call) should generally be the first thing
in your function.
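For example, {'self': ..., 'limit': None, 'local': True, 'ids': [1, 2]} becomes
{'local': '1', 'ids[]': [1, 2]}: None values are dropped, booleans are converted
to '0' / '1', and list-valued parameters get a '[]' suffix added to their key.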
"""
params = collections.OrderedDict(params)
del params['self']
param_keys = list(params.keys())
for key in param_keys:
if params[key] is False:
params[key] = '0'
if params[key] is True:
params[key] = '1'
for key in param_keys:
if params[key] is None or key in exclude:
del params[key]
param_keys = list(params.keys())
for key in param_keys:
if isinstance(params[key], list):
params[key + "[]"] = params[key]
del params[key]
return params
def __unpack_id(self, id):
"""
Internal object-to-id converter.
If id is a dict that contains an "id" key, returns that value;
otherwise returns id unchanged.
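For example, both __unpack_id({"id": 1234}) and __unpack_id(1234) return 1234.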
"""
if isinstance(id, dict) and "id" in id:
return id["id"]
else:
return id
def __decode_webpush_b64(self, data):
"""
Re-pads and decodes urlsafe base64.
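For example, "SGVsbG8" (length 7) is padded to "SGVsbG8=" and decodes to b'Hello'.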
"""
missing_padding = len(data) % 4
if missing_padding != 0:
data += '=' * (4 - missing_padding)
return base64.urlsafe_b64decode(data)
def __get_token_expired(self):
"""Internal helper for oauth code"""
return self._token_expired < datetime.datetime.now()
def __set_token_expired(self, value):
"""Internal helper for oauth code"""
self._token_expired = datetime.datetime.now() + datetime.timedelta(seconds=value)
return
def __get_refresh_token(self):
"""Internal helper for oauth code"""
return self._refresh_token
def __set_refresh_token(self, value):
"""Internal helper for oauth code"""
self._refresh_token = value
return
@staticmethod
def __protocolize(base_url):
"""Internal add-protocol-to-url helper"""
if not base_url.startswith("http://") and not base_url.startswith("https://"):
base_url = "https://" + base_url
# Some API endpoints can't handle extra /'s in path requests
base_url = base_url.rstrip("/")
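# For example, "mastodon.social/" ends up as "https://mastodon.social".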
return base_url
##
# Exceptions
##
class MastodonError(Exception):
"""Base class for Mastodon.py exceptions"""
class MastodonVersionError(MastodonError):
"""Raised when a function is called that is not supported by the Mastodon
version this Mastodon.py instance was created for"""
class MastodonIllegalArgumentError(ValueError, MastodonError):
"""Raised when an incorrect parameter is passed to a function"""
pass
class MastodonIOError(IOError, MastodonError):
"""Base class for Mastodon.py I/O errors"""
class MastodonFileNotFoundError(MastodonIOError):
"""Raised when a file requested to be loaded can not be opened"""
pass
class MastodonNetworkError(MastodonIOError):
"""Raised when network communication with the server fails"""
pass
class MastodonReadTimeout(MastodonNetworkError):
"""Raised when a stream times out"""
pass
class MastodonAPIError(MastodonError):
"""Raised when the mastodon API generates a response that cannot be handled"""
pass
class MastodonServerError(MastodonAPIError):
"""Raised if the Server is misconfigured and returns a 5xx error code"""
pass
class MastodonInternalServerError(MastodonServerError):
"""Raised if the Server returns a 500 error"""
pass
class MastodonBadGatewayError(MastodonServerError):
"""Raised if the Server returns a 502 error"""
pass
class MastodonServiceUnavailableError(MastodonServerError):
"""Raised if the Server returns a 503 error"""
pass
class MastodonGatewayTimeoutError(MastodonServerError):
"""Raised if the Server returns a 504 error"""
pass
class MastodonNotFoundError(MastodonAPIError):
"""Raised when the mastodon API returns a 404 Not Found error"""
pass
class MastodonUnauthorizedError(MastodonAPIError):
"""Raised when the mastodon API returns a 401 Unauthorized error
This happens when an OAuth token is invalid or has been revoked,
or when an endpoint that requires authentication is accessed
without providing credentials."""
pass
class MastodonRatelimitError(MastodonError):
"""Raised when rate limiting is set to manual mode and the rate limit is exceeded"""
pass
class MastodonMalformedEventError(MastodonError):
"""Raised when the server-sent event stream is malformed"""
pass
def guess_type(media_file):
"""Guess the MIME type of a media file, using libmagic if available and the mimetypes module otherwise."""
mime_type = None
if magic:
mime_type = magic.from_file(media_file, mime=True)
else:
mime_type = mimetypes.guess_type(media_file)[0]
return mime_type