Mirror of https://github.com/Tautulli/Tautulli.git (synced 2025-07-16 02:02:58 -07:00)
Include posters in Twitter notifications
* Also cleanup Facebook
This commit is contained in:
parent 6f33d29a51
commit acc18b8d68
25 changed files with 6970 additions and 4745 deletions
File diff suppressed because it is too large
22  lib/requests_oauthlib/__init__.py  Normal file
@@ -0,0 +1,22 @@
from .oauth1_auth import OAuth1
from .oauth1_session import OAuth1Session
from .oauth2_auth import OAuth2
from .oauth2_session import OAuth2Session, TokenUpdated

__version__ = '0.6.1'

import requests
if requests.__version__ < '2.0.0':
    msg = ('You are using requests version %s, which is older than '
           'requests-oauthlib expects, please upgrade to 2.0.0 or later.')
    raise Warning(msg % requests.__version__)

import logging
try:  # Python 2.7+
    from logging import NullHandler
except ImportError:
    class NullHandler(logging.Handler):
        def emit(self, record):
            pass

logging.getLogger('requests_oauthlib').addHandler(NullHandler())
7  lib/requests_oauthlib/compliance_fixes/__init__.py  Normal file
@@ -0,0 +1,7 @@
from __future__ import absolute_import

from .facebook import facebook_compliance_fix
from .linkedin import linkedin_compliance_fix
from .slack import slack_compliance_fix
from .mailchimp import mailchimp_compliance_fix
from .weibo import weibo_compliance_fix
18  lib/requests_oauthlib/compliance_fixes/douban.py  Normal file
@@ -0,0 +1,18 @@
import json

from oauthlib.common import to_unicode


def douban_compliance_fix(session):

    def fix_token_type(r):
        token = json.loads(r.text)
        token.setdefault('token_type', 'Bearer')
        fixed_token = json.dumps(token)
        r._content = to_unicode(fixed_token).encode('utf-8')
        return r

    session._client_default_token_placement = 'query'
    session.register_compliance_hook('access_token_response', fix_token_type)

    return session
33  lib/requests_oauthlib/compliance_fixes/facebook.py  Normal file
@@ -0,0 +1,33 @@
from json import dumps
try:
    from urlparse import parse_qsl
except ImportError:
    from urllib.parse import parse_qsl

from oauthlib.common import to_unicode


def facebook_compliance_fix(session):

    def _compliance_fix(r):
        # if Facebook claims to be sending us json, let's trust them.
        if 'application/json' in r.headers.get('content-type', {}):
            return r

        # Facebook returns a content-type of text/plain when sending their
        # x-www-form-urlencoded responses, along with a 200. If not, let's
        # assume we're getting JSON and bail on the fix.
        if 'text/plain' in r.headers.get('content-type', {}) and r.status_code == 200:
            token = dict(parse_qsl(r.text, keep_blank_values=True))
        else:
            return r

        expires = token.get('expires')
        if expires is not None:
            token['expires_in'] = expires
        token['token_type'] = 'Bearer'
        r._content = to_unicode(dumps(token)).encode('UTF-8')
        return r

    session.register_compliance_hook('access_token_response', _compliance_fix)
    return session
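Not part of this commit: a minimal usage sketch of the Facebook compliance fix above, with placeholder client credentials, redirect URI and authorization response.

# Sketch only: wrap an OAuth2Session with the compliance fix before fetching a token.
from requests_oauthlib import OAuth2Session
from requests_oauthlib.compliance_fixes import facebook_compliance_fix

session = OAuth2Session('client-id', redirect_uri='https://127.0.0.1/callback')
session = facebook_compliance_fix(session)  # registers the access_token_response hook
token = session.fetch_token(
    'https://graph.facebook.com/oauth/access_token',
    client_secret='client-secret',
    authorization_response='https://127.0.0.1/callback?code=placeholder')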
24  lib/requests_oauthlib/compliance_fixes/linkedin.py  Normal file
@@ -0,0 +1,24 @@
from json import loads, dumps

from oauthlib.common import add_params_to_uri, to_unicode


def linkedin_compliance_fix(session):

    def _missing_token_type(r):
        token = loads(r.text)
        token['token_type'] = 'Bearer'
        r._content = to_unicode(dumps(token)).encode('UTF-8')
        return r

    def _non_compliant_param_name(url, headers, data):
        token = [('oauth2_access_token', session.access_token)]
        url = add_params_to_uri(url, token)
        return url, headers, data

    session._client.default_token_placement = 'query'
    session.register_compliance_hook('access_token_response',
                                     _missing_token_type)
    session.register_compliance_hook('protected_request',
                                     _non_compliant_param_name)
    return session
22  lib/requests_oauthlib/compliance_fixes/mailchimp.py  Normal file
@@ -0,0 +1,22 @@
import json

from oauthlib.common import to_unicode


def mailchimp_compliance_fix(session):
    def _null_scope(r):
        token = json.loads(r.text)
        if 'scope' in token and token['scope'] is None:
            token.pop('scope')
        r._content = to_unicode(json.dumps(token)).encode('utf-8')
        return r

    def _non_zero_expiration(r):
        token = json.loads(r.text)
        if 'expires_in' in token and token['expires_in'] == 0:
            token['expires_in'] = 3600
        r._content = to_unicode(json.dumps(token)).encode('utf-8')
        return r

    session.register_compliance_hook('access_token_response', _null_scope)
    session.register_compliance_hook('access_token_response', _non_zero_expiration)
    return session
37  lib/requests_oauthlib/compliance_fixes/slack.py  Normal file
@@ -0,0 +1,37 @@
try:
    from urlparse import urlparse, parse_qs
except ImportError:
    from urllib.parse import urlparse, parse_qs

from oauthlib.common import add_params_to_uri


def slack_compliance_fix(session):
    def _non_compliant_param_name(url, headers, data):
        # If the user has already specified the token, either in the URL
        # or in a data dictionary, then there's nothing to do.
        # If the specified token is different from ``session.access_token``,
        # we assume the user intends to override the access token.
        url_query = dict(parse_qs(urlparse(url).query))
        token = url_query.get("token")
        if not token and isinstance(data, dict):
            token = data.get("token")

        if token:
            # Nothing to do, just return.
            return url, headers, data

        if not data:
            data = {"token": session.access_token}
        elif isinstance(data, dict):
            data["token"] = session.access_token
        else:
            # ``data`` is something other than a dict: maybe a stream,
            # maybe a file object, maybe something else. We can't easily
            # modify it, so we'll set the token by modifying the URL instead.
            token = [('token', session.access_token)]
            url = add_params_to_uri(url, token)
        return url, headers, data

    session.register_compliance_hook('protected_request', _non_compliant_param_name)
    return session
17  lib/requests_oauthlib/compliance_fixes/weibo.py  Normal file
@@ -0,0 +1,17 @@
from json import loads, dumps

from oauthlib.common import to_unicode


def weibo_compliance_fix(session):

    def _missing_token_type(r):
        token = loads(r.text)
        token['token_type'] = 'Bearer'
        r._content = to_unicode(dumps(token)).encode('UTF-8')
        return r

    session._client.default_token_placement = 'query'
    session.register_compliance_hook('access_token_response',
                                     _missing_token_type)
    return session
95  lib/requests_oauthlib/oauth1_auth.py  Normal file
@@ -0,0 +1,95 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

import logging

from oauthlib.common import extract_params
from oauthlib.oauth1 import Client, SIGNATURE_HMAC, SIGNATURE_TYPE_AUTH_HEADER
from oauthlib.oauth1 import SIGNATURE_TYPE_BODY
from requests.compat import is_py3
from requests.utils import to_native_string
from requests.auth import AuthBase

CONTENT_TYPE_FORM_URLENCODED = 'application/x-www-form-urlencoded'
CONTENT_TYPE_MULTI_PART = 'multipart/form-data'

if is_py3:
    unicode = str

log = logging.getLogger(__name__)

# OBS!: Correct signing of requests are conditional on invoking OAuth1
# as the last step of preparing a request, or at least having the
# content-type set properly.
class OAuth1(AuthBase):
    """Signs the request using OAuth 1 (RFC5849)"""

    client_class = Client

    def __init__(self, client_key,
                 client_secret=None,
                 resource_owner_key=None,
                 resource_owner_secret=None,
                 callback_uri=None,
                 signature_method=SIGNATURE_HMAC,
                 signature_type=SIGNATURE_TYPE_AUTH_HEADER,
                 rsa_key=None, verifier=None,
                 decoding='utf-8',
                 client_class=None,
                 force_include_body=False,
                 **kwargs):

        try:
            signature_type = signature_type.upper()
        except AttributeError:
            pass

        client_class = client_class or self.client_class

        self.force_include_body = force_include_body

        self.client = client_class(client_key, client_secret, resource_owner_key,
            resource_owner_secret, callback_uri, signature_method,
            signature_type, rsa_key, verifier, decoding=decoding, **kwargs)

    def __call__(self, r):
        """Add OAuth parameters to the request.

        Parameters may be included from the body if the content-type is
        urlencoded, if no content type is set a guess is made.
        """
        # Overwriting url is safe here as request will not modify it past
        # this point.
        log.debug('Signing request %s using client %s', r, self.client)

        content_type = r.headers.get('Content-Type', '')
        if (not content_type and extract_params(r.body)
                or self.client.signature_type == SIGNATURE_TYPE_BODY):
            content_type = CONTENT_TYPE_FORM_URLENCODED
        if not isinstance(content_type, unicode):
            content_type = content_type.decode('utf-8')

        is_form_encoded = (CONTENT_TYPE_FORM_URLENCODED in content_type)

        log.debug('Including body in call to sign: %s',
                  is_form_encoded or self.force_include_body)

        if is_form_encoded:
            r.headers['Content-Type'] = CONTENT_TYPE_FORM_URLENCODED
            r.url, headers, r.body = self.client.sign(
                unicode(r.url), unicode(r.method), r.body or '', r.headers)
        elif self.force_include_body:
            # To allow custom clients to work on non form encoded bodies.
            r.url, headers, r.body = self.client.sign(
                unicode(r.url), unicode(r.method), r.body or '', r.headers)
        else:
            # Omit body data in the signing of non form-encoded requests
            r.url, headers, _ = self.client.sign(
                unicode(r.url), unicode(r.method), None, r.headers)

        r.prepare_headers(headers)
        r.url = to_native_string(r.url)
        log.debug('Updated url: %s', r.url)
        log.debug('Updated headers: %s', headers)
        log.debug('Updated body: %r', r.body)
        return r
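Not part of this commit: a minimal sketch of signing a plain requests call with the OAuth1 auth hook above; all keys and secrets are placeholders.

# Sketch only: OAuth1 works as a requests auth object.
import requests
from requests_oauthlib import OAuth1

auth = OAuth1('client-key', client_secret='client-secret',
              resource_owner_key='access-token',
              resource_owner_secret='access-token-secret')
r = requests.get('https://api.twitter.com/1.1/account/verify_credentials.json', auth=auth)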
378  lib/requests_oauthlib/oauth1_session.py  Normal file
@@ -0,0 +1,378 @@
from __future__ import unicode_literals

try:
    from urlparse import urlparse
except ImportError:
    from urllib.parse import urlparse

import logging

from oauthlib.common import add_params_to_uri
from oauthlib.common import urldecode as _urldecode
from oauthlib.oauth1 import (
    SIGNATURE_HMAC, SIGNATURE_RSA, SIGNATURE_TYPE_AUTH_HEADER
)
import requests

from . import OAuth1

import sys
if sys.version > "3":
    unicode = str


log = logging.getLogger(__name__)


def urldecode(body):
    """Parse query or json to python dictionary"""
    try:
        return _urldecode(body)
    except:
        import json
        return json.loads(body)


class TokenRequestDenied(ValueError):

    def __init__(self, message, response):
        super(TokenRequestDenied, self).__init__(message)
        self.response = response

    @property
    def status_code(self):
        """For backwards-compatibility purposes"""
        return self.response.status_code


class TokenMissing(ValueError):
    def __init__(self, message, response):
        super(TokenMissing, self).__init__(message)
        self.response = response


class VerifierMissing(ValueError):
    pass


class OAuth1Session(requests.Session):
    """Request signing and convenience methods for the oauth dance.

    What is the difference between OAuth1Session and OAuth1?

    OAuth1Session actually uses OAuth1 internally and its purpose is to assist
    in the OAuth workflow through convenience methods to prepare authorization
    URLs and parse the various token and redirection responses. It also provide
    rudimentary validation of responses.

    An example of the OAuth workflow using a basic CLI app and Twitter.

    >>> # Credentials obtained during the registration.
    >>> client_key = 'client key'
    >>> client_secret = 'secret'
    >>> callback_uri = 'https://127.0.0.1/callback'
    >>>
    >>> # Endpoints found in the OAuth provider API documentation
    >>> request_token_url = 'https://api.twitter.com/oauth/request_token'
    >>> authorization_url = 'https://api.twitter.com/oauth/authorize'
    >>> access_token_url = 'https://api.twitter.com/oauth/access_token'
    >>>
    >>> oauth_session = OAuth1Session(client_key,client_secret=client_secret, callback_uri=callback_uri)
    >>>
    >>> # First step, fetch the request token.
    >>> oauth_session.fetch_request_token(request_token_url)
    {
        'oauth_token': 'kjerht2309u',
        'oauth_token_secret': 'lsdajfh923874',
    }
    >>>
    >>> # Second step. Follow this link and authorize
    >>> oauth_session.authorization_url(authorization_url)
    'https://api.twitter.com/oauth/authorize?oauth_token=sdf0o9823sjdfsdf&oauth_callback=https%3A%2F%2F127.0.0.1%2Fcallback'
    >>>
    >>> # Third step. Fetch the access token
    >>> redirect_response = raw_input('Paste the full redirect URL here.')
    >>> oauth_session.parse_authorization_response(redirect_response)
    {
        'oauth_token: 'kjerht2309u',
        'oauth_token_secret: 'lsdajfh923874',
        'oauth_verifier: 'w34o8967345',
    }
    >>> oauth_session.fetch_access_token(access_token_url)
    {
        'oauth_token': 'sdf0o9823sjdfsdf',
        'oauth_token_secret': '2kjshdfp92i34asdasd',
    }
    >>> # Done. You can now make OAuth requests.
    >>> status_url = 'http://api.twitter.com/1/statuses/update.json'
    >>> new_status = {'status': 'hello world!'}
    >>> oauth_session.post(status_url, data=new_status)
    <Response [200]>
    """

    def __init__(self, client_key,
                 client_secret=None,
                 resource_owner_key=None,
                 resource_owner_secret=None,
                 callback_uri=None,
                 signature_method=SIGNATURE_HMAC,
                 signature_type=SIGNATURE_TYPE_AUTH_HEADER,
                 rsa_key=None,
                 verifier=None,
                 client_class=None,
                 force_include_body=False,
                 **kwargs):
        """Construct the OAuth 1 session.

        :param client_key: A client specific identifier.
        :param client_secret: A client specific secret used to create HMAC and
                              plaintext signatures.
        :param resource_owner_key: A resource owner key, also referred to as
                                   request token or access token depending on
                                   when in the workflow it is used.
        :param resource_owner_secret: A resource owner secret obtained with
                                      either a request or access token. Often
                                      referred to as token secret.
        :param callback_uri: The URL the user is redirect back to after
                             authorization.
        :param signature_method: Signature methods determine how the OAuth
                                 signature is created. The three options are
                                 oauthlib.oauth1.SIGNATURE_HMAC (default),
                                 oauthlib.oauth1.SIGNATURE_RSA and
                                 oauthlib.oauth1.SIGNATURE_PLAIN.
        :param signature_type: Signature type decides where the OAuth
                               parameters are added. Either in the
                               Authorization header (default) or to the URL
                               query parameters or the request body. Defined as
                               oauthlib.oauth1.SIGNATURE_TYPE_AUTH_HEADER,
                               oauthlib.oauth1.SIGNATURE_TYPE_QUERY and
                               oauthlib.oauth1.SIGNATURE_TYPE_BODY
                               respectively.
        :param rsa_key: The private RSA key as a string. Can only be used with
                        signature_method=oauthlib.oauth1.SIGNATURE_RSA.
        :param verifier: A verifier string to prove authorization was granted.
        :param client_class: A subclass of `oauthlib.oauth1.Client` to use with
                             `requests_oauthlib.OAuth1` instead of the default
        :param force_include_body: Always include the request body in the
                                   signature creation.
        :param **kwargs: Additional keyword arguments passed to `OAuth1`
        """
        super(OAuth1Session, self).__init__()
        self._client = OAuth1(client_key,
                              client_secret=client_secret,
                              resource_owner_key=resource_owner_key,
                              resource_owner_secret=resource_owner_secret,
                              callback_uri=callback_uri,
                              signature_method=signature_method,
                              signature_type=signature_type,
                              rsa_key=rsa_key,
                              verifier=verifier,
                              client_class=client_class,
                              force_include_body=force_include_body,
                              **kwargs)
        self.auth = self._client

    @property
    def authorized(self):
        """Boolean that indicates whether this session has an OAuth token
        or not. If `self.authorized` is True, you can reasonably expect
        OAuth-protected requests to the resource to succeed. If
        `self.authorized` is False, you need the user to go through the OAuth
        authentication dance before OAuth-protected requests to the resource
        will succeed.
        """
        if self._client.client.signature_method == SIGNATURE_RSA:
            # RSA only uses resource_owner_key
            return bool(self._client.client.resource_owner_key)
        else:
            # other methods of authentication use all three pieces
            return (
                bool(self._client.client.client_secret) and
                bool(self._client.client.resource_owner_key) and
                bool(self._client.client.resource_owner_secret)
            )

    def authorization_url(self, url, request_token=None, **kwargs):
        """Create an authorization URL by appending request_token and optional
        kwargs to url.

        This is the second step in the OAuth 1 workflow. The user should be
        redirected to this authorization URL, grant access to you, and then
        be redirected back to you. The redirection back can either be specified
        during client registration or by supplying a callback URI per request.

        :param url: The authorization endpoint URL.
        :param request_token: The previously obtained request token.
        :param kwargs: Optional parameters to append to the URL.
        :returns: The authorization URL with new parameters embedded.

        An example using a registered default callback URI.

        >>> request_token_url = 'https://api.twitter.com/oauth/request_token'
        >>> authorization_url = 'https://api.twitter.com/oauth/authorize'
        >>> oauth_session = OAuth1Session('client-key', client_secret='secret')
        >>> oauth_session.fetch_request_token(request_token_url)
        {
            'oauth_token': 'sdf0o9823sjdfsdf',
            'oauth_token_secret': '2kjshdfp92i34asdasd',
        }
        >>> oauth_session.authorization_url(authorization_url)
        'https://api.twitter.com/oauth/authorize?oauth_token=sdf0o9823sjdfsdf'
        >>> oauth_session.authorization_url(authorization_url, foo='bar')
        'https://api.twitter.com/oauth/authorize?oauth_token=sdf0o9823sjdfsdf&foo=bar'

        An example using an explicit callback URI.

        >>> request_token_url = 'https://api.twitter.com/oauth/request_token'
        >>> authorization_url = 'https://api.twitter.com/oauth/authorize'
        >>> oauth_session = OAuth1Session('client-key', client_secret='secret', callback_uri='https://127.0.0.1/callback')
        >>> oauth_session.fetch_request_token(request_token_url)
        {
            'oauth_token': 'sdf0o9823sjdfsdf',
            'oauth_token_secret': '2kjshdfp92i34asdasd',
        }
        >>> oauth_session.authorization_url(authorization_url)
        'https://api.twitter.com/oauth/authorize?oauth_token=sdf0o9823sjdfsdf&oauth_callback=https%3A%2F%2F127.0.0.1%2Fcallback'
        """
        kwargs['oauth_token'] = request_token or self._client.client.resource_owner_key
        log.debug('Adding parameters %s to url %s', kwargs, url)
        return add_params_to_uri(url, kwargs.items())

    def fetch_request_token(self, url, realm=None, **request_kwargs):
        """Fetch a request token.

        This is the first step in the OAuth 1 workflow. A request token is
        obtained by making a signed post request to url. The token is then
        parsed from the application/x-www-form-urlencoded response and ready
        to be used to construct an authorization url.

        :param url: The request token endpoint URL.
        :param realm: A list of realms to request access to.
        :param \*\*request_kwargs: Optional arguments passed to ''post''
            function in ''requests.Session''
        :returns: The response in dict format.

        Note that a previously set callback_uri will be reset for your
        convenience, or else signature creation will be incorrect on
        consecutive requests.

        >>> request_token_url = 'https://api.twitter.com/oauth/request_token'
        >>> oauth_session = OAuth1Session('client-key', client_secret='secret')
        >>> oauth_session.fetch_request_token(request_token_url)
        {
            'oauth_token': 'sdf0o9823sjdfsdf',
            'oauth_token_secret': '2kjshdfp92i34asdasd',
        }
        """
        self._client.client.realm = ' '.join(realm) if realm else None
        token = self._fetch_token(url, **request_kwargs)
        log.debug('Resetting callback_uri and realm (not needed in next phase).')
        self._client.client.callback_uri = None
        self._client.client.realm = None
        return token

    def fetch_access_token(self, url, verifier=None, **request_kwargs):
        """Fetch an access token.

        This is the final step in the OAuth 1 workflow. An access token is
        obtained using all previously obtained credentials, including the
        verifier from the authorization step.

        Note that a previously set verifier will be reset for your
        convenience, or else signature creation will be incorrect on
        consecutive requests.

        >>> access_token_url = 'https://api.twitter.com/oauth/access_token'
        >>> redirect_response = 'https://127.0.0.1/callback?oauth_token=kjerht2309uf&oauth_token_secret=lsdajfh923874&oauth_verifier=w34o8967345'
        >>> oauth_session = OAuth1Session('client-key', client_secret='secret')
        >>> oauth_session.parse_authorization_response(redirect_response)
        {
            'oauth_token: 'kjerht2309u',
            'oauth_token_secret: 'lsdajfh923874',
            'oauth_verifier: 'w34o8967345',
        }
        >>> oauth_session.fetch_access_token(access_token_url)
        {
            'oauth_token': 'sdf0o9823sjdfsdf',
            'oauth_token_secret': '2kjshdfp92i34asdasd',
        }
        """
        if verifier:
            self._client.client.verifier = verifier
        if not getattr(self._client.client, 'verifier', None):
            raise VerifierMissing('No client verifier has been set.')
        token = self._fetch_token(url, **request_kwargs)
        log.debug('Resetting verifier attribute, should not be used anymore.')
        self._client.client.verifier = None
        return token

    def parse_authorization_response(self, url):
        """Extract parameters from the post authorization redirect response URL.

        :param url: The full URL that resulted from the user being redirected
                    back from the OAuth provider to you, the client.
        :returns: A dict of parameters extracted from the URL.

        >>> redirect_response = 'https://127.0.0.1/callback?oauth_token=kjerht2309uf&oauth_token_secret=lsdajfh923874&oauth_verifier=w34o8967345'
        >>> oauth_session = OAuth1Session('client-key', client_secret='secret')
        >>> oauth_session.parse_authorization_response(redirect_response)
        {
            'oauth_token: 'kjerht2309u',
            'oauth_token_secret: 'lsdajfh923874',
            'oauth_verifier: 'w34o8967345',
        }
        """
        log.debug('Parsing token from query part of url %s', url)
        token = dict(urldecode(urlparse(url).query))
        log.debug('Updating internal client token attribute.')
        self._populate_attributes(token)
        return token

    def _populate_attributes(self, token):
        if 'oauth_token' in token:
            self._client.client.resource_owner_key = token['oauth_token']
        else:
            raise TokenMissing(
                'Response does not contain a token: {resp}'.format(resp=token),
                token,
            )
        if 'oauth_token_secret' in token:
            self._client.client.resource_owner_secret = (
                token['oauth_token_secret'])
        if 'oauth_verifier' in token:
            self._client.client.verifier = token['oauth_verifier']

    def _fetch_token(self, url, **request_kwargs):
        log.debug('Fetching token from %s using client %s', url, self._client.client)
        r = self.post(url, **request_kwargs)

        if r.status_code >= 400:
            error = "Token request failed with code %s, response was '%s'."
            raise TokenRequestDenied(error % (r.status_code, r.text), r)

        log.debug('Decoding token from response "%s"', r.text)
        try:
            token = dict(urldecode(r.text))
        except ValueError as e:
            error = ("Unable to decode token from token response. "
                     "This is commonly caused by an unsuccessful request where"
                     " a non urlencoded error message is returned. "
                     "The decoding error was %s""" % e)
            raise ValueError(error)

        log.debug('Obtained token %s', token)
        log.debug('Updating internal client attributes from token data.')
        self._populate_attributes(token)
        return token

    def rebuild_auth(self, prepared_request, response):
        """
        When being redirected we should always strip Authorization
        header, since nonce may not be reused as per OAuth spec.
        """
        if 'Authorization' in prepared_request.headers:
            # If we get redirected to a new host, we should strip out
            # any authentication headers.
            prepared_request.headers.pop('Authorization', True)
            prepared_request.prepare_auth(self.auth)
        return
36  lib/requests_oauthlib/oauth2_auth.py  Normal file
@@ -0,0 +1,36 @@
from __future__ import unicode_literals
from oauthlib.oauth2 import WebApplicationClient, InsecureTransportError
from oauthlib.oauth2 import is_secure_transport
from requests.auth import AuthBase


class OAuth2(AuthBase):
    """Adds proof of authorization (OAuth2 token) to the request."""

    def __init__(self, client_id=None, client=None, token=None):
        """Construct a new OAuth 2 authorization object.

        :param client_id: Client id obtained during registration
        :param client: :class:`oauthlib.oauth2.Client` to be used. Default is
                       WebApplicationClient which is useful for any
                       hosted application but not mobile or desktop.
        :param token: Token dictionary, must include access_token
                      and token_type.
        """
        self._client = client or WebApplicationClient(client_id, token=token)
        if token:
            for k, v in token.items():
                setattr(self._client, k, v)

    def __call__(self, r):
        """Append an OAuth 2 token to the request.

        Note that currently HTTPS is required for all requests. There may be
        a token type that allows for plain HTTP in the future and then this
        should be updated to allow plain HTTP on a white list basis.
        """
        if not is_secure_transport(r.url):
            raise InsecureTransportError()
        r.url, r.headers, r.body = self._client.add_token(r.url,
            http_method=r.method, body=r.body, headers=r.headers)
        return r
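Not part of this commit: a minimal sketch of attaching an already-obtained OAuth 2 token to plain requests calls with the OAuth2 auth hook above; the token, client id and URL are placeholders.

# Sketch only: OAuth2 works as a requests auth object for HTTPS requests.
import requests
from requests_oauthlib import OAuth2

token = {'access_token': 'abc123', 'token_type': 'Bearer'}
auth = OAuth2(client_id='client-id', token=token)
r = requests.get('https://api.example.com/resource', auth=auth)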
359  lib/requests_oauthlib/oauth2_session.py  Normal file
@@ -0,0 +1,359 @@
from __future__ import unicode_literals

import logging

from oauthlib.common import generate_token, urldecode
from oauthlib.oauth2 import WebApplicationClient, InsecureTransportError
from oauthlib.oauth2 import TokenExpiredError, is_secure_transport
import requests

log = logging.getLogger(__name__)


class TokenUpdated(Warning):
    def __init__(self, token):
        super(TokenUpdated, self).__init__()
        self.token = token


class OAuth2Session(requests.Session):
    """Versatile OAuth 2 extension to :class:`requests.Session`.

    Supports any grant type adhering to :class:`oauthlib.oauth2.Client` spec
    including the four core OAuth 2 grants.

    Can be used to create authorization urls, fetch tokens and access protected
    resources using the :class:`requests.Session` interface you are used to.

    - :class:`oauthlib.oauth2.WebApplicationClient` (default): Authorization Code Grant
    - :class:`oauthlib.oauth2.MobileApplicationClient`: Implicit Grant
    - :class:`oauthlib.oauth2.LegacyApplicationClient`: Password Credentials Grant
    - :class:`oauthlib.oauth2.BackendApplicationClient`: Client Credentials Grant

    Note that the only time you will be using Implicit Grant from python is if
    you are driving a user agent able to obtain URL fragments.
    """

    def __init__(self, client_id=None, client=None, auto_refresh_url=None,
            auto_refresh_kwargs=None, scope=None, redirect_uri=None, token=None,
            state=None, token_updater=None, **kwargs):
        """Construct a new OAuth 2 client session.

        :param client_id: Client id obtained during registration
        :param client: :class:`oauthlib.oauth2.Client` to be used. Default is
                       WebApplicationClient which is useful for any
                       hosted application but not mobile or desktop.
        :param scope: List of scopes you wish to request access to
        :param redirect_uri: Redirect URI you registered as callback
        :param token: Token dictionary, must include access_token
                      and token_type.
        :param state: State string used to prevent CSRF. This will be given
                      when creating the authorization url and must be supplied
                      when parsing the authorization response.
                      Can be either a string or a no argument callable.
        :auto_refresh_url: Refresh token endpoint URL, must be HTTPS. Supply
                           this if you wish the client to automatically refresh
                           your access tokens.
        :auto_refresh_kwargs: Extra arguments to pass to the refresh token
                              endpoint.
        :token_updater: Method with one argument, token, to be used to update
                        your token databse on automatic token refresh. If not
                        set a TokenUpdated warning will be raised when a token
                        has been refreshed. This warning will carry the token
                        in its token argument.
        :param kwargs: Arguments to pass to the Session constructor.
        """
        super(OAuth2Session, self).__init__(**kwargs)
        self._client = client or WebApplicationClient(client_id, token=token)
        self.token = token or {}
        self.scope = scope
        self.redirect_uri = redirect_uri
        self.state = state or generate_token
        self._state = state
        self.auto_refresh_url = auto_refresh_url
        self.auto_refresh_kwargs = auto_refresh_kwargs or {}
        self.token_updater = token_updater

        # Allow customizations for non compliant providers through various
        # hooks to adjust requests and responses.
        self.compliance_hook = {
            'access_token_response': set([]),
            'refresh_token_response': set([]),
            'protected_request': set([]),
        }

    def new_state(self):
        """Generates a state string to be used in authorizations."""
        try:
            self._state = self.state()
            log.debug('Generated new state %s.', self._state)
        except TypeError:
            self._state = self.state
            log.debug('Re-using previously supplied state %s.', self._state)
        return self._state

    @property
    def client_id(self):
        return getattr(self._client, "client_id", None)

    @client_id.setter
    def client_id(self, value):
        self._client.client_id = value

    @client_id.deleter
    def client_id(self):
        del self._client.client_id

    @property
    def token(self):
        return getattr(self._client, "token", None)

    @token.setter
    def token(self, value):
        self._client.token = value
        self._client._populate_attributes(value)

    @property
    def access_token(self):
        return getattr(self._client, "access_token", None)

    @access_token.setter
    def access_token(self, value):
        self._client.access_token = value

    @access_token.deleter
    def access_token(self):
        del self._client.access_token

    @property
    def authorized(self):
        """Boolean that indicates whether this session has an OAuth token
        or not. If `self.authorized` is True, you can reasonably expect
        OAuth-protected requests to the resource to succeed. If
        `self.authorized` is False, you need the user to go through the OAuth
        authentication dance before OAuth-protected requests to the resource
        will succeed.
        """
        return bool(self.access_token)

    def authorization_url(self, url, state=None, **kwargs):
        """Form an authorization URL.

        :param url: Authorization endpoint url, must be HTTPS.
        :param state: An optional state string for CSRF protection. If not
                      given it will be generated for you.
        :param kwargs: Extra parameters to include.
        :return: authorization_url, state
        """
        state = state or self.new_state()
        return self._client.prepare_request_uri(url,
                redirect_uri=self.redirect_uri,
                scope=self.scope,
                state=state,
                **kwargs), state

    def fetch_token(self, token_url, code=None, authorization_response=None,
            body='', auth=None, username=None, password=None, method='POST',
            timeout=None, headers=None, verify=True, **kwargs):
        """Generic method for fetching an access token from the token endpoint.

        If you are using the MobileApplicationClient you will want to use
        token_from_fragment instead of fetch_token.

        :param token_url: Token endpoint URL, must use HTTPS.
        :param code: Authorization code (used by WebApplicationClients).
        :param authorization_response: Authorization response URL, the callback
                                       URL of the request back to you. Used by
                                       WebApplicationClients instead of code.
        :param body: Optional application/x-www-form-urlencoded body to add the
                     include in the token request. Prefer kwargs over body.
        :param auth: An auth tuple or method as accepted by requests.
        :param username: Username used by LegacyApplicationClients.
        :param password: Password used by LegacyApplicationClients.
        :param method: The HTTP method used to make the request. Defaults
                       to POST, but may also be GET. Other methods should
                       be added as needed.
        :param headers: Dict to default request headers with.
        :param timeout: Timeout of the request in seconds.
        :param verify: Verify SSL certificate.
        :param kwargs: Extra parameters to include in the token request.
        :return: A token dict
        """
        if not is_secure_transport(token_url):
            raise InsecureTransportError()

        if not code and authorization_response:
            self._client.parse_request_uri_response(authorization_response,
                    state=self._state)
            code = self._client.code
        elif not code and isinstance(self._client, WebApplicationClient):
            code = self._client.code
            if not code:
                raise ValueError('Please supply either code or '
                                 'authorization_code parameters.')


        body = self._client.prepare_request_body(code=code, body=body,
                redirect_uri=self.redirect_uri, username=username,
                password=password, **kwargs)

        if (not auth) and username:
            if password is None:
                raise ValueError('Username was supplied, but not password.')
            auth = requests.auth.HTTPBasicAuth(username, password)

        headers = headers or {
            'Accept': 'application/json',
            'Content-Type': 'application/x-www-form-urlencoded;charset=UTF-8',
        }
        self.token = {}
        if method.upper() == 'POST':
            r = self.post(token_url, data=dict(urldecode(body)),
                timeout=timeout, headers=headers, auth=auth,
                verify=verify)
            log.debug('Prepared fetch token request body %s', body)
        elif method.upper() == 'GET':
            # if method is not 'POST', switch body to querystring and GET
            r = self.get(token_url, params=dict(urldecode(body)),
                timeout=timeout, headers=headers, auth=auth,
                verify=verify)
            log.debug('Prepared fetch token request querystring %s', body)
        else:
            raise ValueError('The method kwarg must be POST or GET.')

        log.debug('Request to fetch token completed with status %s.',
                  r.status_code)
        log.debug('Request headers were %s', r.request.headers)
        log.debug('Request body was %s', r.request.body)
        log.debug('Response headers were %s and content %s.',
                  r.headers, r.text)
        log.debug('Invoking %d token response hooks.',
                  len(self.compliance_hook['access_token_response']))
        for hook in self.compliance_hook['access_token_response']:
            log.debug('Invoking hook %s.', hook)
            r = hook(r)

        self._client.parse_request_body_response(r.text, scope=self.scope)
        self.token = self._client.token
        log.debug('Obtained token %s.', self.token)
        return self.token

    def token_from_fragment(self, authorization_response):
        """Parse token from the URI fragment, used by MobileApplicationClients.

        :param authorization_response: The full URL of the redirect back to you
        :return: A token dict
        """
        self._client.parse_request_uri_response(authorization_response,
                state=self._state)
        self.token = self._client.token
        return self.token

    def refresh_token(self, token_url, refresh_token=None, body='', auth=None,
            timeout=None, headers=None, verify=True, **kwargs):
        """Fetch a new access token using a refresh token.

        :param token_url: The token endpoint, must be HTTPS.
        :param refresh_token: The refresh_token to use.
        :param body: Optional application/x-www-form-urlencoded body to add the
                     include in the token request. Prefer kwargs over body.
        :param auth: An auth tuple or method as accepted by requests.
        :param timeout: Timeout of the request in seconds.
        :param verify: Verify SSL certificate.
        :param kwargs: Extra parameters to include in the token request.
        :return: A token dict
        """
        if not token_url:
            raise ValueError('No token endpoint set for auto_refresh.')

        if not is_secure_transport(token_url):
            raise InsecureTransportError()

        refresh_token = refresh_token or self.token.get('refresh_token')

        log.debug('Adding auto refresh key word arguments %s.',
                  self.auto_refresh_kwargs)
        kwargs.update(self.auto_refresh_kwargs)
        body = self._client.prepare_refresh_body(body=body,
                refresh_token=refresh_token, scope=self.scope, **kwargs)
        log.debug('Prepared refresh token request body %s', body)

        if headers is None:
            headers = {
                'Accept': 'application/json',
                'Content-Type': (
                    'application/x-www-form-urlencoded;charset=UTF-8'
                ),
            }

        r = self.post(token_url, data=dict(urldecode(body)), auth=auth,
            timeout=timeout, headers=headers, verify=verify, withhold_token=True)
        log.debug('Request to refresh token completed with status %s.',
                  r.status_code)
        log.debug('Response headers were %s and content %s.',
                  r.headers, r.text)
        log.debug('Invoking %d token response hooks.',
                  len(self.compliance_hook['refresh_token_response']))
        for hook in self.compliance_hook['refresh_token_response']:
            log.debug('Invoking hook %s.', hook)
            r = hook(r)

        self.token = self._client.parse_request_body_response(r.text, scope=self.scope)
        if not 'refresh_token' in self.token:
            log.debug('No new refresh token given. Re-using old.')
            self.token['refresh_token'] = refresh_token
        return self.token

    def request(self, method, url, data=None, headers=None, withhold_token=False, **kwargs):
        """Intercept all requests and add the OAuth 2 token if present."""
        if not is_secure_transport(url):
            raise InsecureTransportError()
        if self.token and not withhold_token:
            log.debug('Invoking %d protected resource request hooks.',
                      len(self.compliance_hook['protected_request']))
            for hook in self.compliance_hook['protected_request']:
                log.debug('Invoking hook %s.', hook)
                url, headers, data = hook(url, headers, data)

            log.debug('Adding token %s to request.', self.token)
            try:
                url, headers, data = self._client.add_token(url,
                        http_method=method, body=data, headers=headers)
            # Attempt to retrieve and save new access token if expired
            except TokenExpiredError:
                if self.auto_refresh_url:
                    log.debug('Auto refresh is set, attempting to refresh at %s.',
                              self.auto_refresh_url)
                    token = self.refresh_token(self.auto_refresh_url, **kwargs)
                    if self.token_updater:
                        log.debug('Updating token to %s using %s.',
                                  token, self.token_updater)
                        self.token_updater(token)
                        url, headers, data = self._client.add_token(url,
                                http_method=method, body=data, headers=headers)
                    else:
                        raise TokenUpdated(token)
                else:
                    raise

        log.debug('Requesting url %s using method %s.', url, method)
        log.debug('Supplying headers %s and data %s', headers, data)
        log.debug('Passing through key word arguments %s.', kwargs)
        return super(OAuth2Session, self).request(method, url,
                headers=headers, data=data, **kwargs)

    def register_compliance_hook(self, hook_type, hook):
        """Register a hook for request/response tweaking.

        Available hooks are:
            access_token_response invoked before token parsing.
            refresh_token_response invoked before refresh token parsing.
            protected_request invoked before making a request.

        If you find a new hook is needed please send a GitHub PR request
        or open an issue.
        """
        if hook_type not in self.compliance_hook:
            raise ValueError('Hook type %s is not in %s.',
                             hook_type, self.compliance_hook)
        self.compliance_hook[hook_type].add(hook)
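Not part of this commit: a minimal sketch of the automatic refresh behaviour described in the constructor docstring above (auto_refresh_url plus token_updater); all URLs, credentials and the stored token are placeholders.

# Sketch only: a session that refreshes its token transparently when it expires.
from requests_oauthlib import OAuth2Session

existing_token = {'access_token': 'abc', 'refresh_token': 'def',
                  'token_type': 'Bearer', 'expires_in': -30}  # already expired

def save_token(token):
    # persist the refreshed token wherever the application keeps it
    print('token refreshed', token)

session = OAuth2Session(
    'client-id', token=existing_token,
    auto_refresh_url='https://provider.example/oauth/token',
    auto_refresh_kwargs={'client_id': 'client-id', 'client_secret': 'client-secret'},
    token_updater=save_token)
r = session.get('https://provider.example/api/resource')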
56  lib/twitter/__init__.py  Normal file
@@ -0,0 +1,56 @@
#!/usr/bin/env python
#
# vim: sw=2 ts=2 sts=2
#
# Copyright 2007 The Python-Twitter Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""A library that provides a Python interface to the Twitter API"""
from __future__ import absolute_import

__author__ = 'The Python-Twitter Developers'
__email__ = 'python-twitter@googlegroups.com'
__copyright__ = 'Copyright (c) 2007-2016 The Python-Twitter Developers'
__license__ = 'Apache License 2.0'
__version__ = '3.0rc1'
__url__ = 'https://github.com/bear/python-twitter'
__download_url__ = 'https://pypi.python.org/pypi/python-twitter'
__description__ = 'A Python wrapper around the Twitter API'


import json  # noqa

try:
    from hashlib import md5  # noqa
except ImportError:
    from md5 import md5  # noqa

from ._file_cache import _FileCache  # noqa
from .error import TwitterError  # noqa
from .parse_tweet import ParseTweet  # noqa

from .models import (  # noqa
    Category,  # noqa
    DirectMessage,  # noqa
    Hashtag,  # noqa
    List,  # noqa
    Media,  # noqa
    Trend,  # noqa
    Url,  # noqa
    User,  # noqa
    UserStatus,  # noqa
    Status  # noqa
)

from .api import Api  # noqa
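Not part of this commit: a minimal sketch of the kind of call a poster-enabled Twitter notification could make with this vendored library; the keys and media URL are placeholders, and the media keyword is assumed to be supported by this python-twitter version.

# Sketch only: post a status with an attached image (e.g. a poster).
import twitter

api = twitter.Api(consumer_key='ck', consumer_secret='cs',
                  access_token_key='atk', access_token_secret='ats')
status = api.PostUpdate('New episode added', media='https://example.com/poster.jpg')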
161  lib/twitter/_file_cache.py  Normal file
@@ -0,0 +1,161 @@
#!/usr/bin/env python
import errno
import os
import re
import tempfile

from hashlib import md5


class _FileCacheError(Exception):
    """Base exception class for FileCache related errors"""


class _FileCache(object):
    DEPTH = 3

    def __init__(self, root_directory=None):
        self._InitializeRootDirectory(root_directory)

    def Get(self, key):
        path = self._GetPath(key)
        if os.path.exists(path):
            with open(path) as f:
                return f.read()
        else:
            return None

    def Set(self, key, data):
        path = self._GetPath(key)
        directory = os.path.dirname(path)
        if not os.path.exists(directory):
            os.makedirs(directory)
        if not os.path.isdir(directory):
            raise _FileCacheError('%s exists but is not a directory' % directory)
        temp_fd, temp_path = tempfile.mkstemp()
        temp_fp = os.fdopen(temp_fd, 'w')
        temp_fp.write(data)
        temp_fp.close()
        if not path.startswith(self._root_directory):
            raise _FileCacheError('%s does not appear to live under %s' %
                                  (path, self._root_directory))
        if os.path.exists(path):
            os.remove(path)
        os.rename(temp_path, path)

    def Remove(self, key):
        path = self._GetPath(key)
        if not path.startswith(self._root_directory):
            raise _FileCacheError('%s does not appear to live under %s' %
                                  (path, self._root_directory ))
        if os.path.exists(path):
            os.remove(path)

    def GetCachedTime(self, key):
        path = self._GetPath(key)
        if os.path.exists(path):
            return os.path.getmtime(path)
        else:
            return None

    def _GetUsername(self):
        """Attempt to find the username in a cross-platform fashion."""
        try:
            return os.getenv('USER') or \
                os.getenv('LOGNAME') or \
                os.getenv('USERNAME') or \
                os.getlogin() or \
                'nobody'
        except (AttributeError, IOError, OSError):
            return 'nobody'

    def _GetTmpCachePath(self):
        username = self._GetUsername()
        cache_directory = 'python.cache_' + username
        return os.path.join(tempfile.gettempdir(), cache_directory)

    def _InitializeRootDirectory(self, root_directory):
        if not root_directory:
            root_directory = self._GetTmpCachePath()
        root_directory = os.path.abspath(root_directory)
        try:
            os.mkdir(root_directory)
        except OSError as e:
            if e.errno == errno.EEXIST and os.path.isdir(root_directory):
                # directory already exists
                pass
            else:
                # exists but is a file, or no permissions, or...
                raise
        self._root_directory = root_directory

    def _GetPath(self, key):
        try:
            hashed_key = md5(key.encode('utf-8')).hexdigest()
        except TypeError:
            hashed_key = md5.new(key).hexdigest()

        return os.path.join(self._root_directory,
                            self._GetPrefix(hashed_key),
                            hashed_key)

    def _GetPrefix(self, hashed_key):
        return os.path.sep.join(hashed_key[0:_FileCache.DEPTH])


class ParseTweet(object):
    # compile once on import
    regexp = {"RT": "^RT", "MT": r"^MT", "ALNUM": r"(@[a-zA-Z0-9_]+)",
              "HASHTAG": r"(#[\w\d]+)", "URL": r"([http://]?[a-zA-Z\d\/]+[\.]+[a-zA-Z\d\/\.]+)"}
    regexp = dict((key, re.compile(value)) for key, value in list(regexp.items()))

    def __init__(self, timeline_owner, tweet):
        """ timeline_owner : twitter handle of user account. tweet - 140 chars from feed; object does all computation on construction
            properties:
            RT, MT - boolean
            URLs - list of URL
            Hashtags - list of tags
        """
        self.Owner = timeline_owner
        self.tweet = tweet
        self.UserHandles = ParseTweet.getUserHandles(tweet)
        self.Hashtags = ParseTweet.getHashtags(tweet)
        self.URLs = ParseTweet.getURLs(tweet)
        self.RT = ParseTweet.getAttributeRT(tweet)
        self.MT = ParseTweet.getAttributeMT(tweet)

        # additional intelligence
        if ( self.RT and len(self.UserHandles) > 0 ):  # change the owner of tweet?
            self.Owner = self.UserHandles[0]
        return

    def __str__(self):
        """ for display method """
        return "owner %s, urls: %d, hashtags %d, user_handles %d, len_tweet %d, RT = %s, MT = %s" % (
            self.Owner, len(self.URLs), len(self.Hashtags), len(self.UserHandles),
            len(self.tweet), self.RT, self.MT)

    @staticmethod
    def getAttributeRT(tweet):
        """ see if tweet is a RT """
        return re.search(ParseTweet.regexp["RT"], tweet.strip()) is not None

    @staticmethod
    def getAttributeMT(tweet):
        """ see if tweet is a MT """
        return re.search(ParseTweet.regexp["MT"], tweet.strip()) is not None

    @staticmethod
    def getUserHandles(tweet):
        """ given a tweet we try and extract all user handles in order of occurrence"""
        return re.findall(ParseTweet.regexp["ALNUM"], tweet)

    @staticmethod
    def getHashtags(tweet):
        """ return all hashtags"""
        return re.findall(ParseTweet.regexp["HASHTAG"], tweet)

    @staticmethod
    def getURLs(tweet):
        """ URL : [http://]?[\w\.?/]+"""
        return re.findall(ParseTweet.regexp["URL"], tweet)
4534
lib/twitter/api.py
Normal file
File diff suppressed because it is too large
10
lib/twitter/error.py
Normal file
|
@ -0,0 +1,10 @@
|
||||||
|
#!/usr/bin/env python
|
||||||
|
|
||||||
|
|
||||||
|
class TwitterError(Exception):
|
||||||
|
"""Base class for Twitter errors"""
|
||||||
|
|
||||||
|
@property
|
||||||
|
def message(self):
|
||||||
|
'''Returns the first argument used to construct this error.'''
|
||||||
|
return self.args[0]
|
476
lib/twitter/models.py
Normal file
|
@ -0,0 +1,476 @@
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
from __future__ import unicode_literals
|
||||||
|
|
||||||
|
import json
|
||||||
|
from calendar import timegm
|
||||||
|
|
||||||
|
try:
|
||||||
|
from rfc822 import parsedate
|
||||||
|
except ImportError:
|
||||||
|
from email.utils import parsedate
|
||||||
|
|
||||||
|
|
||||||
|
class TwitterModel(object):
|
||||||
|
|
||||||
|
""" Base class from which all twitter models will inherit. """
|
||||||
|
|
||||||
|
def __init__(self, **kwargs):
|
||||||
|
self.param_defaults = {}
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
""" Returns a string representation of TwitterModel. By default
|
||||||
|
this is the same as AsJsonString(). """
|
||||||
|
return self.AsJsonString()
|
||||||
|
|
||||||
|
def __eq__(self, other):
|
||||||
|
return other and self.AsDict() == other.AsDict()
|
||||||
|
|
||||||
|
def __ne__(self, other):
|
||||||
|
return not self.__eq__(other)
|
||||||
|
|
||||||
|
def AsJsonString(self):
|
||||||
|
""" Returns the TwitterModel as a JSON string based on key/value
|
||||||
|
pairs returned from the AsDict() method. """
|
||||||
|
return json.dumps(self.AsDict(), sort_keys=True)
|
||||||
|
|
||||||
|
def AsDict(self):
|
||||||
|
""" Create a dictionary representation of the object. Please see inline
|
||||||
|
comments on construction when dictionaries contain TwitterModels. """
|
||||||
|
data = {}
|
||||||
|
|
||||||
|
for (key, value) in self.param_defaults.items():
|
||||||
|
|
||||||
|
# If the value is a list, we need to create a list to hold the
|
||||||
|
# dicts created by an object supporting the AsDict() method,
|
||||||
|
# i.e., if it inherits from TwitterModel. If the item in the list
|
||||||
|
# doesn't support the AsDict() method, then we assign the value
|
||||||
|
# directly. An example being a list of Media objects contained
|
||||||
|
# within a Status object.
|
||||||
|
if isinstance(getattr(self, key, None), (list, tuple, set)):
|
||||||
|
data[key] = list()
|
||||||
|
for subobj in getattr(self, key, None):
|
||||||
|
if getattr(subobj, 'AsDict', None):
|
||||||
|
data[key].append(subobj.AsDict())
|
||||||
|
else:
|
||||||
|
data[key].append(subobj)
|
||||||
|
|
||||||
|
# Not a list, *but still a subclass of TwitterModel*,
|
||||||
|
# and we can assign the data[key] directly with the AsDict()
|
||||||
|
# method of the object. An example being a Status object contained
|
||||||
|
# within a User object.
|
||||||
|
elif getattr(getattr(self, key, None), 'AsDict', None):
|
||||||
|
data[key] = getattr(self, key).AsDict()
|
||||||
|
|
||||||
|
# If the value doesn't have an AsDict() method, i.e., it's not
|
||||||
|
# something that subclasses TwitterModel, then we can use direct
|
||||||
|
# assignment.
|
||||||
|
elif getattr(self, key, None):
|
||||||
|
data[key] = getattr(self, key, None)
|
||||||
|
return data
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def NewFromJsonDict(cls, data, **kwargs):
|
||||||
|
""" Create a new instance based on a JSON dict. Any kwargs should be
|
||||||
|
supplied by the inherited, calling class.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
data: A JSON dict, as converted from the JSON in the twitter API.
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
if kwargs:
|
||||||
|
for key, val in kwargs.items():
|
||||||
|
data[key] = val
|
||||||
|
|
||||||
|
return cls(**data)
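TwitterModel above supplies the serialization plumbing (AsDict, AsJsonString, NewFromJsonDict, equality) that every model below inherits. A minimal round trip, sketched with the Hashtag model defined further down in this file and an invented payload:

# Illustrative payload; not real Twitter output.
tag = Hashtag.NewFromJsonDict({'text': 'PlexPy'})
print(tag)                            # __str__ -> AsJsonString -> {"text": "PlexPy"}
print(tag.AsDict())                   # {'text': 'PlexPy'}
print(tag == Hashtag(text='PlexPy'))  # True; __eq__ compares AsDict() output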
|
||||||
|
|
||||||
|
|
||||||
|
class Media(TwitterModel):
|
||||||
|
|
||||||
|
"""A class representing the Media component of a tweet. """
|
||||||
|
|
||||||
|
def __init__(self, **kwargs):
|
||||||
|
self.param_defaults = {
|
||||||
|
'display_url': None,
|
||||||
|
'expanded_url': None,
|
||||||
|
'id': None,
|
||||||
|
'media_url': None,
|
||||||
|
'media_url_https': None,
|
||||||
|
'type': None,
|
||||||
|
'url': None,
|
||||||
|
}
|
||||||
|
|
||||||
|
for (param, default) in self.param_defaults.items():
|
||||||
|
setattr(self, param, kwargs.get(param, default))
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
return "Media(ID={media_id}, Type={media_type}, DisplayURL='{url}')".format(
|
||||||
|
media_id=self.id,
|
||||||
|
media_type=self.type,
|
||||||
|
url=self.display_url)
|
||||||
|
|
||||||
|
|
||||||
|
class List(TwitterModel):
|
||||||
|
|
||||||
|
"""A class representing the List structure used by the twitter API. """
|
||||||
|
|
||||||
|
def __init__(self, **kwargs):
|
||||||
|
self.param_defaults = {
|
||||||
|
'description': None,
|
||||||
|
'following': None,
|
||||||
|
'full_name': None,
|
||||||
|
'id': None,
|
||||||
|
'member_count': None,
|
||||||
|
'mode': None,
|
||||||
|
'name': None,
|
||||||
|
'slug': None,
|
||||||
|
'subscriber_count': None,
|
||||||
|
'uri': None,
|
||||||
|
'user': None,
|
||||||
|
}
|
||||||
|
|
||||||
|
for (param, default) in self.param_defaults.items():
|
||||||
|
setattr(self, param, kwargs.get(param, default))
|
||||||
|
|
||||||
|
if 'user' in kwargs:
|
||||||
|
self.user = User.NewFromJsonDict(kwargs.get('user'))
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
return "List(ID={list_id}, FullName={full_name!r}, Slug={slug}, User={user})".format(
|
||||||
|
list_id=self.id,
|
||||||
|
full_name=self.full_name,
|
||||||
|
slug=self.slug,
|
||||||
|
user=self.user.screen_name)
|
||||||
|
|
||||||
|
|
||||||
|
class Category(TwitterModel):
|
||||||
|
|
||||||
|
"""A class representing the suggested user category structure. """
|
||||||
|
|
||||||
|
def __init__(self, **kwargs):
|
||||||
|
self.param_defaults = {
|
||||||
|
'name': None,
|
||||||
|
'size': None,
|
||||||
|
'slug': None,
|
||||||
|
}
|
||||||
|
|
||||||
|
for (param, default) in self.param_defaults.items():
|
||||||
|
setattr(self, param, kwargs.get(param, default))
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
return "Category(Name={name!r}, Slug={slug}, Size={size})".format(
|
||||||
|
name=self.name,
|
||||||
|
slug=self.slug,
|
||||||
|
size=self.size)
|
||||||
|
|
||||||
|
|
||||||
|
class DirectMessage(TwitterModel):
|
||||||
|
|
||||||
|
"""A class representing a Direct Message. """
|
||||||
|
|
||||||
|
def __init__(self, **kwargs):
|
||||||
|
self.param_defaults = {
|
||||||
|
'created_at': None,
|
||||||
|
'id': None,
|
||||||
|
'recipient_id': None,
|
||||||
|
'recipient_screen_name': None,
|
||||||
|
'sender_id': None,
|
||||||
|
'sender_screen_name': None,
|
||||||
|
'text': None,
|
||||||
|
}
|
||||||
|
|
||||||
|
for (param, default) in self.param_defaults.items():
|
||||||
|
setattr(self, param, kwargs.get(param, default))
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
if self.text and len(self.text) > 140:
|
||||||
|
text = "{text}[...]".format(text=self.text[:140])
|
||||||
|
else:
|
||||||
|
text = self.text
|
||||||
|
return "DirectMessage(ID={dm_id}, Sender={sender}, Created={time}, Text='{text!r}')".format(
|
||||||
|
dm_id=self.id,
|
||||||
|
sender=self.sender_screen_name,
|
||||||
|
time=self.created_at,
|
||||||
|
text=text)
|
||||||
|
|
||||||
|
|
||||||
|
class Trend(TwitterModel):
|
||||||
|
|
||||||
|
""" A class representing a trending topic. """
|
||||||
|
|
||||||
|
def __init__(self, **kwargs):
|
||||||
|
self.param_defaults = {
|
||||||
|
'events': None,
|
||||||
|
'name': None,
|
||||||
|
'promoted_content': None,
|
||||||
|
'query': None,
|
||||||
|
'timestamp': None,
|
||||||
|
'url': None,
|
||||||
|
'volume': None,
|
||||||
|
}
|
||||||
|
|
||||||
|
for (param, default) in self.param_defaults.items():
|
||||||
|
setattr(self, param, kwargs.get(param, default))
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
return "Trend(Name={0!r}, Time={1}, URL={2})".format(
|
||||||
|
self.name,
|
||||||
|
self.timestamp,
|
||||||
|
self.url)
|
||||||
|
|
||||||
|
|
||||||
|
class Hashtag(TwitterModel):
|
||||||
|
|
||||||
|
""" A class representing a twitter hashtag. """
|
||||||
|
|
||||||
|
def __init__(self, **kwargs):
|
||||||
|
self.param_defaults = {
|
||||||
|
'text': None
|
||||||
|
}
|
||||||
|
|
||||||
|
for (param, default) in self.param_defaults.items():
|
||||||
|
setattr(self, param, kwargs.get(param, default))
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
return "Hashtag(Text={text!r})".format(
|
||||||
|
text=self.text)
|
||||||
|
|
||||||
|
|
||||||
|
class Url(TwitterModel):
|
||||||
|
|
||||||
|
""" A class representing an URL contained in a tweet. """
|
||||||
|
|
||||||
|
def __init__(self, **kwargs):
|
||||||
|
self.param_defaults = {
|
||||||
|
'expanded_url': None,
|
||||||
|
'url': None}
|
||||||
|
|
||||||
|
for (param, default) in self.param_defaults.items():
|
||||||
|
setattr(self, param, kwargs.get(param, default))
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
return "URL(URL={url}, ExpandedURL={eurl})".format(
|
||||||
|
url=self.url,
|
||||||
|
eurl=self.expanded_url)
|
||||||
|
|
||||||
|
|
||||||
|
class UserStatus(TwitterModel):
|
||||||
|
|
||||||
|
""" A class representing the UserStatus structure. This is an abbreviated
|
||||||
|
form of the twitter.User object. """
|
||||||
|
|
||||||
|
connections = {'following': False,
|
||||||
|
'followed_by': False,
|
||||||
|
'following_received': False,
|
||||||
|
'following_requested': False,
|
||||||
|
'blocking': False,
|
||||||
|
'muting': False}
|
||||||
|
|
||||||
|
def __init__(self, **kwargs):
|
||||||
|
self.param_defaults = {
|
||||||
|
'blocking': False,
|
||||||
|
'followed_by': False,
|
||||||
|
'following': False,
|
||||||
|
'following_received': False,
|
||||||
|
'following_requested': False,
|
||||||
|
'id': None,
|
||||||
|
'id_str': None,
|
||||||
|
'muting': False,
|
||||||
|
'name': None,
|
||||||
|
'screen_name': None,
|
||||||
|
}
|
||||||
|
|
||||||
|
for (param, default) in self.param_defaults.items():
|
||||||
|
setattr(self, param, kwargs.get(param, default))
|
||||||
|
|
||||||
|
if 'connections' in kwargs:
|
||||||
|
for param in self.connections:
|
||||||
|
if param in kwargs['connections']:
|
||||||
|
setattr(self, param, True)
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
connections = [param for param in self.connections if getattr(self, param)]
|
||||||
|
return "UserStatus(ID={uid}, ScreenName={sn}, Connections=[{conn}])".format(
|
||||||
|
uid=self.id,
|
||||||
|
sn=self.screen_name,
|
||||||
|
conn=", ".join(connections))
|
||||||
|
|
||||||
|
|
||||||
|
class User(TwitterModel):
|
||||||
|
|
||||||
|
"""A class representing the User structure. """
|
||||||
|
|
||||||
|
def __init__(self, **kwargs):
|
||||||
|
self.param_defaults = {
|
||||||
|
'contributors_enabled': None,
|
||||||
|
'created_at': None,
|
||||||
|
'default_profile': None,
|
||||||
|
'default_profile_image': None,
|
||||||
|
'description': None,
|
||||||
|
'favourites_count': None,
|
||||||
|
'followers_count': None,
|
||||||
|
'friends_count': None,
|
||||||
|
'geo_enabled': None,
|
||||||
|
'id': None,
|
||||||
|
'lang': None,
|
||||||
|
'listed_count': None,
|
||||||
|
'location': None,
|
||||||
|
'name': None,
|
||||||
|
'notifications': None,
|
||||||
|
'profile_background_color': None,
|
||||||
|
'profile_background_image_url': None,
|
||||||
|
'profile_background_tile': None,
|
||||||
|
'profile_banner_url': None,
|
||||||
|
'profile_image_url': None,
|
||||||
|
'profile_link_color': None,
|
||||||
|
'profile_sidebar_fill_color': None,
|
||||||
|
'profile_text_color': None,
|
||||||
|
'protected': None,
|
||||||
|
'screen_name': None,
|
||||||
|
'status': None,
|
||||||
|
'statuses_count': None,
|
||||||
|
'time_zone': None,
|
||||||
|
'url': None,
|
||||||
|
'utc_offset': None,
|
||||||
|
'verified': None,
|
||||||
|
}
|
||||||
|
|
||||||
|
for (param, default) in self.param_defaults.items():
|
||||||
|
setattr(self, param, kwargs.get(param, default))
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
return "User(ID={uid}, ScreenName={sn})".format(
|
||||||
|
uid=self.id,
|
||||||
|
sn=self.screen_name)
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def NewFromJsonDict(cls, data, **kwargs):
|
||||||
|
from twitter import Status
|
||||||
|
if data.get('status', None):
|
||||||
|
status = Status.NewFromJsonDict(data.get('status'))
|
||||||
|
return super(cls, cls).NewFromJsonDict(data=data, status=status)
|
||||||
|
else:
|
||||||
|
return super(cls, cls).NewFromJsonDict(data=data)
|
||||||
|
|
||||||
|
|
||||||
|
class Status(TwitterModel):
|
||||||
|
"""A class representing the Status structure used by the twitter API.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, **kwargs):
|
||||||
|
self.param_defaults = {
|
||||||
|
'contributors': None,
|
||||||
|
'coordinates': None,
|
||||||
|
'created_at': None,
|
||||||
|
'current_user_retweet': None,
|
||||||
|
'favorite_count': None,
|
||||||
|
'favorited': None,
|
||||||
|
'geo': None,
|
||||||
|
'hashtags': None,
|
||||||
|
'id': None,
|
||||||
|
'id_str': None,
|
||||||
|
'in_reply_to_screen_name': None,
|
||||||
|
'in_reply_to_status_id': None,
|
||||||
|
'in_reply_to_user_id': None,
|
||||||
|
'lang': None,
|
||||||
|
'location': None,
|
||||||
|
'media': None,
|
||||||
|
'place': None,
|
||||||
|
'possibly_sensitive': None,
|
||||||
|
'retweet_count': None,
|
||||||
|
'retweeted': None,
|
||||||
|
'retweeted_status': None,
|
||||||
|
'scopes': None,
|
||||||
|
'source': None,
|
||||||
|
'text': None,
|
||||||
|
'truncated': None,
|
||||||
|
'urls': None,
|
||||||
|
'user': None,
|
||||||
|
'user_mentions': None,
|
||||||
|
'withheld_copyright': None,
|
||||||
|
'withheld_in_countries': None,
|
||||||
|
'withheld_scope': None,
|
||||||
|
}
|
||||||
|
|
||||||
|
for (param, default) in self.param_defaults.items():
|
||||||
|
setattr(self, param, kwargs.get(param, default))
|
||||||
|
|
||||||
|
@property
|
||||||
|
def created_at_in_seconds(self):
|
||||||
|
""" Get the time this status message was posted, in seconds since
|
||||||
|
the epoch (1 Jan 1970).
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
int: The time this status message was posted, in seconds since
|
||||||
|
the epoch.
|
||||||
|
"""
|
||||||
|
return timegm(parsedate(self.created_at))
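created_at_in_seconds relies on parsedate (rfc822 or email.utils) being lenient enough to accept Twitter's created_at format, then converts the parsed tuple to epoch seconds with timegm. A quick hedged check with a made-up timestamp:

from calendar import timegm
from email.utils import parsedate

created_at = 'Wed Jan 13 12:00:00 +0000 2016'   # invented, Twitter-style
print(timegm(parsedate(created_at)))            # 1452686400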
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
""" A string representation of this twitter.Status instance.
|
||||||
|
The return value is the ID of status, username and datetime.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
string: A string representation of this twitter.Status instance with
|
||||||
|
the ID of status, username and datetime.
|
||||||
|
"""
|
||||||
|
if self.user:
|
||||||
|
return "Status(ID={0}, ScreenName={1}, Created={2}, Text={3!r})".format(
|
||||||
|
self.id,
|
||||||
|
self.user.screen_name,
|
||||||
|
self.created_at,
|
||||||
|
self.text)
|
||||||
|
else:
|
||||||
|
return u"Status(ID={0}, Created={1}, Text={2!r})".format(
|
||||||
|
self.id,
|
||||||
|
self.created_at,
|
||||||
|
self.text)
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def NewFromJsonDict(cls, data, **kwargs):
|
||||||
|
""" Create a new instance based on a JSON dict.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
data: A JSON dict, as converted from the JSON in the twitter API
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
A twitter.Status instance
|
||||||
|
"""
|
||||||
|
current_user_retweet = None
|
||||||
|
hashtags = None
|
||||||
|
media = None
|
||||||
|
retweeted_status = None
|
||||||
|
urls = None
|
||||||
|
user = None
|
||||||
|
user_mentions = None
|
||||||
|
|
||||||
|
if 'user' in data:
|
||||||
|
user = User.NewFromJsonDict(data['user'])
|
||||||
|
if 'retweeted_status' in data:
|
||||||
|
retweeted_status = Status.NewFromJsonDict(data['retweeted_status'])
|
||||||
|
if 'current_user_retweet' in data:
|
||||||
|
current_user_retweet = data['current_user_retweet']['id']
|
||||||
|
|
||||||
|
if 'entities' in data:
|
||||||
|
if 'urls' in data['entities']:
|
||||||
|
urls = [Url.NewFromJsonDict(u) for u in data['entities']['urls']]
|
||||||
|
if 'user_mentions' in data['entities']:
|
||||||
|
user_mentions = [User.NewFromJsonDict(u) for u in data['entities']['user_mentions']]
|
||||||
|
if 'hashtags' in data['entities']:
|
||||||
|
hashtags = [Hashtag.NewFromJsonDict(h) for h in data['entities']['hashtags']]
|
||||||
|
if 'media' in data['entities']:
|
||||||
|
media = [Media.NewFromJsonDict(m) for m in data['entities']['media']]
|
||||||
|
|
||||||
|
# the new extended entities
|
||||||
|
if 'extended_entities' in data:
|
||||||
|
if 'media' in data['extended_entities']:
|
||||||
|
media = [Media.NewFromJsonDict(m) for m in data['extended_entities']['media']]
|
||||||
|
|
||||||
|
return super(cls, cls).NewFromJsonDict(data=data,
|
||||||
|
current_user_retweet=current_user_retweet,
|
||||||
|
hashtags=hashtags,
|
||||||
|
media=media,
|
||||||
|
retweeted_status=retweeted_status,
|
||||||
|
urls=urls,
|
||||||
|
user=user,
|
||||||
|
user_mentions=user_mentions)
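Status.NewFromJsonDict above unpacks user, retweeted_status and the entities/extended_entities blocks into model objects before delegating to the base class. A hedged sketch with a minimal, invented payload:

payload = {
    'id': 1,
    'text': 'New on Plex: Example Movie #movies https://t.co/abc',
    'created_at': 'Wed Jan 13 12:00:00 +0000 2016',
    'user': {'id': 42, 'screen_name': 'plexpy'},
    'entities': {
        'hashtags': [{'text': 'movies'}],
        'urls': [{'url': 'https://t.co/abc', 'expanded_url': 'http://example.com'}],
    },
}
status = Status.NewFromJsonDict(payload)
print(status.user.screen_name)       # plexpy
print(status.hashtags[0].text)       # movies
print(status.urls[0].expanded_url)   # http://example.com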
|
98
lib/twitter/parse_tweet.py
Normal file
|
@ -0,0 +1,98 @@
|
||||||
|
#!/usr/bin/env python
|
||||||
|
|
||||||
|
import re
|
||||||
|
|
||||||
|
class Emoticons:
|
||||||
|
POSITIVE = ["*O", "*-*", "*O*", "*o*", "* *",
|
||||||
|
":P", ":D", ":d", ":p",
|
||||||
|
";P", ";D", ";d", ";p",
|
||||||
|
":-)", ";-)", ":=)", ";=)",
|
||||||
|
":<)", ":>)", ";>)", ";=)",
|
||||||
|
"=}", ":)", "(:;)",
|
||||||
|
"(;", ":}", "{:", ";}",
|
||||||
|
"{;:]",
|
||||||
|
"[;", ":')", ";')", ":-3",
|
||||||
|
"{;", ":]",
|
||||||
|
";-3", ":-x", ";-x", ":-X",
|
||||||
|
";-X", ":-}", ";-=}", ":-]",
|
||||||
|
";-]", ":-.)",
|
||||||
|
"^_^", "^-^"]
|
||||||
|
|
||||||
|
NEGATIVE = [":(", ";(", ":'(",
|
||||||
|
"=(", "={", "):", ");",
|
||||||
|
")':", ")';", ")=", "}=",
|
||||||
|
";-{{", ";-{", ":-{{", ":-{",
|
||||||
|
":-(", ";-(",
|
||||||
|
":,)", ":'{",
|
||||||
|
"[:", ";]"
|
||||||
|
]
|
||||||
|
|
||||||
|
class ParseTweet(object):
|
||||||
|
# compile once on import
|
||||||
|
regexp = {"RT": "^RT", "MT": r"^MT", "ALNUM": r"(@[a-zA-Z0-9_]+)",
|
||||||
|
"HASHTAG": r"(#[\w\d]+)", "URL": r"([https://|http://]?[a-zA-Z\d\/]+[\.]+[a-zA-Z\d\/\.]+)",
|
||||||
|
"SPACES": r"\s+"}
|
||||||
|
regexp = dict((key, re.compile(value)) for key, value in regexp.items())
|
||||||
|
|
||||||
|
def __init__(self, timeline_owner, tweet):
|
||||||
|
""" timeline_owner : twitter handle of user account. tweet - 140 chars from feed; object does all computation on construction
|
||||||
|
properties:
|
||||||
|
RT, MT - boolean
|
||||||
|
URLs - list of URL
|
||||||
|
Hashtags - list of tags
|
||||||
|
"""
|
||||||
|
self.Owner = timeline_owner
|
||||||
|
self.tweet = tweet
|
||||||
|
self.UserHandles = ParseTweet.getUserHandles(tweet)
|
||||||
|
self.Hashtags = ParseTweet.getHashtags(tweet)
|
||||||
|
self.URLs = ParseTweet.getURLs(tweet)
|
||||||
|
self.RT = ParseTweet.getAttributeRT(tweet)
|
||||||
|
self.MT = ParseTweet.getAttributeMT(tweet)
|
||||||
|
self.Emoticon = ParseTweet.getAttributeEmoticon(tweet)
|
||||||
|
|
||||||
|
# additional intelligence
|
||||||
|
if ( self.RT and len(self.UserHandles) > 0 ): # change the owner of tweet?
|
||||||
|
self.Owner = self.UserHandles[0]
|
||||||
|
return
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
""" for display method """
|
||||||
|
return "owner %s, urls: %d, hashtags %d, user_handles %d, len_tweet %d, RT = %s, MT = %s" % \
|
||||||
|
(self.Owner, len(self.URLs), len(self.Hashtags), len(self.UserHandles), len(self.tweet), self.RT, self.MT)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def getAttributeEmoticon(tweet):
|
||||||
|
""" see if tweet is contains any emoticons, +ve, -ve or neutral """
|
||||||
|
emoji = list()
|
||||||
|
for tok in re.split(ParseTweet.regexp["SPACES"], tweet.strip()):
|
||||||
|
if tok in Emoticons.POSITIVE:
|
||||||
|
emoji.append( tok )
|
||||||
|
continue
|
||||||
|
if tok in Emoticons.NEGATIVE:
|
||||||
|
emoji.append( tok )
|
||||||
|
return emoji
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def getAttributeRT(tweet):
|
||||||
|
""" see if tweet is a RT """
|
||||||
|
return re.search(ParseTweet.regexp["RT"], tweet.strip()) is not None
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def getAttributeMT(tweet):
|
||||||
|
""" see if tweet is a MT """
|
||||||
|
return re.search(ParseTweet.regexp["MT"], tweet.strip()) is not None
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def getUserHandles(tweet):
|
||||||
|
""" given a tweet we try and extract all user handles in order of occurrence"""
|
||||||
|
return re.findall(ParseTweet.regexp["ALNUM"], tweet)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def getHashtags(tweet):
|
||||||
|
""" return all hashtags"""
|
||||||
|
return re.findall(ParseTweet.regexp["HASHTAG"], tweet)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def getURLs(tweet):
|
||||||
|
""" URL : [http://]?[\w\.?/]+"""
|
||||||
|
return re.findall(ParseTweet.regexp["URL"], tweet)
|
215
lib/twitter/ratelimit.py
Normal file
|
@ -0,0 +1,215 @@
|
||||||
|
from collections import namedtuple
|
||||||
|
import re
|
||||||
|
try:
|
||||||
|
from urllib.parse import urlparse
|
||||||
|
except ImportError:
|
||||||
|
from urlparse import urlparse
|
||||||
|
|
||||||
|
from twitter.twitter_utils import enf_type
|
||||||
|
|
||||||
|
EndpointRateLimit = namedtuple('EndpointRateLimit',
|
||||||
|
['limit', 'remaining', 'reset'])
|
||||||
|
|
||||||
|
ResourceEndpoint = namedtuple('ResourceEndpoint', ['regex', 'resource'])
|
||||||
|
|
||||||
|
|
||||||
|
GEO_ID_PLACE_ID = ResourceEndpoint(re.compile(r'/geo/id/\d+'), "/geo/id/:place_id")
|
||||||
|
SAVED_SEARCHES_DESTROY_ID = ResourceEndpoint(re.compile(r'/saved_searches/destroy/\d+'), "/saved_searches/destroy/:id")
|
||||||
|
SAVED_SEARCHES_SHOW_ID = ResourceEndpoint(re.compile(r'/saved_searches/show/\d+'), "/saved_searches/show/:id")
|
||||||
|
STATUSES_RETWEETS_ID = ResourceEndpoint(re.compile(r'/statuses/retweets/\d+'), "/statuses/retweets/:id")
|
||||||
|
STATUSES_SHOW_ID = ResourceEndpoint(re.compile(r'/statuses/show'), "/statuses/show/:id")
|
||||||
|
USERS_SHOW_ID = ResourceEndpoint(re.compile(r'/users/show'), "/users/show/:id")
|
||||||
|
USERS_SUGGESTIONS_SLUG = ResourceEndpoint(re.compile(r'/users/suggestions/\w+$'), "/users/suggestions/:slug")
|
||||||
|
USERS_SUGGESTIONS_SLUG_MEMBERS = ResourceEndpoint(re.compile(r'/users/suggestions/.+/members'), "/users/suggestions/:slug/members")
|
||||||
|
|
||||||
|
NON_STANDARD_ENDPOINTS = [
|
||||||
|
GEO_ID_PLACE_ID,
|
||||||
|
SAVED_SEARCHES_DESTROY_ID,
|
||||||
|
SAVED_SEARCHES_SHOW_ID,
|
||||||
|
STATUSES_RETWEETS_ID,
|
||||||
|
STATUSES_SHOW_ID,
|
||||||
|
USERS_SHOW_ID,
|
||||||
|
USERS_SUGGESTIONS_SLUG,
|
||||||
|
USERS_SUGGESTIONS_SLUG_MEMBERS,
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
class RateLimit(object):
|
||||||
|
|
||||||
|
""" Object to hold the rate limit status of various endpoints for
|
||||||
|
the twitter.Api object.
|
||||||
|
|
||||||
|
This object is generally attached to the API as Api.rate_limit, but is not
|
||||||
|
created until the user makes a method call that uses _RequestUrl() or calls
|
||||||
|
Api.InitializeRateLimit(), after which it gets created and populated with
|
||||||
|
rate limit data from Twitter.
|
||||||
|
|
||||||
|
Calling Api.InitializeRateLimit() populates the object with all of the
|
||||||
|
rate limits for the endpoints defined by Twitter; more info is available
|
||||||
|
here:
|
||||||
|
|
||||||
|
https://dev.twitter.com/rest/public/rate-limits
|
||||||
|
|
||||||
|
https://dev.twitter.com/rest/public/rate-limiting
|
||||||
|
|
||||||
|
https://dev.twitter.com/rest/reference/get/application/rate_limit_status
|
||||||
|
|
||||||
|
Once a resource (i.e., an endpoint) has been requested, Twitter's response
|
||||||
|
will contain the current rate limit status as part of the headers, i.e.::
|
||||||
|
|
||||||
|
x-rate-limit-limit
|
||||||
|
x-rate-limit-remaining
|
||||||
|
x-rate-limit-reset
|
||||||
|
|
||||||
|
``limit`` is the generic limit for that endpoint, ``remaining`` is how many
|
||||||
|
more times you can make a call to that endpoint, and ``reset`` is the time
|
||||||
|
(in seconds since the epoch) until remaining resets to its default for that
|
||||||
|
endpoint.
|
||||||
|
|
||||||
|
Generally speaking, each endpoint has a 15-minute reset time and endpoints
|
||||||
|
can either make 180 or 15 requests per window. According to Twitter, any
|
||||||
|
endpoint not defined in the rate limit chart or the response from a GET
|
||||||
|
request to ``application/rate_limit_status.json`` should be assumed to be
|
||||||
|
15 requests per 15 minutes.
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, **kwargs):
|
||||||
|
""" Instantiates the RateLimitObject. Takes a json dict as
|
||||||
|
kwargs and maps to the object's dictionary. So for something like:
|
||||||
|
|
||||||
|
{"resources": {
|
||||||
|
"help": {
|
||||||
|
/help/privacy": {
|
||||||
|
"limit": 15,
|
||||||
|
"remaining": 15,
|
||||||
|
"reset": 1452254278
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
the RateLimit object will have an attribute 'resources' from which you
|
||||||
|
can perform a lookup like:
|
||||||
|
|
||||||
|
api.rate_limit.get('help').get('/help/privacy')
|
||||||
|
|
||||||
|
and a dictionary of limit, remaining, and reset will be returned.
|
||||||
|
|
||||||
|
"""
|
||||||
|
self.__dict__.update(kwargs)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def url_to_resource(url):
|
||||||
|
""" Take a fully qualified URL and attempts to return the rate limit
|
||||||
|
resource family corresponding to it. For example:
|
||||||
|
|
||||||
|
>>> RateLimit.url_to_resource('https://api.twitter.com/1.1/statuses/lookup.json?id=317')
|
||||||
|
>>> '/statuses/lookup'
|
||||||
|
|
||||||
|
Args:
|
||||||
|
url (str): URL to convert to a resource family.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
string: Resource family corresponding to the URL.
|
||||||
|
"""
|
||||||
|
resource = urlparse(url).path.replace('/1.1', '').replace('.json', '')
|
||||||
|
for non_std_endpoint in NON_STANDARD_ENDPOINTS:
|
||||||
|
if re.match(non_std_endpoint.regex, resource):
|
||||||
|
return non_std_endpoint.resource
|
||||||
|
else:
|
||||||
|
return resource
|
||||||
|
|
||||||
|
def set_unknown_limit(self, url, limit, remaining, reset):
|
||||||
|
""" If a resource family is unknown, add it to the object's
|
||||||
|
dictionary. This is to deal with new endpoints being added to
|
||||||
|
the API, but not necessarily to the information returned by
|
||||||
|
``/account/rate_limit_status.json`` endpoint.
|
||||||
|
|
||||||
|
For example, if Twitter were to add an endpoint
|
||||||
|
``/puppies/lookup.json``, the RateLimit object would create a resource
|
||||||
|
family ``puppies`` and add ``/puppies/lookup`` as the endpoint, along
|
||||||
|
with whatever limit, remaining hits available, and reset time would be
|
||||||
|
applicable to that resource+endpoint pair.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
url (str):
|
||||||
|
URL of the endpoint being fetched.
|
||||||
|
limit (int):
|
||||||
|
Max number of times a user or app can hit the endpoint
|
||||||
|
before being rate limited.
|
||||||
|
remaining (int):
|
||||||
|
Number of times a user or app can access the endpoint
|
||||||
|
before being rate limited.
|
||||||
|
reset (int):
|
||||||
|
Epoch time at which the rate limit window will reset.
|
||||||
|
"""
|
||||||
|
endpoint = self.url_to_resource(url)
|
||||||
|
resource_family = endpoint.split('/')[1]
|
||||||
|
self.__dict__['resources'].update(
|
||||||
|
{resource_family: {
|
||||||
|
endpoint: {
|
||||||
|
"limit": limit,
|
||||||
|
"remaining": remaining,
|
||||||
|
"reset": reset
|
||||||
|
}}})
|
||||||
|
|
||||||
|
def get_limit(self, url):
|
||||||
|
""" Gets a EndpointRateLimit object for the given url.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
url (str, optional):
|
||||||
|
URL of the endpoint for which to return the rate limit
|
||||||
|
status.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
namedtuple: EndpointRateLimit object containing rate limit
|
||||||
|
information.
|
||||||
|
"""
|
||||||
|
endpoint = self.url_to_resource(url)
|
||||||
|
resource_family = endpoint.split('/')[1]
|
||||||
|
|
||||||
|
try:
|
||||||
|
family_rates = self.resources.get(resource_family).get(endpoint)
|
||||||
|
except AttributeError:
|
||||||
|
return EndpointRateLimit(limit=15, remaining=15, reset=0)
|
||||||
|
|
||||||
|
if not family_rates:
|
||||||
|
self.set_unknown_limit(url, limit=15, remaining=15, reset=0)
|
||||||
|
return EndpointRateLimit(limit=15, remaining=15, reset=0)
|
||||||
|
|
||||||
|
return EndpointRateLimit(family_rates['limit'],
|
||||||
|
family_rates['remaining'],
|
||||||
|
family_rates['reset'])
|
||||||
|
|
||||||
|
def set_limit(self, url, limit, remaining, reset):
|
||||||
|
""" Set an endpoint's rate limits. The data used for each of the
|
||||||
|
args should come from Twitter's ``x-rate-limit`` headers.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
url (str):
|
||||||
|
URL of the endpoint being fetched.
|
||||||
|
limit (int):
|
||||||
|
Max number of times a user or app can hit the endpoint
|
||||||
|
before being rate limited.
|
||||||
|
remaining (int):
|
||||||
|
Number of times a user or app can access the endpoint
|
||||||
|
before being rate limited.
|
||||||
|
reset (int):
|
||||||
|
Epoch time at which the rate limit window will reset.
|
||||||
|
"""
|
||||||
|
endpoint = self.url_to_resource(url)
|
||||||
|
resource_family = endpoint.split('/')[1]
|
||||||
|
|
||||||
|
try:
|
||||||
|
family_rates = self.resources.get(resource_family).get(endpoint)
|
||||||
|
except AttributeError:
|
||||||
|
self.set_unknown_limit(url, limit, remaining, reset)
|
||||||
|
family_rates = self.resources.get(resource_family).get(endpoint)
|
||||||
|
family_rates['limit'] = enf_type('limit', int, limit)
|
||||||
|
family_rates['remaining'] = enf_type('remaining', int, remaining)
|
||||||
|
family_rates['reset'] = enf_type('reset', int, reset)
|
||||||
|
|
||||||
|
return EndpointRateLimit(family_rates['limit'],
|
||||||
|
family_rates['remaining'],
|
||||||
|
family_rates['reset'])
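get_limit and set_limit above are meant to be fed from Twitter's x-rate-limit-* response headers; endpoints that have not been seen yet fall back to the conservative 15-per-window default. A hedged sketch with invented numbers (in the library the Api object owns the RateLimit instance):

rl = RateLimit(resources={})
url = 'https://api.twitter.com/1.1/statuses/user_timeline.json'
rl.set_limit(url, limit=180, remaining=179, reset=1452254278)
print(rl.get_limit(url))
# EndpointRateLimit(limit=180, remaining=179, reset=1452254278)
print(rl.get_limit('https://api.twitter.com/1.1/statuses/lookup.json'))
# EndpointRateLimit(limit=15, remaining=15, reset=0)  <- default for unseen endpoints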
|
265
lib/twitter/twitter_utils.py
Normal file
|
@ -0,0 +1,265 @@
|
||||||
|
# encoding: utf-8
|
||||||
|
import mimetypes
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
|
||||||
|
import requests
|
||||||
|
from tempfile import NamedTemporaryFile
|
||||||
|
|
||||||
|
from twitter import TwitterError
|
||||||
|
|
||||||
|
|
||||||
|
TLDS = [
|
||||||
|
"ac", "ad", "ae", "af", "ag", "ai", "al", "am", "an", "ao", "aq", "ar",
|
||||||
|
"as", "at", "au", "aw", "ax", "az", "ba", "bb", "bd", "be", "bf", "bg",
|
||||||
|
"bh", "bi", "bj", "bl", "bm", "bn", "bo", "bq", "br", "bs", "bt", "bv",
|
||||||
|
"bw", "by", "bz", "ca", "cc", "cd", "cf", "cg", "ch", "ci", "ck", "cl",
|
||||||
|
"cm", "cn", "co", "cr", "cu", "cv", "cw", "cx", "cy", "cz", "de", "dj",
|
||||||
|
"dk", "dm", "do", "dz", "ec", "ee", "eg", "eh", "er", "es", "et", "eu",
|
||||||
|
"fi", "fj", "fk", "fm", "fo", "fr", "ga", "gb", "gd", "ge", "gf", "gg",
|
||||||
|
"gh", "gi", "gl", "gm", "gn", "gp", "gq", "gr", "gs", "gt", "gu", "gw",
|
||||||
|
"gy", "hk", "hm", "hn", "hr", "ht", "hu", "id", "ie", "il", "im", "in",
|
||||||
|
"io", "iq", "ir", "is", "it", "je", "jm", "jo", "jp", "ke", "kg", "kh",
|
||||||
|
"ki", "km", "kn", "kp", "kr", "kw", "ky", "kz", "la", "lb", "lc", "li",
|
||||||
|
"lk", "lr", "ls", "lt", "lu", "lv", "ly", "ma", "mc", "md", "me", "mf",
|
||||||
|
"mg", "mh", "mk", "ml", "mm", "mn", "mo", "mp", "mq", "mr", "ms", "mt",
|
||||||
|
"mu", "mv", "mw", "mx", "my", "mz", "na", "nc", "ne", "nf", "ng", "ni",
|
||||||
|
"nl", "no", "np", "nr", "nu", "nz", "om", "pa", "pe", "pf", "pg", "ph",
|
||||||
|
"pk", "pl", "pm", "pn", "pr", "ps", "pt", "pw", "py", "qa", "re", "ro",
|
||||||
|
"rs", "ru", "rw", "sa", "sb", "sc", "sd", "se", "sg", "sh", "si", "sj",
|
||||||
|
"sk", "sl", "sm", "sn", "so", "sr", "ss", "st", "su", "sv", "sx", "sy",
|
||||||
|
"sz", "tc", "td", "tf", "tg", "th", "tj", "tk", "tl", "tm", "tn", "to",
|
||||||
|
"tp", "tr", "tt", "tv", "tw", "tz", "ua", "ug", "uk", "um", "us", "uy",
|
||||||
|
"uz", "va", "vc", "ve", "vg", "vi", "vn", "vu", "wf", "ws", "ye", "yt",
|
||||||
|
"za", "zm", "zw", "ελ", "бел", "мкд", "мон", "рф", "срб", "укр", "қаз",
|
||||||
|
"հայ", "الاردن", "الجزائر", "السعودية", "المغرب", "امارات", "ایران", "بھارت",
|
||||||
|
"تونس", "سودان", "سورية", "عراق", "عمان", "فلسطين", "قطر", "مصر",
|
||||||
|
"مليسيا", "پاکستان", "भारत", "বাংলা", "ভারত", "ਭਾਰਤ", "ભારત",
|
||||||
|
"இந்தியா", "இலங்கை", "சிங்கப்பூர்", "భారత్", "ලංකා", "ไทย",
|
||||||
|
"გე", "中国", "中國", "台湾", "台灣", "新加坡", "澳門", "香港", "한국", "neric:",
|
||||||
|
"abb", "abbott", "abogado", "academy", "accenture", "accountant",
|
||||||
|
"accountants", "aco", "active", "actor", "ads", "adult", "aeg", "aero",
|
||||||
|
"afl", "agency", "aig", "airforce", "airtel", "allfinanz", "alsace",
|
||||||
|
"amsterdam", "android", "apartments", "app", "aquarelle", "archi", "army",
|
||||||
|
"arpa", "asia", "associates", "attorney", "auction", "audio", "auto",
|
||||||
|
"autos", "axa", "azure", "band", "bank", "bar", "barcelona", "barclaycard",
|
||||||
|
"barclays", "bargains", "bauhaus", "bayern", "bbc", "bbva", "bcn", "beer",
|
||||||
|
"bentley", "berlin", "best", "bet", "bharti", "bible", "bid", "bike",
|
||||||
|
"bing", "bingo", "bio", "biz", "black", "blackfriday", "bloomberg", "blue",
|
||||||
|
"bmw", "bnl", "bnpparibas", "boats", "bond", "boo", "boots", "boutique",
|
||||||
|
"bradesco", "bridgestone", "broker", "brother", "brussels", "budapest",
|
||||||
|
"build", "builders", "business", "buzz", "bzh", "cab", "cafe", "cal",
|
||||||
|
"camera", "camp", "cancerresearch", "canon", "capetown", "capital",
|
||||||
|
"caravan", "cards", "care", "career", "careers", "cars", "cartier",
|
||||||
|
"casa", "cash", "casino", "cat", "catering", "cba", "cbn", "ceb", "center",
|
||||||
|
"ceo", "cern", "cfa", "cfd", "chanel", "channel", "chat", "cheap",
|
||||||
|
"chloe", "christmas", "chrome", "church", "cisco", "citic", "city",
|
||||||
|
"claims", "cleaning", "click", "clinic", "clothing", "cloud", "club",
|
||||||
|
"coach", "codes", "coffee", "college", "cologne", "com", "commbank",
|
||||||
|
"community", "company", "computer", "condos", "construction", "consulting",
|
||||||
|
"contractors", "cooking", "cool", "coop", "corsica", "country", "coupons",
|
||||||
|
"courses", "credit", "creditcard", "cricket", "crown", "crs", "cruises",
|
||||||
|
"cuisinella", "cymru", "cyou", "dabur", "dad", "dance", "date", "dating",
|
||||||
|
"datsun", "day", "dclk", "deals", "degree", "delivery", "delta",
|
||||||
|
"democrat", "dental", "dentist", "desi", "design", "dev", "diamonds",
|
||||||
|
"diet", "digital", "direct", "directory", "discount", "dnp", "docs",
|
||||||
|
"dog", "doha", "domains", "doosan", "download", "drive", "durban", "dvag",
|
||||||
|
"earth", "eat", "edu", "education", "email", "emerck", "energy",
|
||||||
|
"engineer", "engineering", "enterprises", "epson", "equipment", "erni",
|
||||||
|
"esq", "estate", "eurovision", "eus", "events", "everbank", "exchange",
|
||||||
|
"expert", "exposed", "express", "fage", "fail", "faith", "family", "fan",
|
||||||
|
"fans", "farm", "fashion", "feedback", "film", "finance", "financial",
|
||||||
|
"firmdale", "fish", "fishing", "fit", "fitness", "flights", "florist",
|
||||||
|
"flowers", "flsmidth", "fly", "foo", "football", "forex", "forsale",
|
||||||
|
"forum", "foundation", "frl", "frogans", "fund", "furniture", "futbol",
|
||||||
|
"fyi", "gal", "gallery", "game", "garden", "gbiz", "gdn", "gent",
|
||||||
|
"genting", "ggee", "gift", "gifts", "gives", "giving", "glass", "gle",
|
||||||
|
"global", "globo", "gmail", "gmo", "gmx", "gold", "goldpoint", "golf",
|
||||||
|
"goo", "goog", "google", "gop", "gov", "graphics", "gratis", "green",
|
||||||
|
"gripe", "group", "guge", "guide", "guitars", "guru", "hamburg", "hangout",
|
||||||
|
"haus", "healthcare", "help", "here", "hermes", "hiphop", "hitachi", "hiv",
|
||||||
|
"hockey", "holdings", "holiday", "homedepot", "homes", "honda", "horse",
|
||||||
|
"host", "hosting", "hoteles", "hotmail", "house", "how", "hsbc", "ibm",
|
||||||
|
"icbc", "ice", "icu", "ifm", "iinet", "immo", "immobilien", "industries",
|
||||||
|
"infiniti", "info", "ing", "ink", "institute", "insure", "int",
|
||||||
|
"international", "investments", "ipiranga", "irish", "ist", "istanbul",
|
||||||
|
"itau", "iwc", "java", "jcb", "jetzt", "jewelry", "jlc", "jll", "jobs",
|
||||||
|
"joburg", "jprs", "juegos", "kaufen", "kddi", "kim", "kitchen", "kiwi",
|
||||||
|
"koeln", "komatsu", "krd", "kred", "kyoto", "lacaixa", "lancaster", "land",
|
||||||
|
"lasalle", "lat", "latrobe", "law", "lawyer", "lds", "lease", "leclerc",
|
||||||
|
"legal", "lexus", "lgbt", "liaison", "lidl", "life", "lighting", "limited",
|
||||||
|
"limo", "link", "live", "lixil", "loan", "loans", "lol", "london", "lotte",
|
||||||
|
"lotto", "love", "ltda", "lupin", "luxe", "luxury", "madrid", "maif",
|
||||||
|
"maison", "man", "management", "mango", "market", "marketing", "markets",
|
||||||
|
"marriott", "mba", "media", "meet", "melbourne", "meme", "memorial", "men",
|
||||||
|
"menu", "miami", "microsoft", "mil", "mini", "mma", "mobi", "moda", "moe",
|
||||||
|
"mom", "monash", "money", "montblanc", "mormon", "mortgage", "moscow",
|
||||||
|
"motorcycles", "mov", "movie", "movistar", "mtn", "mtpc", "museum",
|
||||||
|
"nadex", "nagoya", "name", "navy", "nec", "net", "netbank", "network",
|
||||||
|
"neustar", "new", "news", "nexus", "ngo", "nhk", "nico", "ninja", "nissan",
|
||||||
|
"nokia", "nra", "nrw", "ntt", "nyc", "office", "okinawa", "omega", "one",
|
||||||
|
"ong", "onl", "online", "ooo", "oracle", "orange", "org", "organic",
|
||||||
|
"osaka", "otsuka", "ovh", "page", "panerai", "paris", "partners", "parts",
|
||||||
|
"party", "pet", "pharmacy", "philips", "photo", "photography", "photos",
|
||||||
|
"physio", "piaget", "pics", "pictet", "pictures", "pink", "pizza", "place",
|
||||||
|
"play", "plumbing", "plus", "pohl", "poker", "porn", "post", "praxi",
|
||||||
|
"press", "pro", "prod", "productions", "prof", "properties", "property",
|
||||||
|
"pub", "qpon", "quebec", "racing", "realtor", "realty", "recipes", "red",
|
||||||
|
"redstone", "rehab", "reise", "reisen", "reit", "ren", "rent", "rentals",
|
||||||
|
"repair", "report", "republican", "rest", "restaurant", "review",
|
||||||
|
"reviews", "rich", "ricoh", "rio", "rip", "rocks", "rodeo", "rsvp", "ruhr",
|
||||||
|
"run", "ryukyu", "saarland", "sakura", "sale", "samsung", "sandvik",
|
||||||
|
"sandvikcoromant", "sanofi", "sap", "sarl", "saxo", "sca", "scb",
|
||||||
|
"schmidt", "scholarships", "school", "schule", "schwarz", "science",
|
||||||
|
"scor", "scot", "seat", "seek", "sener", "services", "sew", "sex", "sexy",
|
||||||
|
"shiksha", "shoes", "show", "shriram", "singles", "site", "ski", "sky",
|
||||||
|
"skype", "sncf", "soccer", "social", "software", "sohu", "solar",
|
||||||
|
"solutions", "sony", "soy", "space", "spiegel", "spreadbetting", "srl",
|
||||||
|
"starhub", "statoil", "studio", "study", "style", "sucks", "supplies",
|
||||||
|
"supply", "support", "surf", "surgery", "suzuki", "swatch", "swiss",
|
||||||
|
"sydney", "systems", "taipei", "tatamotors", "tatar", "tattoo", "tax",
|
||||||
|
"taxi", "team", "tech", "technology", "tel", "telefonica", "temasek",
|
||||||
|
"tennis", "thd", "theater", "tickets", "tienda", "tips", "tires", "tirol",
|
||||||
|
"today", "tokyo", "tools", "top", "toray", "toshiba", "tours", "town",
|
||||||
|
"toyota", "toys", "trade", "trading", "training", "travel", "trust", "tui",
|
||||||
|
"ubs", "university", "uno", "uol", "vacations", "vegas", "ventures",
|
||||||
|
"vermögensberater", "vermögensberatung", "versicherung", "vet", "viajes",
|
||||||
|
"video", "villas", "vin", "vision", "vista", "vistaprint", "vlaanderen",
|
||||||
|
"vodka", "vote", "voting", "voto", "voyage", "wales", "walter", "wang",
|
||||||
|
"watch", "webcam", "website", "wed", "wedding", "weir", "whoswho", "wien",
|
||||||
|
"wiki", "williamhill", "win", "windows", "wine", "wme", "work", "works",
|
||||||
|
"world", "wtc", "wtf", "xbox", "xerox", "xin", "xperia", "xxx", "xyz",
|
||||||
|
"yachts", "yandex", "yodobashi", "yoga", "yokohama", "youtube", "zip",
|
||||||
|
"zone", "zuerich", "дети", "ком", "москва", "онлайн", "орг", "рус", "сайт",
|
||||||
|
"קום", "بازار", "شبكة", "كوم", "موقع", "कॉम", "नेट", "संगठन", "คอม",
|
||||||
|
"みんな", "グーグル", "コム", "世界", "中信", "中文网", "企业", "佛山", "信息",
|
||||||
|
"健康", "八卦", "公司", "公益", "商城", "商店", "商标", "在线", "大拿", "娱乐",
|
||||||
|
"工行", "广东", "慈善", "我爱你", "手机", "政务", "政府", "新闻", "时尚", "机构",
|
||||||
|
"淡马锡", "游戏", "点看", "移动", "组织机构", "网址", "网店", "网络", "谷歌", "集团",
|
||||||
|
"飞利浦", "餐厅", "닷넷", "닷컴", "삼성", "onion"]
|
||||||
|
|
||||||
|
URL_REGEXP = re.compile(r'(?i)((?:https?://|www\\.)*(?:[\w+-_]+[.])(?:' + r'\b|'.join(TLDS) + r'\b|(?:[0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5]))+(?:[:\w+\/]?[a-z0-9!\*\'\(\);:&=\+\$/%#\[\]\-_\.,~?])*)', re.UNICODE)
|
||||||
|
|
||||||
|
|
||||||
|
def calc_expected_status_length(status, short_url_length=23):
|
||||||
|
""" Calculates the length of a tweet, taking into account Twitter's
|
||||||
|
replacement of URLs with https://t.co links.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
status: text of the status message to be posted.
|
||||||
|
short_url_length: the current published length of https://t.co links.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Expected length of the status message as an integer.
|
||||||
|
|
||||||
|
"""
|
||||||
|
replaced_chars = 0
|
||||||
|
status_length = len(status)
|
||||||
|
match = re.findall(URL_REGEXP, status)
|
||||||
|
if len(match) >= 1:
|
||||||
|
replaced_chars = len(''.join(match))
|
||||||
|
status_length = status_length - replaced_chars + (short_url_length * len(match))
|
||||||
|
return status_length
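calc_expected_status_length swaps every URL match for Twitter's fixed-length t.co form (23 characters by default) before counting. A quick check with an invented status:

status = 'New on Plex: Example Movie http://example.com/some/long/poster/path'
print(len(status))                          # 67 raw characters
print(calc_expected_status_length(status))  # 50 = 27 chars of text + 23 for the t.co link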
|
||||||
|
|
||||||
|
|
||||||
|
def is_url(text):
|
||||||
|
""" Checks to see if a bit of text is a URL.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
text: text to check.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Boolean of whether the text should be treated as a URL or not.
|
||||||
|
"""
|
||||||
|
if re.findall(URL_REGEXP, text):
|
||||||
|
return True
|
||||||
|
else:
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
def http_to_file(http):
|
||||||
|
data_file = NamedTemporaryFile()
|
||||||
|
req = requests.get(http, stream=True)
|
||||||
|
data_file.write(req.raw.data)
|
||||||
|
return data_file
|
||||||
|
|
||||||
|
|
||||||
|
def parse_media_file(passed_media):
|
||||||
|
""" Parses a media file and attempts to return a file-like object and
|
||||||
|
information about the media file.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
passed_media: media file which to parse.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
file-like object, the filename of the media file, the file size, and
|
||||||
|
the type of media.
|
||||||
|
"""
|
||||||
|
img_formats = ['image/jpeg',
|
||||||
|
'image/png',
|
||||||
|
'image/gif',
|
||||||
|
'image/bmp',
|
||||||
|
'image/webp']
|
||||||
|
video_formats = ['video/mp4']
|
||||||
|
|
||||||
|
# If passed_media is a string, check if it points to a URL, otherwise,
|
||||||
|
# it should point to local file. Create a reference to a file obj for
|
||||||
|
# each case such that data_file ends up with a read() method.
|
||||||
|
if not hasattr(passed_media, 'read'):
|
||||||
|
if passed_media.startswith('http'):
|
||||||
|
data_file = http_to_file(passed_media)
|
||||||
|
filename = os.path.basename(passed_media)
|
||||||
|
else:
|
||||||
|
data_file = open(os.path.realpath(passed_media), 'rb')
|
||||||
|
filename = os.path.basename(passed_media)
|
||||||
|
|
||||||
|
# Otherwise, if a file object was passed in the first place,
|
||||||
|
# create the standard reference to media_file (i.e., rename it to fp).
|
||||||
|
else:
|
||||||
|
if passed_media.mode != 'rb':
|
||||||
|
raise TwitterError({'message': 'File mode must be "rb".'})
|
||||||
|
filename = os.path.basename(passed_media.name)
|
||||||
|
data_file = passed_media
|
||||||
|
|
||||||
|
data_file.seek(0, 2)
|
||||||
|
file_size = data_file.tell()
|
||||||
|
|
||||||
|
try:
|
||||||
|
data_file.seek(0)
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
|
||||||
|
media_type = mimetypes.guess_type(os.path.basename(filename))[0]
|
||||||
|
if media_type in img_formats and file_size > 5 * 1048576:
|
||||||
|
raise TwitterError({'message': 'Images must be less than 5MB.'})
|
||||||
|
elif media_type in video_formats and file_size > 15 * 1048576:
|
||||||
|
raise TwitterError({'message': 'Videos must be less than 15MB.'})
|
||||||
|
elif media_type not in img_formats and media_type not in video_formats:
|
||||||
|
raise TwitterError({'message': 'Media type could not be determined.'})
|
||||||
|
|
||||||
|
return data_file, filename, file_size, media_type
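parse_media_file above accepts a URL, a local path, or an already-open binary file object and normalizes all three to (file object, filename, size, MIME type), enforcing the 5MB image / 15MB video caps. A hedged sketch; 'poster.jpg' is a hypothetical local file:

with open('poster.jpg', 'rb') as fp:   # hypothetical file
    data_file, filename, file_size, media_type = parse_media_file(fp)
    print(filename)     # poster.jpg
    print(media_type)   # image/jpeg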
|
||||||
|
|
||||||
|
|
||||||
|
def enf_type(field, _type, val):
|
||||||
|
""" Checks to see if a given val for a field (i.e., the name of the field)
|
||||||
|
is of the proper _type. If it is not, raises a TwitterError with a brief
|
||||||
|
explanation.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
field:
|
||||||
|
Name of the field you are checking.
|
||||||
|
_type:
|
||||||
|
Type that the value should be returned as.
|
||||||
|
val:
|
||||||
|
Value to convert to _type.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
val converted to type _type.
|
||||||
|
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
return _type(val)
|
||||||
|
except ValueError:
|
||||||
|
raise TwitterError({
|
||||||
|
'message': '"{0}" must be type {1}'.format(field, _type.__name__)
|
||||||
|
})
|
|
@ -464,6 +464,7 @@ _CONFIG_DEFINITIONS = {
|
||||||
'TWITTER_ACCESS_TOKEN_SECRET': (str, 'Twitter', ''),
|
'TWITTER_ACCESS_TOKEN_SECRET': (str, 'Twitter', ''),
|
||||||
'TWITTER_CONSUMER_KEY': (str, 'Twitter', ''),
|
'TWITTER_CONSUMER_KEY': (str, 'Twitter', ''),
|
||||||
'TWITTER_CONSUMER_SECRET': (str, 'Twitter', ''),
|
'TWITTER_CONSUMER_SECRET': (str, 'Twitter', ''),
|
||||||
|
'TWITTER_INCL_POSTER': (int, 'Twitter', 1),
|
||||||
'TWITTER_INCL_SUBJECT': (int, 'Twitter', 1),
|
'TWITTER_INCL_SUBJECT': (int, 'Twitter', 1),
|
||||||
'TWITTER_ON_PLAY': (int, 'Twitter', 0),
|
'TWITTER_ON_PLAY': (int, 'Twitter', 0),
|
||||||
'TWITTER_ON_STOP': (int, 'Twitter', 0),
|
'TWITTER_ON_STOP': (int, 'Twitter', 0),
|
||||||
|
|
|
@ -38,8 +38,8 @@ from urlparse import parse_qsl
|
||||||
from pynma import pynma
|
from pynma import pynma
|
||||||
import gntp.notifier
|
import gntp.notifier
|
||||||
import oauth2 as oauth
|
import oauth2 as oauth
|
||||||
import pythontwitter as twitter
|
import twitter
|
||||||
import pythonfacebook as facebook
|
import facebook
|
||||||
|
|
||||||
import plexpy
|
import plexpy
|
||||||
import database
|
import database
|
||||||
|
@ -526,8 +526,8 @@ def send_notification(agent_id, subject, body, notify_action, **kwargs):
|
||||||
email = Email()
|
email = Email()
|
||||||
return email.notify(subject=subject, message=body)
|
return email.notify(subject=subject, message=body)
|
||||||
elif agent_id == 11:
|
elif agent_id == 11:
|
||||||
tweet = TwitterNotifier()
|
twitter = TwitterNotifier()
|
||||||
return tweet.notify(subject=subject, message=body)
|
return twitter.notify(subject=subject, message=body, **kwargs)
|
||||||
elif agent_id == 12:
|
elif agent_id == 12:
|
||||||
iftttClient = IFTTT()
|
iftttClient = IFTTT()
|
||||||
return iftttClient.notify(subject=subject, message=body, action=notify_action)
|
return iftttClient.notify(subject=subject, message=body, action=notify_action)
|
||||||
|
@ -1257,16 +1257,22 @@ class TwitterNotifier(object):
|
||||||
self.access_token_secret = plexpy.CONFIG.TWITTER_ACCESS_TOKEN_SECRET
|
self.access_token_secret = plexpy.CONFIG.TWITTER_ACCESS_TOKEN_SECRET
|
||||||
self.consumer_key = plexpy.CONFIG.TWITTER_CONSUMER_KEY
|
self.consumer_key = plexpy.CONFIG.TWITTER_CONSUMER_KEY
|
||||||
self.consumer_secret = plexpy.CONFIG.TWITTER_CONSUMER_SECRET
|
self.consumer_secret = plexpy.CONFIG.TWITTER_CONSUMER_SECRET
|
||||||
|
self.incl_poster = plexpy.CONFIG.TWITTER_INCL_POSTER
|
||||||
self.incl_subject = plexpy.CONFIG.TWITTER_INCL_SUBJECT
|
self.incl_subject = plexpy.CONFIG.TWITTER_INCL_SUBJECT
|
||||||
|
|
||||||
def notify(self, subject, message):
|
def notify(self, subject, message, **kwargs):
|
||||||
if not subject or not message:
|
if not subject or not message:
|
||||||
return
|
return
|
||||||
else:
|
|
||||||
|
poster_url = ''
|
||||||
|
if self.incl_poster and 'metadata' in kwargs:
|
||||||
|
metadata = kwargs['metadata']
|
||||||
|
poster_url = metadata.get('poster_url','')
|
||||||
|
|
||||||
if self.incl_subject:
|
if self.incl_subject:
|
||||||
self._send_tweet(subject + ': ' + message)
|
self._send_tweet(subject + ': ' + message, attachment=poster_url)
|
||||||
else:
|
else:
|
||||||
self._send_tweet(message)
|
self._send_tweet(message, attachment=poster_url)
|
||||||
|
|
||||||
def test_notify(self):
|
def test_notify(self):
|
||||||
return self._send_tweet("This is a test notification from PlexPy at " + helpers.now())
|
return self._send_tweet("This is a test notification from PlexPy at " + helpers.now())
|
||||||
|
@ -1324,7 +1330,7 @@ class TwitterNotifier(object):
|
||||||
plexpy.CONFIG.write()
|
plexpy.CONFIG.write()
|
||||||
return True
|
return True
|
||||||
|
|
||||||
def _send_tweet(self, message=None):
|
def _send_tweet(self, message=None, attachment=None):
|
||||||
consumer_key = self.consumer_key
|
consumer_key = self.consumer_key
|
||||||
consumer_secret = self.consumer_secret
|
consumer_secret = self.consumer_secret
|
||||||
access_token = self.access_token
|
access_token = self.access_token
|
||||||
|
@ -1335,7 +1341,7 @@ class TwitterNotifier(object):
|
||||||
api = twitter.Api(consumer_key, consumer_secret, access_token, access_token_secret)
|
api = twitter.Api(consumer_key, consumer_secret, access_token, access_token_secret)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
api.PostUpdate(message)
|
api.PostUpdate(message, media=attachment)
|
||||||
logger.info(u"PlexPy Notifiers :: Twitter notification sent.")
|
logger.info(u"PlexPy Notifiers :: Twitter notification sent.")
|
||||||
return True
|
return True
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
|
@ -1376,6 +1382,12 @@ class TwitterNotifier(object):
|
||||||
'description': 'Your Twitter access token secret.',
|
'description': 'Your Twitter access token secret.',
|
||||||
'input_type': 'text'
|
'input_type': 'text'
|
||||||
},
|
},
|
||||||
|
{'label': 'Include Poster Image',
|
||||||
|
'value': self.incl_poster,
|
||||||
|
'name': 'twitter_incl_poster',
|
||||||
|
'description': 'Include a poster with the notifications.',
|
||||||
|
'input_type': 'checkbox'
|
||||||
|
},
|
||||||
{'label': 'Include Subject Line',
|
{'label': 'Include Subject Line',
|
||||||
'value': self.incl_subject,
|
'value': self.incl_subject,
|
||||||
'name': 'twitter_incl_subject',
|
'name': 'twitter_incl_subject',
|
||||||
|
@ -2331,49 +2343,6 @@ class FacebookNotifier(object):
|
||||||
def notify(self, subject, message, **kwargs):
|
def notify(self, subject, message, **kwargs):
|
||||||
if not subject or not message:
|
if not subject or not message:
|
||||||
return
|
return
|
||||||
else:
|
|
||||||
if self.incl_subject:
|
|
||||||
self._post_facebook(subject + ': ' + message, **kwargs)
|
|
||||||
else:
|
|
||||||
self._post_facebook(message, **kwargs)
|
|
||||||
|
|
||||||
def test_notify(self):
|
|
||||||
return self._post_facebook(u"PlexPy Notifiers :: This is a test notification from PlexPy at " + helpers.now())
|
|
||||||
|
|
||||||
def _get_authorization(self):
|
|
||||||
return facebook.auth_url(app_id=self.app_id,
|
|
||||||
canvas_url=self.redirect_uri + '/facebookStep2',
|
|
||||||
perms=['user_managed_groups','publish_actions'])
|
|
||||||
|
|
||||||
def _get_credentials(self, code):
|
|
||||||
logger.info(u"PlexPy Notifiers :: Requesting access token from Facebook")
|
|
||||||
|
|
||||||
try:
|
|
||||||
# Request user access token
|
|
||||||
api = facebook.GraphAPI(version='2.5')
|
|
||||||
response = api.get_access_token_from_code(code=code,
|
|
||||||
redirect_uri=self.redirect_uri + '/facebookStep2',
|
|
||||||
app_id=self.app_id,
|
|
||||||
app_secret=self.app_secret)
|
|
||||||
access_token = response['access_token']
|
|
||||||
|
|
||||||
# Request extended user access token
|
|
||||||
api = facebook.GraphAPI(access_token=access_token, version='2.5')
|
|
||||||
response = api.extend_access_token(app_id=self.app_id,
|
|
||||||
app_secret=self.app_secret)
|
|
||||||
access_token = response['access_token']
|
|
||||||
|
|
||||||
plexpy.CONFIG.FACEBOOK_TOKEN = access_token
|
|
||||||
plexpy.CONFIG.write()
|
|
||||||
except Exception as e:
|
|
||||||
logger.error(u"PlexPy Notifiers :: Error requesting Facebook access token: %s" % e)
|
|
||||||
return False
|
|
||||||
|
|
||||||
return True
|
|
||||||
|
|
||||||
def _post_facebook(self, message=None, **kwargs):
|
|
||||||
if self.group_id:
|
|
||||||
api = facebook.GraphAPI(access_token=self.access_token, version='2.5')
|
|
||||||
|
|
||||||
attachment = {}
|
attachment = {}
|
||||||
|
|
||||||
|
@ -2453,6 +2422,49 @@ class FacebookNotifier(object):
|
||||||
attachment['name'] = title
|
attachment['name'] = title
|
||||||
attachment['description'] = subtitle
|
attachment['description'] = subtitle
|
||||||
|
|
||||||
|
if self.incl_subject:
|
||||||
|
self._post_facebook(subject + ': ' + message, attachment=attachment)
|
||||||
|
else:
|
||||||
|
self._post_facebook(message, attachment=attachment)
|
||||||
|
|
||||||
|
def test_notify(self):
|
||||||
|
return self._post_facebook(u"PlexPy Notifiers :: This is a test notification from PlexPy at " + helpers.now())
|
||||||
|
|
||||||
|
def _get_authorization(self):
|
||||||
|
return facebook.auth_url(app_id=self.app_id,
|
||||||
|
canvas_url=self.redirect_uri + '/facebookStep2',
|
||||||
|
perms=['user_managed_groups','publish_actions'])
|
||||||
|
|
||||||
|
def _get_credentials(self, code):
|
||||||
|
logger.info(u"PlexPy Notifiers :: Requesting access token from Facebook")
|
||||||
|
|
||||||
|
try:
|
||||||
|
# Request user access token
|
||||||
|
api = facebook.GraphAPI(version='2.5')
|
||||||
|
response = api.get_access_token_from_code(code=code,
|
||||||
|
redirect_uri=self.redirect_uri + '/facebookStep2',
|
||||||
|
app_id=self.app_id,
|
||||||
|
app_secret=self.app_secret)
|
||||||
|
access_token = response['access_token']
|
||||||
|
|
||||||
|
# Request extended user access token
|
||||||
|
api = facebook.GraphAPI(access_token=access_token, version='2.5')
|
||||||
|
response = api.extend_access_token(app_id=self.app_id,
|
||||||
|
app_secret=self.app_secret)
|
||||||
|
access_token = response['access_token']
|
||||||
|
|
||||||
|
plexpy.CONFIG.FACEBOOK_TOKEN = access_token
|
||||||
|
plexpy.CONFIG.write()
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(u"PlexPy Notifiers :: Error requesting Facebook access token: %s" % e)
|
||||||
|
return False
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
|
def _post_facebook(self, message=None, attachment=None):
|
||||||
|
if self.group_id:
|
||||||
|
api = facebook.GraphAPI(access_token=self.access_token, version='2.5')
|
||||||
|
|
||||||
try:
|
try:
|
||||||
api.put_wall_post(profile_id=self.group_id, message=message, attachment=attachment)
|
api.put_wall_post(profile_id=self.group_id, message=message, attachment=attachment)
|
||||||
logger.info(u"PlexPy Notifiers :: Facebook notification sent.")
|
logger.info(u"PlexPy Notifiers :: Facebook notification sent.")
|
||||||
|
|