Remove unicode strings

JonnyWong16 2019-11-23 14:37:26 -08:00
parent 1c18e72539
commit c279057f91
28 changed files with 834 additions and 847 deletions
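The pattern applied across these files: once from __future__ import unicode_literals is in effect, plain string literals are unicode by default on Python 2 (as they already are on Python 3), so the explicit u"" prefixes on log messages and similar strings become redundant and are dropped. A minimal sketch of the idea, as a standalone example rather than a file from this commit:

# -*- coding: utf-8 -*-
from __future__ import unicode_literals

import logging

logger = logging.getLogger(__name__)

def before_style():
    # Old style: explicit unicode prefix on every literal (Python 2 habit).
    logger.debug(u"Tautulli is running in the dev environment.")

def after_style():
    # With unicode_literals imported, the bare literal is already unicode
    # on Python 2 and str on Python 3, so the u prefix is unnecessary.
    logger.debug("Tautulli is running in the dev environment.")

if __name__ == "__main__":
    logging.basicConfig(level=logging.DEBUG)
    before_style()
    after_style()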

View file

@ -1,8 +1,4 @@
#!/bin/sh
''''which python >/dev/null 2>&1 && exec python "$0" "$@" # '''
''''which python2 >/dev/null 2>&1 && exec python2 "$0" "$@" # '''
''''which python2.7 >/dev/null 2>&1 && exec python2.7 "$0" "$@" # '''
''''exec echo "Error: Python not found!" # '''
#!/usr/bin/env python
# -*- coding: utf-8 -*-
@ -21,6 +17,8 @@
# You should have received a copy of the GNU General Public License
# along with Tautulli. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
import os
import sys
@ -122,7 +120,7 @@ def main():
if args.dev:
plexpy.DEV = True
logger.debug(u"Tautulli is running in the dev environment.")
logger.debug("Tautulli is running in the dev environment.")
if args.daemon:
if sys.platform == 'win32':

View file

@ -137,8 +137,7 @@ def initialize(config_file):
return False
if CONFIG.HTTP_PORT < 21 or CONFIG.HTTP_PORT > 65535:
plexpy.logger.warn(
u"HTTP_PORT out of bounds: 21 < %s < 65535", CONFIG.HTTP_PORT)
plexpy.logger.warn("HTTP_PORT out of bounds: 21 < %s < 65535", CONFIG.HTTP_PORT)
CONFIG.HTTP_PORT = 8181
if not CONFIG.HTTPS_CERT:
@ -155,26 +154,26 @@ def initialize(config_file):
logger.initLogger(console=not QUIET, log_dir=CONFIG.LOG_DIR if log_writable else None,
verbose=VERBOSE)
logger.info(u"Starting Tautulli {}".format(
logger.info("Starting Tautulli {}".format(
common.RELEASE
))
logger.info(u"{}{} {} ({}{})".format(
logger.info("{}{} {} ({}{})".format(
'[Docker] ' if DOCKER else '', common.PLATFORM, common.PLATFORM_RELEASE, common.PLATFORM_VERSION,
' - {}'.format(common.PLATFORM_LINUX_DISTRO) if common.PLATFORM_LINUX_DISTRO else ''
))
logger.info(u"{} (UTC{})".format(
logger.info("{} (UTC{})".format(
plexpy.SYS_TIMEZONE.zone, plexpy.SYS_UTC_OFFSET
))
logger.info(u"Python {}".format(
logger.info("Python {}".format(
sys.version
))
logger.info(u"Program Dir: {}".format(
logger.info("Program Dir: {}".format(
PROG_DIR
))
logger.info(u"Config File: {}".format(
logger.info("Config File: {}".format(
CONFIG_FILE
))
logger.info(u"Database File: {}".format(
logger.info("Database File: {}".format(
DB_FILE
))
@ -186,18 +185,18 @@ def initialize(config_file):
CONFIG.NEWSLETTER_DIR, os.path.join(DATA_DIR, 'newsletters'), 'newsletters')
# Initialize the database
logger.info(u"Checking if the database upgrades are required...")
logger.info("Checking if the database upgrades are required...")
try:
dbcheck()
except Exception as e:
logger.error(u"Can't connect to the database: %s" % e)
logger.error("Can't connect to the database: %s" % e)
# Perform upgrades
logger.info(u"Checking if configuration upgrades are required...")
logger.info("Checking if configuration upgrades are required...")
try:
upgrade()
except Exception as e:
logger.error(u"Could not perform upgrades: %s" % e)
logger.error("Could not perform upgrades: %s" % e)
# Add notifier configs to logger blacklist
newsletters.blacklist_logger()
@ -206,19 +205,19 @@ def initialize(config_file):
# Check if Tautulli has a uuid
if CONFIG.PMS_UUID == '' or not CONFIG.PMS_UUID:
logger.debug(u"Generating UUID...")
logger.debug("Generating UUID...")
CONFIG.PMS_UUID = generate_uuid()
CONFIG.write()
# Check if Tautulli has an API key
if CONFIG.API_KEY == '':
logger.debug(u"Generating API key...")
logger.debug("Generating API key...")
CONFIG.API_KEY = generate_uuid()
CONFIG.write()
# Check if Tautulli has a jwt_secret
if CONFIG.JWT_SECRET == '' or not CONFIG.JWT_SECRET or CONFIG.JWT_UPDATE_SECRET:
logger.debug(u"Generating JWT secret...")
logger.debug("Generating JWT secret...")
CONFIG.JWT_SECRET = generate_uuid()
CONFIG.JWT_UPDATE_SECRET = False
CONFIG.write()
@ -231,7 +230,7 @@ def initialize(config_file):
with open(version_lock_file, "r") as fp:
prev_version = fp.read()
except IOError as e:
logger.error(u"Unable to read previous version from file '%s': %s" %
logger.error("Unable to read previous version from file '%s': %s" %
(version_lock_file, e))
else:
prev_version = 'cfd30996264b7e9fe4ef87f02d1cc52d1ae8bfca'
@ -248,7 +247,7 @@ def initialize(config_file):
with open(version_lock_file, "w") as fp:
fp.write(CURRENT_VERSION)
except IOError as e:
logger.error(u"Unable to write current version to file '%s': %s" %
logger.error("Unable to write current version to file '%s': %s" %
(version_lock_file, e))
# Check for new versions
@ -256,7 +255,7 @@ def initialize(config_file):
try:
versioncheck.check_update()
except:
logger.exception(u"Unhandled exception")
logger.exception("Unhandled exception")
LATEST_VERSION = CURRENT_VERSION
else:
LATEST_VERSION = CURRENT_VERSION
@ -269,7 +268,7 @@ def initialize(config_file):
with open(release_file, "r") as fp:
PREV_RELEASE = fp.read()
except IOError as e:
logger.error(u"Unable to read previous release from file '%s': %s" %
logger.error("Unable to read previous release from file '%s': %s" %
(release_file, e))
elif prev_version == 'cfd30996264b7e9fe4ef87f02d1cc52d1ae8bfca': # Commit hash for v1.4.25
PREV_RELEASE = 'v1.4.25'
@ -285,7 +284,7 @@ def initialize(config_file):
with open(release_file, "w") as fp:
fp.write(common.RELEASE)
except IOError as e:
logger.error(u"Unable to write current release to file '%s': %s" %
logger.error("Unable to write current release to file '%s': %s" %
(release_file, e))
# Get the real PMS urls for SSL and remote access
@ -311,7 +310,7 @@ def initialize(config_file):
def daemonize():
if threading.activeCount() != 1:
logger.warn(
u"There are %r active threads. Daemonizing may cause"
"There are %r active threads. Daemonizing may cause"
" strange behavior.",
threading.enumerate())
@ -352,10 +351,10 @@ def daemonize():
os.dup2(se.fileno(), sys.stderr.fileno())
pid = os.getpid()
logger.info(u"Daemonized to PID: %d", pid)
logger.info("Daemonized to PID: %d", pid)
if CREATEPID:
logger.info(u"Writing PID %d to %s", pid, PIDFILE)
logger.info("Writing PID %d to %s", pid, PIDFILE)
with file(PIDFILE, 'w') as fp:
fp.write("%s\n" % pid)
@ -373,7 +372,7 @@ def launch_browser(host, port, root):
try:
webbrowser.open('%s://%s:%i%s' % (protocol, host, port, root))
except Exception as e:
logger.error(u"Could not launch browser: %s" % e)
logger.error("Could not launch browser: %s" % e)
def win_system_tray():
@ -411,13 +410,13 @@ def win_system_tray():
('Update', None, tray_update, None),
('Restart', None, tray_restart, None))
logger.info(u"Launching system tray icon.")
logger.info("Launching system tray icon.")
try:
plexpy.WIN_SYS_TRAY_ICON = SysTrayIcon(icon, hover_text, menu_options, on_quit=tray_quit)
plexpy.WIN_SYS_TRAY_ICON.start()
except Exception as e:
logger.error(u"Unable to launch system tray icon: %s." % e)
logger.error("Unable to launch system tray icon: %s." % e)
plexpy.WIN_SYS_TRAY_ICON = None
@ -509,15 +508,15 @@ def schedule_job(func, name, hours=0, minutes=0, seconds=0, args=None):
if job:
if hours == 0 and minutes == 0 and seconds == 0:
SCHED.remove_job(name)
logger.info(u"Removed background task: %s", name)
logger.info("Removed background task: %s", name)
elif job.trigger.interval != datetime.timedelta(hours=hours, minutes=minutes):
SCHED.reschedule_job(name, trigger=IntervalTrigger(
hours=hours, minutes=minutes, seconds=seconds, timezone=pytz.UTC), args=args)
logger.info(u"Re-scheduled background task: %s", name)
logger.info("Re-scheduled background task: %s", name)
elif hours > 0 or minutes > 0 or seconds > 0:
SCHED.add_job(func, id=name, trigger=IntervalTrigger(
hours=hours, minutes=minutes, seconds=seconds, timezone=pytz.UTC), args=args)
logger.info(u"Scheduled background task: %s", name)
logger.info("Scheduled background task: %s", name)
def start():
@ -561,7 +560,7 @@ def start():
def sig_handler(signum=None, frame=None):
if signum is not None:
logger.info(u"Signal %i caught, saving and exiting...", signum)
logger.info("Signal %i caught, saving and exiting...", signum)
shutdown()
@ -781,7 +780,7 @@ def dbcheck():
try:
c_db.execute('SELECT started FROM sessions')
except sqlite3.OperationalError:
logger.debug(u"Altering database. Updating database table sessions.")
logger.debug("Altering database. Updating database table sessions.")
c_db.execute(
'ALTER TABLE sessions ADD COLUMN started INTEGER'
)
@ -802,7 +801,7 @@ def dbcheck():
try:
c_db.execute('SELECT title FROM sessions')
except sqlite3.OperationalError:
logger.debug(u"Altering database. Updating database table sessions.")
logger.debug("Altering database. Updating database table sessions.")
c_db.execute(
'ALTER TABLE sessions ADD COLUMN title TEXT'
)
@ -826,7 +825,7 @@ def dbcheck():
try:
c_db.execute('SELECT ip_address FROM sessions')
except sqlite3.OperationalError:
logger.debug(u"Altering database. Updating database table sessions.")
logger.debug("Altering database. Updating database table sessions.")
c_db.execute(
'ALTER TABLE sessions ADD COLUMN ip_address TEXT'
)
@ -907,7 +906,7 @@ def dbcheck():
try:
c_db.execute('SELECT buffer_count FROM sessions')
except sqlite3.OperationalError:
logger.debug(u"Altering database. Updating database table sessions.")
logger.debug("Altering database. Updating database table sessions.")
c_db.execute(
'ALTER TABLE sessions ADD COLUMN buffer_count INTEGER DEFAULT 0'
)
@ -919,7 +918,7 @@ def dbcheck():
try:
c_db.execute('SELECT last_paused FROM sessions')
except sqlite3.OperationalError:
logger.debug(u"Altering database. Updating database table sessions.")
logger.debug("Altering database. Updating database table sessions.")
c_db.execute(
'ALTER TABLE sessions ADD COLUMN last_paused INTEGER'
)
@ -928,7 +927,7 @@ def dbcheck():
try:
c_db.execute('SELECT section_id FROM sessions')
except sqlite3.OperationalError:
logger.debug(u"Altering database. Updating database table sessions.")
logger.debug("Altering database. Updating database table sessions.")
c_db.execute(
'ALTER TABLE sessions ADD COLUMN section_id INTEGER'
)
@ -937,7 +936,7 @@ def dbcheck():
try:
c_db.execute('SELECT stopped FROM sessions')
except sqlite3.OperationalError:
logger.debug(u"Altering database. Updating database table sessions.")
logger.debug("Altering database. Updating database table sessions.")
c_db.execute(
'ALTER TABLE sessions ADD COLUMN stopped INTEGER'
)
@ -946,7 +945,7 @@ def dbcheck():
try:
c_db.execute('SELECT transcode_key FROM sessions')
except sqlite3.OperationalError:
logger.debug(u"Altering database. Updating database table sessions.")
logger.debug("Altering database. Updating database table sessions.")
c_db.execute(
'ALTER TABLE sessions ADD COLUMN transcode_key TEXT'
)
@ -955,7 +954,7 @@ def dbcheck():
try:
c_db.execute('SELECT write_attempts FROM sessions')
except sqlite3.OperationalError:
logger.debug(u"Altering database. Updating database table sessions.")
logger.debug("Altering database. Updating database table sessions.")
c_db.execute(
'ALTER TABLE sessions ADD COLUMN write_attempts INTEGER DEFAULT 0'
)
@ -964,7 +963,7 @@ def dbcheck():
try:
c_db.execute('SELECT transcode_decision FROM sessions')
except sqlite3.OperationalError:
logger.debug(u"Altering database. Updating database table sessions.")
logger.debug("Altering database. Updating database table sessions.")
c_db.execute(
'ALTER TABLE sessions ADD COLUMN transcode_decision TEXT'
)
@ -994,7 +993,7 @@ def dbcheck():
try:
c_db.execute('SELECT raw_stream_info FROM sessions')
except sqlite3.OperationalError:
logger.debug(u"Altering database. Updating database table sessions.")
logger.debug("Altering database. Updating database table sessions.")
c_db.execute(
'ALTER TABLE sessions ADD COLUMN product TEXT'
)
@ -1084,7 +1083,7 @@ def dbcheck():
try:
c_db.execute('SELECT video_height FROM sessions')
except sqlite3.OperationalError:
logger.debug(u"Altering database. Updating database table sessions.")
logger.debug("Altering database. Updating database table sessions.")
c_db.execute(
'ALTER TABLE sessions ADD COLUMN video_height INTEGER'
)
@ -1093,7 +1092,7 @@ def dbcheck():
try:
c_db.execute('SELECT subtitles FROM sessions')
except sqlite3.OperationalError:
logger.debug(u"Altering database. Updating database table sessions.")
logger.debug("Altering database. Updating database table sessions.")
c_db.execute(
'ALTER TABLE sessions ADD COLUMN subtitles INTEGER'
)
@ -1102,7 +1101,7 @@ def dbcheck():
try:
c_db.execute('SELECT synced_version_profile FROM sessions')
except sqlite3.OperationalError:
logger.debug(u"Altering database. Updating database table sessions.")
logger.debug("Altering database. Updating database table sessions.")
c_db.execute(
'ALTER TABLE sessions ADD COLUMN synced_version_profile TEXT'
)
@ -1114,7 +1113,7 @@ def dbcheck():
try:
c_db.execute('SELECT transcode_hw_decoding FROM sessions')
except sqlite3.OperationalError:
logger.debug(u"Altering database. Updating database table sessions.")
logger.debug("Altering database. Updating database table sessions.")
c_db.execute(
'ALTER TABLE sessions ADD COLUMN transcode_hw_decoding INTEGER'
)
@ -1126,7 +1125,7 @@ def dbcheck():
try:
c_db.execute('SELECT watched FROM sessions')
except sqlite3.OperationalError:
logger.debug(u"Altering database. Updating database table sessions.")
logger.debug("Altering database. Updating database table sessions.")
c_db.execute(
'ALTER TABLE sessions ADD COLUMN watched INTEGER DEFAULT 0'
)
@ -1135,7 +1134,7 @@ def dbcheck():
try:
c_db.execute('SELECT live FROM sessions')
except sqlite3.OperationalError:
logger.debug(u"Altering database. Updating database table sessions.")
logger.debug("Altering database. Updating database table sessions.")
c_db.execute(
'ALTER TABLE sessions ADD COLUMN live INTEGER'
)
@ -1147,7 +1146,7 @@ def dbcheck():
try:
c_db.execute('SELECT session_id FROM sessions')
except sqlite3.OperationalError:
logger.debug(u"Altering database. Updating database table sessions.")
logger.debug("Altering database. Updating database table sessions.")
c_db.execute(
'ALTER TABLE sessions ADD COLUMN session_id TEXT'
)
@ -1156,7 +1155,7 @@ def dbcheck():
try:
c_db.execute('SELECT original_title FROM sessions')
except sqlite3.OperationalError:
logger.debug(u"Altering database. Updating database table sessions.")
logger.debug("Altering database. Updating database table sessions.")
c_db.execute(
'ALTER TABLE sessions ADD COLUMN original_title TEXT'
)
@ -1165,7 +1164,7 @@ def dbcheck():
try:
c_db.execute('SELECT secure FROM sessions')
except sqlite3.OperationalError:
logger.debug(u"Altering database. Updating database table sessions.")
logger.debug("Altering database. Updating database table sessions.")
c_db.execute(
'ALTER TABLE sessions ADD COLUMN secure INTEGER'
)
@ -1177,7 +1176,7 @@ def dbcheck():
try:
c_db.execute('SELECT rating_key_websocket FROM sessions')
except sqlite3.OperationalError:
logger.debug(u"Altering database. Updating database table sessions.")
logger.debug("Altering database. Updating database table sessions.")
c_db.execute(
'ALTER TABLE sessions ADD COLUMN rating_key_websocket TEXT'
)
@ -1186,7 +1185,7 @@ def dbcheck():
try:
c_db.execute('SELECT video_scan_type FROM sessions')
except sqlite3.OperationalError:
logger.debug(u"Altering database. Updating database table sessions.")
logger.debug("Altering database. Updating database table sessions.")
c_db.execute(
'ALTER TABLE sessions ADD COLUMN video_scan_type TEXT'
)
@ -1204,7 +1203,7 @@ def dbcheck():
try:
c_db.execute('SELECT reference_id FROM session_history')
except sqlite3.OperationalError:
logger.debug(u"Altering database. Updating database table session_history.")
logger.debug("Altering database. Updating database table session_history.")
c_db.execute(
'ALTER TABLE session_history ADD COLUMN reference_id INTEGER DEFAULT 0'
)
@ -1227,7 +1226,7 @@ def dbcheck():
try:
c_db.execute('SELECT bandwidth FROM session_history')
except sqlite3.OperationalError:
logger.debug(u"Altering database. Updating database table session_history.")
logger.debug("Altering database. Updating database table session_history.")
c_db.execute(
'ALTER TABLE session_history ADD COLUMN platform_version TEXT'
)
@ -1254,7 +1253,7 @@ def dbcheck():
try:
c_db.execute('SELECT secure FROM session_history')
except sqlite3.OperationalError:
logger.debug(u"Altering database. Updating database table session_history.")
logger.debug("Altering database. Updating database table session_history.")
c_db.execute(
'ALTER TABLE session_history ADD COLUMN secure INTEGER'
)
@ -1266,7 +1265,7 @@ def dbcheck():
try:
c_db.execute('SELECT full_title FROM session_history_metadata')
except sqlite3.OperationalError:
logger.debug(u"Altering database. Updating database table session_history_metadata.")
logger.debug("Altering database. Updating database table session_history_metadata.")
c_db.execute(
'ALTER TABLE session_history_metadata ADD COLUMN full_title TEXT'
)
@ -1275,7 +1274,7 @@ def dbcheck():
try:
c_db.execute('SELECT tagline FROM session_history_metadata')
except sqlite3.OperationalError:
logger.debug(u"Altering database. Updating database table session_history_metadata.")
logger.debug("Altering database. Updating database table session_history_metadata.")
c_db.execute(
'ALTER TABLE session_history_metadata ADD COLUMN tagline TEXT'
)
@ -1284,7 +1283,7 @@ def dbcheck():
try:
c_db.execute('SELECT section_id FROM session_history_metadata')
except sqlite3.OperationalError:
logger.debug(u"Altering database. Updating database table session_history_metadata.")
logger.debug("Altering database. Updating database table session_history_metadata.")
c_db.execute(
'ALTER TABLE session_history_metadata ADD COLUMN section_id INTEGER'
)
@ -1293,7 +1292,7 @@ def dbcheck():
try:
c_db.execute('SELECT labels FROM session_history_metadata')
except sqlite3.OperationalError:
logger.debug(u"Altering database. Updating database table session_history_metadata.")
logger.debug("Altering database. Updating database table session_history_metadata.")
c_db.execute(
'ALTER TABLE session_history_metadata ADD COLUMN labels TEXT'
)
@ -1302,7 +1301,7 @@ def dbcheck():
try:
c_db.execute('SELECT original_title FROM session_history_metadata')
except sqlite3.OperationalError:
logger.debug(u"Altering database. Updating database table session_history_metadata.")
logger.debug("Altering database. Updating database table session_history_metadata.")
c_db.execute(
'ALTER TABLE session_history_metadata ADD COLUMN original_title TEXT'
)
@ -1311,7 +1310,7 @@ def dbcheck():
try:
c_db.execute('SELECT transcode_decision FROM session_history_media_info')
except sqlite3.OperationalError:
logger.debug(u"Altering database. Updating database table session_history_media_info.")
logger.debug("Altering database. Updating database table session_history_media_info.")
c_db.execute(
'ALTER TABLE session_history_media_info ADD COLUMN transcode_decision TEXT'
)
@ -1326,7 +1325,7 @@ def dbcheck():
try:
c_db.execute('SELECT subtitles FROM session_history_media_info')
except sqlite3.OperationalError:
logger.debug(u"Altering database. Updating database table session_history_media_info.")
logger.debug("Altering database. Updating database table session_history_media_info.")
c_db.execute(
'ALTER TABLE session_history_media_info ADD COLUMN video_bit_depth INTEGER'
)
@ -1446,7 +1445,7 @@ def dbcheck():
try:
c_db.execute('SELECT subtitle_codec FROM session_history_media_info')
except sqlite3.OperationalError:
logger.debug(u"Altering database. Updating database table session_history_media_info.")
logger.debug("Altering database. Updating database table session_history_media_info.")
c_db.execute(
'ALTER TABLE session_history_media_info ADD COLUMN subtitle_codec TEXT '
)
@ -1455,7 +1454,7 @@ def dbcheck():
try:
c_db.execute('SELECT synced_version_profile FROM session_history_media_info')
except sqlite3.OperationalError:
logger.debug(u"Altering database. Updating database table session_history_media_info.")
logger.debug("Altering database. Updating database table session_history_media_info.")
c_db.execute(
'ALTER TABLE session_history_media_info ADD COLUMN synced_version_profile TEXT '
)
@ -1467,7 +1466,7 @@ def dbcheck():
try:
c_db.execute('SELECT transcode_hw_decoding FROM session_history_media_info')
except sqlite3.OperationalError:
logger.debug(u"Altering database. Updating database table session_history_media_info.")
logger.debug("Altering database. Updating database table session_history_media_info.")
c_db.execute(
'ALTER TABLE session_history_media_info ADD COLUMN transcode_hw_decoding INTEGER '
)
@ -1483,7 +1482,7 @@ def dbcheck():
result = c_db.execute('SELECT stream_container FROM session_history_media_info '
'WHERE stream_container IS NULL').fetchall()
if len(result) > 0:
logger.debug(u"Altering database. Removing NULL values from session_history_media_info table.")
logger.debug("Altering database. Removing NULL values from session_history_media_info table.")
c_db.execute(
'UPDATE session_history_media_info SET stream_container = "" WHERE stream_container IS NULL '
)
@ -1497,13 +1496,13 @@ def dbcheck():
'UPDATE session_history_media_info SET stream_subtitle_codec = "" WHERE stream_subtitle_codec IS NULL '
)
except sqlite3.OperationalError:
logger.warn(u"Unable to remove NULL values from session_history_media_info table.")
logger.warn("Unable to remove NULL values from session_history_media_info table.")
# Upgrade session_history_media_info table from earlier versions
try:
c_db.execute('SELECT video_scan_type FROM session_history_media_info')
except sqlite3.OperationalError:
logger.debug(u"Altering database. Updating database table session_history_media_info.")
logger.debug("Altering database. Updating database table session_history_media_info.")
c_db.execute(
'ALTER TABLE session_history_media_info ADD COLUMN video_scan_type TEXT'
)
@ -1542,7 +1541,7 @@ def dbcheck():
try:
c_db.execute('SELECT do_notify FROM users')
except sqlite3.OperationalError:
logger.debug(u"Altering database. Updating database table users.")
logger.debug("Altering database. Updating database table users.")
c_db.execute(
'ALTER TABLE users ADD COLUMN do_notify INTEGER DEFAULT 1'
)
@ -1551,7 +1550,7 @@ def dbcheck():
try:
c_db.execute('SELECT keep_history FROM users')
except sqlite3.OperationalError:
logger.debug(u"Altering database. Updating database table users.")
logger.debug("Altering database. Updating database table users.")
c_db.execute(
'ALTER TABLE users ADD COLUMN keep_history INTEGER DEFAULT 1'
)
@ -1560,7 +1559,7 @@ def dbcheck():
try:
c_db.execute('SELECT custom_avatar_url FROM users')
except sqlite3.OperationalError:
logger.debug(u"Altering database. Updating database table users.")
logger.debug("Altering database. Updating database table users.")
c_db.execute(
'ALTER TABLE users ADD COLUMN custom_avatar_url TEXT'
)
@ -1569,7 +1568,7 @@ def dbcheck():
try:
c_db.execute('SELECT deleted_user FROM users')
except sqlite3.OperationalError:
logger.debug(u"Altering database. Updating database table users.")
logger.debug("Altering database. Updating database table users.")
c_db.execute(
'ALTER TABLE users ADD COLUMN deleted_user INTEGER DEFAULT 0'
)
@ -1578,7 +1577,7 @@ def dbcheck():
try:
c_db.execute('SELECT allow_guest FROM users')
except sqlite3.OperationalError:
logger.debug(u"Altering database. Updating database table users.")
logger.debug("Altering database. Updating database table users.")
c_db.execute(
'ALTER TABLE users ADD COLUMN allow_guest INTEGER DEFAULT 0'
)
@ -1593,7 +1592,7 @@ def dbcheck():
try:
c_db.execute('SELECT shared_libraries FROM users')
except sqlite3.OperationalError:
logger.debug(u"Altering database. Updating database table users.")
logger.debug("Altering database. Updating database table users.")
c_db.execute(
'ALTER TABLE users ADD COLUMN shared_libraries TEXT'
)
@ -1617,7 +1616,7 @@ def dbcheck():
try:
c_db.execute('SELECT is_admin FROM users')
except sqlite3.OperationalError:
logger.debug(u"Altering database. Updating database table users.")
logger.debug("Altering database. Updating database table users.")
c_db.execute(
'ALTER TABLE users ADD COLUMN is_admin INTEGER DEFAULT 0'
)
@ -1626,7 +1625,7 @@ def dbcheck():
try:
c_db.execute('SELECT poster_url FROM notify_log')
except sqlite3.OperationalError:
logger.debug(u"Altering database. Updating database table notify_log.")
logger.debug("Altering database. Updating database table notify_log.")
c_db.execute(
'ALTER TABLE notify_log ADD COLUMN poster_url TEXT'
)
@ -1635,7 +1634,7 @@ def dbcheck():
try:
c_db.execute('SELECT timestamp FROM notify_log')
except sqlite3.OperationalError:
logger.debug(u"Altering database. Updating database table notify_log.")
logger.debug("Altering database. Updating database table notify_log.")
c_db.execute(
'CREATE TABLE IF NOT EXISTS notify_log_temp (id INTEGER PRIMARY KEY AUTOINCREMENT, timestamp INTEGER, '
'session_key INTEGER, rating_key INTEGER, parent_rating_key INTEGER, grandparent_rating_key INTEGER, '
@ -1673,7 +1672,7 @@ def dbcheck():
try:
c_db.execute('SELECT notifier_id FROM notify_log')
except sqlite3.OperationalError:
logger.debug(u"Altering database. Updating database table notify_log.")
logger.debug("Altering database. Updating database table notify_log.")
c_db.execute(
'ALTER TABLE notify_log ADD COLUMN notifier_id INTEGER'
)
@ -1682,7 +1681,7 @@ def dbcheck():
try:
c_db.execute('SELECT success FROM notify_log')
except sqlite3.OperationalError:
logger.debug(u"Altering database. Updating database table notify_log.")
logger.debug("Altering database. Updating database table notify_log.")
c_db.execute(
'ALTER TABLE notify_log ADD COLUMN success INTEGER DEFAULT 0'
)
@ -1694,7 +1693,7 @@ def dbcheck():
try:
c_db.execute('SELECT start_time FROM newsletter_log')
except sqlite3.OperationalError:
logger.debug(u"Altering database. Updating database table newsletter_log.")
logger.debug("Altering database. Updating database table newsletter_log.")
c_db.execute(
'ALTER TABLE newsletter_log ADD COLUMN start_time INTEGER'
)
@ -1706,7 +1705,7 @@ def dbcheck():
try:
c_db.execute('SELECT filename FROM newsletter_log')
except sqlite3.OperationalError:
logger.debug(u"Altering database. Updating database table newsletter_log.")
logger.debug("Altering database. Updating database table newsletter_log.")
c_db.execute(
'ALTER TABLE newsletter_log ADD COLUMN filename TEXT'
)
@ -1715,7 +1714,7 @@ def dbcheck():
try:
c_db.execute('SELECT email_msg_id FROM newsletter_log')
except sqlite3.OperationalError:
logger.debug(u"Altering database. Updating database table newsletter_log.")
logger.debug("Altering database. Updating database table newsletter_log.")
c_db.execute(
'ALTER TABLE newsletter_log ADD COLUMN email_msg_id TEXT'
)
@ -1724,7 +1723,7 @@ def dbcheck():
try:
c_db.execute('SELECT id_name FROM newsletters')
except sqlite3.OperationalError:
logger.debug(u"Altering database. Updating database table newsletters.")
logger.debug("Altering database. Updating database table newsletters.")
c_db.execute(
'ALTER TABLE newsletters ADD COLUMN id_name TEXT NOT NULL DEFAULT ""'
)
@ -1733,7 +1732,7 @@ def dbcheck():
try:
result = c_db.execute('SELECT SQL FROM sqlite_master WHERE type="table" AND name="library_sections"').fetchone()
if 'section_id INTEGER UNIQUE' in result[0]:
logger.debug(u"Altering database. Removing unique constraint on section_id from library_sections table.")
logger.debug("Altering database. Removing unique constraint on section_id from library_sections table.")
c_db.execute(
'CREATE TABLE library_sections_temp (id INTEGER PRIMARY KEY AUTOINCREMENT, '
'server_id TEXT, section_id INTEGER, section_name TEXT, section_type TEXT, '
@ -1757,7 +1756,7 @@ def dbcheck():
'ALTER TABLE library_sections_temp RENAME TO library_sections'
)
except sqlite3.OperationalError:
logger.warn(u"Unable to remove section_id unique constraint from library_sections.")
logger.warn("Unable to remove section_id unique constraint from library_sections.")
try:
c_db.execute(
'DROP TABLE library_sections_temp'
@ -1769,18 +1768,18 @@ def dbcheck():
try:
result = c_db.execute('SELECT * FROM library_sections WHERE server_id = ""').fetchall()
if len(result) > 0:
logger.debug(u"Altering database. Removing duplicate libraries from library_sections table.")
logger.debug("Altering database. Removing duplicate libraries from library_sections table.")
c_db.execute(
'DELETE FROM library_sections WHERE server_id = ""'
)
except sqlite3.OperationalError:
logger.warn(u"Unable to remove duplicate libraries from library_sections table.")
logger.warn("Unable to remove duplicate libraries from library_sections table.")
# Upgrade library_sections table from earlier versions
try:
c_db.execute('SELECT agent FROM library_sections')
except sqlite3.OperationalError:
logger.debug(u"Altering database. Updating database table library_sections.")
logger.debug("Altering database. Updating database table library_sections.")
c_db.execute(
'ALTER TABLE library_sections ADD COLUMN agent TEXT'
)
@ -1789,7 +1788,7 @@ def dbcheck():
try:
result = c_db.execute('SELECT SQL FROM sqlite_master WHERE type="table" AND name="users"').fetchone()
if 'username TEXT NOT NULL UNIQUE' in result[0]:
logger.debug(u"Altering database. Removing unique constraint on username from users table.")
logger.debug("Altering database. Removing unique constraint on username from users table.")
c_db.execute(
'CREATE TABLE users_temp (id INTEGER PRIMARY KEY AUTOINCREMENT, '
'user_id INTEGER DEFAULT NULL UNIQUE, username TEXT NOT NULL, friendly_name TEXT, '
@ -1811,7 +1810,7 @@ def dbcheck():
'ALTER TABLE users_temp RENAME TO users'
)
except sqlite3.OperationalError:
logger.warn(u"Unable to remove username unique constraint from users.")
logger.warn("Unable to remove username unique constraint from users.")
try:
c_db.execute(
'DROP TABLE users_temp'
@ -1823,7 +1822,7 @@ def dbcheck():
try:
result = c_db.execute('SELECT SQL FROM sqlite_master WHERE type="table" AND name="mobile_devices"').fetchone()
if 'device_token TEXT NOT NULL UNIQUE' in result[0]:
logger.debug(u"Altering database. Dropping and recreating mobile_devices table.")
logger.debug("Altering database. Dropping and recreating mobile_devices table.")
c_db.execute(
'DROP TABLE mobile_devices'
)
@ -1832,14 +1831,14 @@ def dbcheck():
'device_id TEXT NOT NULL UNIQUE, device_token TEXT, device_name TEXT, friendly_name TEXT)'
)
except sqlite3.OperationalError:
logger.warn(u"Failed to recreate mobile_devices table.")
logger.warn("Failed to recreate mobile_devices table.")
pass
# Upgrade mobile_devices table from earlier versions
try:
c_db.execute('SELECT last_seen FROM mobile_devices')
except sqlite3.OperationalError:
logger.debug(u"Altering database. Updating database table mobile_devices.")
logger.debug("Altering database. Updating database table mobile_devices.")
c_db.execute(
'ALTER TABLE mobile_devices ADD COLUMN last_seen INTEGER'
)
@ -1848,7 +1847,7 @@ def dbcheck():
try:
c_db.execute('SELECT custom_conditions FROM notifiers')
except sqlite3.OperationalError:
logger.debug(u"Altering database. Updating database table notifiers.")
logger.debug("Altering database. Updating database table notifiers.")
c_db.execute(
'ALTER TABLE notifiers ADD COLUMN custom_conditions TEXT'
)
@ -1860,7 +1859,7 @@ def dbcheck():
try:
c_db.execute('SELECT on_change FROM notifiers')
except sqlite3.OperationalError:
logger.debug(u"Altering database. Updating database table notifiers.")
logger.debug("Altering database. Updating database table notifiers.")
c_db.execute(
'ALTER TABLE notifiers ADD COLUMN on_change INTEGER DEFAULT 0'
)
@ -1875,7 +1874,7 @@ def dbcheck():
try:
c_db.execute('SELECT rating_key FROM tvmaze_lookup')
except sqlite3.OperationalError:
logger.debug(u"Altering database. Updating database table tvmaze_lookup.")
logger.debug("Altering database. Updating database table tvmaze_lookup.")
c_db.execute(
'ALTER TABLE tvmaze_lookup ADD COLUMN rating_key INTEGER'
)
@ -1890,7 +1889,7 @@ def dbcheck():
try:
c_db.execute('SELECT rating_key FROM themoviedb_lookup')
except sqlite3.OperationalError:
logger.debug(u"Altering database. Updating database table themoviedb_lookup.")
logger.debug("Altering database. Updating database table themoviedb_lookup.")
c_db.execute(
'ALTER TABLE themoviedb_lookup ADD COLUMN rating_key INTEGER'
)
@ -1905,7 +1904,7 @@ def dbcheck():
try:
c_db.execute('SELECT success FROM user_login')
except sqlite3.OperationalError:
logger.debug(u"Altering database. Updating database table user_login.")
logger.debug("Altering database. Updating database table user_login.")
c_db.execute(
'ALTER TABLE user_login ADD COLUMN success INTEGER DEFAULT 1'
)
@ -1914,7 +1913,7 @@ def dbcheck():
result = c_db.execute('SELECT agent_label FROM notifiers '
'WHERE agent_label = "XBMC" OR agent_label = "OSX Notify"').fetchone()
if result:
logger.debug(u"Altering database. Renaming notifiers.")
logger.debug("Altering database. Renaming notifiers.")
c_db.execute(
'UPDATE notifiers SET agent_label = "Kodi" WHERE agent_label = "XBMC"'
)
@ -1925,7 +1924,7 @@ def dbcheck():
# Add "Local" user to database as default unauthenticated user.
result = c_db.execute('SELECT id FROM users WHERE username = "Local"')
if not result.fetchone():
logger.debug(u"User 'Local' does not exist. Adding user.")
logger.debug("User 'Local' does not exist. Adding user.")
c_db.execute('INSERT INTO users (user_id, username) VALUES (0, "Local")')
# Create table indices
@ -1948,7 +1947,7 @@ def dbcheck():
result = db.select('SELECT SQL FROM sqlite_master WHERE type="table" AND name="poster_urls"')
if result:
result = db.select('SELECT * FROM poster_urls')
logger.debug(u"Altering database. Updating database table imgur_lookup.")
logger.debug("Altering database. Updating database table imgur_lookup.")
data_factory = datafactory.DataFactory()
@ -1990,31 +1989,31 @@ def shutdown(restart=False, update=False, checkout=False):
CONFIG.write()
if not restart and not update and not checkout:
logger.info(u"Tautulli is shutting down...")
logger.info("Tautulli is shutting down...")
if update:
logger.info(u"Tautulli is updating...")
logger.info("Tautulli is updating...")
try:
versioncheck.update()
except Exception as e:
logger.warn(u"Tautulli failed to update: %s. Restarting." % e)
logger.warn("Tautulli failed to update: %s. Restarting." % e)
if checkout:
logger.info(u"Tautulli is switching the git branch...")
logger.info("Tautulli is switching the git branch...")
try:
versioncheck.checkout_git_branch()
except Exception as e:
logger.warn(u"Tautulli failed to switch git branch: %s. Restarting." % e)
logger.warn("Tautulli failed to switch git branch: %s. Restarting." % e)
if CREATEPID:
logger.info(u"Removing pidfile %s", PIDFILE)
logger.info("Removing pidfile %s", PIDFILE)
os.remove(PIDFILE)
if WIN_SYS_TRAY_ICON:
WIN_SYS_TRAY_ICON.shutdown()
if restart:
logger.info(u"Tautulli is restarting...")
logger.info("Tautulli is restarting...")
exe = sys.executable
args = [exe, FULL_PATH]
@ -2088,7 +2087,7 @@ def analytics_event(category, action, label=None, value=None, **kwargs):
try:
TRACKER.send('event', data)
except Exception as e:
logger.warn(u"Failed to send analytics event for category '%s', action '%s': %s" % (category, action, e))
logger.warn("Failed to send analytics event for category '%s', action '%s': %s" % (category, action, e))
def check_folder_writable(folder, fallback, name):
@ -2099,17 +2098,17 @@ def check_folder_writable(folder, fallback, name):
try:
os.makedirs(folder)
except OSError as e:
logger.error(u"Could not create %s dir '%s': %s" % (name, folder, e))
logger.error("Could not create %s dir '%s': %s" % (name, folder, e))
if folder != fallback:
logger.warn(u"Falling back to %s dir '%s'" % (name, fallback))
logger.warn("Falling back to %s dir '%s'" % (name, fallback))
return check_folder_writable(None, fallback, name)
else:
return folder, None
if not os.access(folder, os.W_OK):
logger.error(u"Cannot write to %s dir '%s'" % (name, folder))
logger.error("Cannot write to %s dir '%s'" % (name, folder))
if folder != fallback:
logger.warn(u"Falling back to %s dir '%s'" % (name, fallback))
logger.warn("Falling back to %s dir '%s'" % (name, fallback))
return check_folder_writable(None, fallback, name)
else:
return folder, False

View file

@ -107,7 +107,7 @@ class ActivityHandler(object):
if not session:
return
logger.debug(u"Tautulli ActivityHandler :: Session %s started by user %s (%s) with ratingKey %s (%s)."
logger.debug("Tautulli ActivityHandler :: Session %s started by user %s (%s) with ratingKey %s (%s)."
% (str(session['session_key']), str(session['user_id']), session['username'],
str(session['rating_key']), session['full_title']))
@ -124,7 +124,7 @@ class ActivityHandler(object):
def on_stop(self, force_stop=False):
if self.is_valid_session():
logger.debug(u"Tautulli ActivityHandler :: Session %s %sstopped."
logger.debug("Tautulli ActivityHandler :: Session %s %sstopped."
% (str(self.get_session_key()), 'force ' if force_stop else ''))
# Set the session last_paused timestamp
@ -149,7 +149,7 @@ class ActivityHandler(object):
schedule_callback('session_key-{}'.format(self.get_session_key()), remove_job=True)
# Remove the session from our temp session table
logger.debug(u"Tautulli ActivityHandler :: Removing sessionKey %s ratingKey %s from session queue"
logger.debug("Tautulli ActivityHandler :: Removing sessionKey %s ratingKey %s from session queue"
% (str(self.get_session_key()), str(self.get_rating_key())))
ap.delete_session(row_id=row_id)
delete_metadata_cache(self.get_session_key())
@ -162,7 +162,7 @@ class ActivityHandler(object):
def on_pause(self, still_paused=False):
if self.is_valid_session():
if not still_paused:
logger.debug(u"Tautulli ActivityHandler :: Session %s paused." % str(self.get_session_key()))
logger.debug("Tautulli ActivityHandler :: Session %s paused." % str(self.get_session_key()))
# Set the session last_paused timestamp
ap = activity_processor.ActivityProcessor()
@ -179,7 +179,7 @@ class ActivityHandler(object):
def on_resume(self):
if self.is_valid_session():
logger.debug(u"Tautulli ActivityHandler :: Session %s resumed." % str(self.get_session_key()))
logger.debug("Tautulli ActivityHandler :: Session %s resumed." % str(self.get_session_key()))
# Set the session last_paused timestamp
ap = activity_processor.ActivityProcessor()
@ -195,7 +195,7 @@ class ActivityHandler(object):
def on_change(self):
if self.is_valid_session():
logger.debug(u"Tautulli ActivityHandler :: Session %s has changed transcode decision." % str(self.get_session_key()))
logger.debug("Tautulli ActivityHandler :: Session %s has changed transcode decision." % str(self.get_session_key()))
# Update the session state and viewOffset
self.update_db_session()
@ -208,7 +208,7 @@ class ActivityHandler(object):
def on_buffer(self):
if self.is_valid_session():
logger.debug(u"Tautulli ActivityHandler :: Session %s is buffering." % self.get_session_key())
logger.debug("Tautulli ActivityHandler :: Session %s is buffering." % self.get_session_key())
ap = activity_processor.ActivityProcessor()
db_stream = ap.get_session_by_key(session_key=self.get_session_key())
@ -217,7 +217,7 @@ class ActivityHandler(object):
# Get our current buffer count
current_buffer_count = ap.get_session_buffer_count(self.get_session_key())
logger.debug(u"Tautulli ActivityHandler :: Session %s buffer count is %s." %
logger.debug("Tautulli ActivityHandler :: Session %s buffer count is %s." %
(self.get_session_key(), current_buffer_count))
# Get our last triggered time
@ -228,7 +228,7 @@ class ActivityHandler(object):
time_since_last_trigger = None
if buffer_last_triggered:
logger.debug(u"Tautulli ActivityHandler :: Session %s buffer last triggered at %s." %
logger.debug("Tautulli ActivityHandler :: Session %s buffer last triggered at %s." %
(self.get_session_key(), buffer_last_triggered))
time_since_last_trigger = int(time.time()) - int(buffer_last_triggered)
@ -318,7 +318,7 @@ class ActivityHandler(object):
}
if progress_percent >= watched_percent.get(db_session['media_type'], 101):
logger.debug(u"Tautulli ActivityHandler :: Session %s watched."
logger.debug("Tautulli ActivityHandler :: Session %s watched."
% str(self.get_session_key()))
ap.set_watched(session_key=self.get_session_key())
@ -413,7 +413,7 @@ class TimelineHandler(object):
RECENTLY_ADDED_QUEUE[rating_key] = set([grandparent_rating_key])
logger.debug(u"Tautulli TimelineHandler :: Library item '%s' (%s, grandparent %s) added to recently added queue."
logger.debug("Tautulli TimelineHandler :: Library item '%s' (%s, grandparent %s) added to recently added queue."
% (title, str(rating_key), str(grandparent_rating_key)))
# Schedule a callback to clear the recently added queue
@ -432,7 +432,7 @@ class TimelineHandler(object):
parent_set.add(rating_key)
RECENTLY_ADDED_QUEUE[parent_rating_key] = parent_set
logger.debug(u"Tautulli TimelineHandler :: Library item '%s' (%s , parent %s) added to recently added queue."
logger.debug("Tautulli TimelineHandler :: Library item '%s' (%s , parent %s) added to recently added queue."
% (title, str(rating_key), str(parent_rating_key)))
# Schedule a callback to clear the recently added queue
@ -445,7 +445,7 @@ class TimelineHandler(object):
queue_set = RECENTLY_ADDED_QUEUE.get(rating_key, set())
RECENTLY_ADDED_QUEUE[rating_key] = queue_set
logger.debug(u"Tautulli TimelineHandler :: Library item '%s' (%s) added to recently added queue."
logger.debug("Tautulli TimelineHandler :: Library item '%s' (%s) added to recently added queue."
% (title, str(rating_key)))
# Schedule a callback to clear the recently added queue
@ -459,13 +459,13 @@ class TimelineHandler(object):
state_type == 5 and metadata_state is None and queue_size is None and \
rating_key in RECENTLY_ADDED_QUEUE:
logger.debug(u"Tautulli TimelineHandler :: Library item '%s' (%s) done processing metadata."
logger.debug("Tautulli TimelineHandler :: Library item '%s' (%s) done processing metadata."
% (title, str(rating_key)))
# An item was deleted, make sure it is removed from the queue
elif state_type == 9 and metadata_state == 'deleted':
if rating_key in RECENTLY_ADDED_QUEUE and not RECENTLY_ADDED_QUEUE[rating_key]:
logger.debug(u"Tautulli TimelineHandler :: Library item %s removed from recently added queue."
logger.debug("Tautulli TimelineHandler :: Library item %s removed from recently added queue."
% str(rating_key))
del_keys(rating_key)
@ -505,7 +505,7 @@ def force_stop_stream(session_key, title, user):
if row_id:
# If session is written to the database successfully, remove the session from the session table
logger.info(u"Tautulli ActivityHandler :: Removing stale stream with sessionKey %s ratingKey %s from session queue"
logger.info("Tautulli ActivityHandler :: Removing stale stream with sessionKey %s ratingKey %s from session queue"
% (session['session_key'], session['rating_key']))
ap.delete_session(row_id=row_id)
delete_metadata_cache(session_key)
@ -514,7 +514,7 @@ def force_stop_stream(session_key, title, user):
session['write_attempts'] += 1
if session['write_attempts'] < plexpy.CONFIG.SESSION_DB_WRITE_ATTEMPTS:
logger.warn(u"Tautulli ActivityHandler :: Failed to write stream with sessionKey %s ratingKey %s to the database. " \
logger.warn("Tautulli ActivityHandler :: Failed to write stream with sessionKey %s ratingKey %s to the database. " \
"Will try again in 30 seconds. Write attempt %s."
% (session['session_key'], session['rating_key'], str(session['write_attempts'])))
ap.increment_write_attempts(session_key=session_key)
@ -524,10 +524,10 @@ def force_stop_stream(session_key, title, user):
args=[session_key, session['full_title'], session['user']], seconds=30)
else:
logger.warn(u"Tautulli ActivityHandler :: Failed to write stream with sessionKey %s ratingKey %s to the database. " \
logger.warn("Tautulli ActivityHandler :: Failed to write stream with sessionKey %s ratingKey %s to the database. " \
"Removing session from the database. Write attempt %s."
% (session['session_key'], session['rating_key'], str(session['write_attempts'])))
logger.info(u"Tautulli ActivityHandler :: Removing stale stream with sessionKey %s ratingKey %s from session queue"
logger.info("Tautulli ActivityHandler :: Removing stale stream with sessionKey %s ratingKey %s from session queue"
% (session['session_key'], session['rating_key']))
ap.delete_session(session_key=session_key)
delete_metadata_cache(session_key)
@ -561,7 +561,7 @@ def clear_recently_added_queue(rating_key, title):
def on_created(rating_key, **kwargs):
logger.debug(u"Tautulli TimelineHandler :: Library item %s added to Plex." % str(rating_key))
logger.debug("Tautulli TimelineHandler :: Library item %s added to Plex." % str(rating_key))
pms_connect = pmsconnect.PmsConnect()
metadata = pms_connect.get_metadata_details(rating_key)
@ -570,14 +570,14 @@ def on_created(rating_key, **kwargs):
# now = int(time.time())
#
# if helpers.cast_to_int(metadata['added_at']) < now - 86400: # Updated more than 24 hours ago
# logger.debug(u"Tautulli TimelineHandler :: Library item %s added more than 24 hours ago. Not notifying."
# logger.debug("Tautulli TimelineHandler :: Library item %s added more than 24 hours ago. Not notifying."
# % str(rating_key))
# notify = False
data_factory = datafactory.DataFactory()
if 'child_keys' not in kwargs:
if data_factory.get_recently_added_item(rating_key):
logger.debug(u"Tautulli TimelineHandler :: Library item %s added already. Not notifying again."
logger.debug("Tautulli TimelineHandler :: Library item %s added already. Not notifying again."
% str(rating_key))
notify = False
@ -593,15 +593,15 @@ def on_created(rating_key, **kwargs):
for key in all_keys:
data_factory.set_recently_added_item(key)
logger.debug(u"Added %s items to the recently_added database table." % str(len(all_keys)))
logger.debug("Added %s items to the recently_added database table." % str(len(all_keys)))
else:
logger.error(u"Tautulli TimelineHandler :: Unable to retrieve metadata for rating_key %s" % str(rating_key))
logger.error("Tautulli TimelineHandler :: Unable to retrieve metadata for rating_key %s" % str(rating_key))
def delete_metadata_cache(session_key):
try:
os.remove(os.path.join(plexpy.CONFIG.CACHE_DIR, 'session_metadata/metadata-sessionKey-%s.json' % session_key))
except OSError as e:
logger.error(u"Tautulli ActivityHandler :: Failed to remove metadata cache file (sessionKey %s): %s"
logger.error("Tautulli ActivityHandler :: Failed to remove metadata cache file (sessionKey %s): %s"
% (session_key, e))

View file

@ -48,7 +48,7 @@ def check_active_sessions(ws_request=False):
pms_connect = pmsconnect.PmsConnect()
session_list = pms_connect.get_current_activity()
logger.debug(u"Tautulli Monitor :: Checking for active streams.")
logger.debug("Tautulli Monitor :: Checking for active streams.")
if session_list:
media_container = session_list['sessions']
@ -65,12 +65,12 @@ def check_active_sessions(ws_request=False):
# Here we can check the play states
if session['state'] != stream['state']:
if session['state'] == 'paused':
logger.debug(u"Tautulli Monitor :: Session %s paused." % stream['session_key'])
logger.debug("Tautulli Monitor :: Session %s paused." % stream['session_key'])
plexpy.NOTIFY_QUEUE.put({'stream_data': stream.copy(), 'notify_action': 'on_pause'})
if session['state'] == 'playing' and stream['state'] == 'paused':
logger.debug(u"Tautulli Monitor :: Session %s resumed." % stream['session_key'])
logger.debug("Tautulli Monitor :: Session %s resumed." % stream['session_key'])
plexpy.NOTIFY_QUEUE.put({'stream_data': stream.copy(), 'notify_action': 'on_resume'})
@ -102,7 +102,7 @@ def check_active_sessions(ws_request=False):
# Push it on its own thread so we don't hold up our db actions
# Our first buffer notification
if buffer_values[0]['buffer_count'] == plexpy.CONFIG.BUFFER_THRESHOLD:
logger.info(u"Tautulli Monitor :: User '%s' has triggered a buffer warning."
logger.info("Tautulli Monitor :: User '%s' has triggered a buffer warning."
% stream['user'])
# Set the buffer trigger time
monitor_db.action('UPDATE sessions '
@ -116,7 +116,7 @@ def check_active_sessions(ws_request=False):
# Subsequent buffer notifications after wait time
if int(time.time()) > buffer_values[0]['buffer_last_triggered'] + \
plexpy.CONFIG.BUFFER_WAIT:
logger.info(u"Tautulli Monitor :: User '%s' has triggered multiple buffer warnings."
logger.info("Tautulli Monitor :: User '%s' has triggered multiple buffer warnings."
% stream['user'])
# Set the buffer trigger time
monitor_db.action('UPDATE sessions '
@ -126,7 +126,7 @@ def check_active_sessions(ws_request=False):
plexpy.NOTIFY_QUEUE.put({'stream_data': stream.copy(), 'notify_action': 'on_buffer'})
logger.debug(u"Tautulli Monitor :: Session %s is buffering. Count is now %s. Last triggered %s."
logger.debug("Tautulli Monitor :: Session %s is buffering. Count is now %s. Last triggered %s."
% (stream['session_key'],
buffer_values[0]['buffer_count'],
buffer_values[0]['buffer_last_triggered']))
@ -146,7 +146,7 @@ def check_active_sessions(ws_request=False):
else:
# The user has stopped playing a stream
if stream['state'] != 'stopped':
logger.debug(u"Tautulli Monitor :: Session %s stopped." % stream['session_key'])
logger.debug("Tautulli Monitor :: Session %s stopped." % stream['session_key'])
if not stream['stopped']:
# Set the stream stop time
@ -170,22 +170,22 @@ def check_active_sessions(ws_request=False):
if row_id:
# If session is written to the database successfully, remove the session from the session table
logger.debug(u"Tautulli Monitor :: Removing sessionKey %s ratingKey %s from session queue"
logger.debug("Tautulli Monitor :: Removing sessionKey %s ratingKey %s from session queue"
% (stream['session_key'], stream['rating_key']))
monitor_process.delete_session(row_id=row_id)
else:
stream['write_attempts'] += 1
if stream['write_attempts'] < plexpy.CONFIG.SESSION_DB_WRITE_ATTEMPTS:
logger.warn(u"Tautulli Monitor :: Failed to write sessionKey %s ratingKey %s to the database. " \
logger.warn("Tautulli Monitor :: Failed to write sessionKey %s ratingKey %s to the database. " \
"Will try again on the next pass. Write attempt %s."
% (stream['session_key'], stream['rating_key'], str(stream['write_attempts'])))
monitor_process.increment_write_attempts(session_key=stream['session_key'])
else:
logger.warn(u"Tautulli Monitor :: Failed to write sessionKey %s ratingKey %s to the database. " \
logger.warn("Tautulli Monitor :: Failed to write sessionKey %s ratingKey %s to the database. " \
"Removing session from the database. Write attempt %s."
% (stream['session_key'], stream['rating_key'], str(stream['write_attempts'])))
logger.debug(u"Tautulli Monitor :: Removing sessionKey %s ratingKey %s from session queue"
logger.debug("Tautulli Monitor :: Removing sessionKey %s ratingKey %s from session queue"
% (stream['session_key'], stream['rating_key']))
monitor_process.delete_session(session_key=stream['session_key'])
@ -194,11 +194,11 @@ def check_active_sessions(ws_request=False):
new_session = monitor_process.write_session(session)
if new_session:
logger.debug(u"Tautulli Monitor :: Session %s started by user %s with ratingKey %s."
logger.debug("Tautulli Monitor :: Session %s started by user %s with ratingKey %s."
% (session['session_key'], session['user_id'], session['rating_key']))
else:
logger.debug(u"Tautulli Monitor :: Unable to read session list.")
logger.debug("Tautulli Monitor :: Unable to read session list.")
def check_recently_added():
@ -230,13 +230,13 @@ def check_recently_added():
if metadata:
metadata = [metadata]
else:
logger.error(u"Tautulli Monitor :: Unable to retrieve metadata for rating_key %s" \
logger.error("Tautulli Monitor :: Unable to retrieve metadata for rating_key %s" \
% str(item['rating_key']))
else:
metadata = pms_connect.get_metadata_children_details(item['rating_key'])
if not metadata:
logger.error(u"Tautulli Monitor :: Unable to retrieve children metadata for rating_key %s" \
logger.error("Tautulli Monitor :: Unable to retrieve children metadata for rating_key %s" \
% str(item['rating_key']))
if metadata:
@ -247,7 +247,7 @@ def check_recently_added():
library_details = library_data.get_details(section_id=item['section_id'])
if 0 < time_threshold - int(item['added_at']) <= time_interval:
logger.debug(u"Tautulli Monitor :: Library item %s added to Plex." % str(item['rating_key']))
logger.debug("Tautulli Monitor :: Library item %s added to Plex." % str(item['rating_key']))
plexpy.NOTIFY_QUEUE.put({'timeline_data': item.copy(), 'notify_action': 'on_created'})
@ -261,10 +261,10 @@ def check_recently_added():
if metadata:
item = metadata
else:
logger.error(u"Tautulli Monitor :: Unable to retrieve grandparent metadata for grandparent_rating_key %s" \
logger.error("Tautulli Monitor :: Unable to retrieve grandparent metadata for grandparent_rating_key %s" \
% str(item['rating_key']))
logger.debug(u"Tautulli Monitor :: Library item %s added to Plex." % str(item['rating_key']))
logger.debug("Tautulli Monitor :: Library item %s added to Plex." % str(item['rating_key']))
# Check if any notification agents have notifications enabled
plexpy.NOTIFY_QUEUE.put({'timeline_data': item.copy(), 'notify_action': 'on_created'})
@ -273,19 +273,19 @@ def check_recently_added():
def connect_server(log=True, startup=False):
if plexpy.CONFIG.PMS_IS_CLOUD:
if log:
logger.info(u"Tautulli Monitor :: Checking for Plex Cloud server status...")
logger.info("Tautulli Monitor :: Checking for Plex Cloud server status...")
plex_tv = plextv.PlexTV()
status = plex_tv.get_cloud_server_status()
if status is True:
logger.info(u"Tautulli Monitor :: Plex Cloud server is active.")
logger.info("Tautulli Monitor :: Plex Cloud server is active.")
elif status is False:
if log:
logger.info(u"Tautulli Monitor :: Plex Cloud server is sleeping.")
logger.info("Tautulli Monitor :: Plex Cloud server is sleeping.")
else:
if log:
logger.error(u"Tautulli Monitor :: Failed to retrieve Plex Cloud server status.")
logger.error("Tautulli Monitor :: Failed to retrieve Plex Cloud server status.")
if not status and startup:
web_socket.on_disconnect()
@ -295,12 +295,12 @@ def connect_server(log=True, startup=False):
if status:
if log and not startup:
logger.info(u"Tautulli Monitor :: Attempting to reconnect Plex server...")
logger.info("Tautulli Monitor :: Attempting to reconnect Plex server...")
try:
web_socket.start_thread()
except Exception as e:
logger.error(u"Websocket :: Unable to open connection: %s." % e)
logger.error("Websocket :: Unable to open connection: %s." % e)
def check_server_access():
@ -319,17 +319,17 @@ def check_server_access():
# Check if the port is mapped
if not mapping_state == 'mapped':
ext_ping_count += 1
logger.warn(u"Tautulli Monitor :: Plex remote access port not mapped, ping attempt %s." \
logger.warn("Tautulli Monitor :: Plex remote access port not mapped, ping attempt %s." \
% str(ext_ping_count))
# Check if the port is open
elif mapping_error == 'unreachable':
ext_ping_count += 1
logger.warn(u"Tautulli Monitor :: Plex remote access port mapped, but mapping failed, ping attempt %s." \
logger.warn("Tautulli Monitor :: Plex remote access port mapped, but mapping failed, ping attempt %s." \
% str(ext_ping_count))
# Reset external ping counter
else:
if ext_ping_count >= plexpy.CONFIG.REMOTE_ACCESS_PING_THRESHOLD:
logger.info(u"Tautulli Monitor :: Plex remote access is back up.")
logger.info("Tautulli Monitor :: Plex remote access is back up.")
plexpy.NOTIFY_QUEUE.put({'notify_action': 'on_extup'})
@ -342,18 +342,18 @@ def check_server_access():
def check_server_updates():
with monitor_lock:
logger.info(u"Tautulli Monitor :: Checking for PMS updates...")
logger.info("Tautulli Monitor :: Checking for PMS updates...")
plex_tv = plextv.PlexTV()
download_info = plex_tv.get_plex_downloads()
if download_info:
logger.info(u"Tautulli Monitor :: Current PMS version: %s", plexpy.CONFIG.PMS_VERSION)
logger.info("Tautulli Monitor :: Current PMS version: %s", plexpy.CONFIG.PMS_VERSION)
if download_info['update_available']:
logger.info(u"Tautulli Monitor :: PMS update available version: %s", download_info['version'])
logger.info("Tautulli Monitor :: PMS update available version: %s", download_info['version'])
plexpy.NOTIFY_QUEUE.put({'notify_action': 'on_pmsupdate', 'pms_download_info': download_info})
else:
logger.info(u"Tautulli Monitor :: No PMS update available.")
logger.info("Tautulli Monitor :: No PMS update available.")

View file

@ -188,8 +188,8 @@ class ActivityProcessor(object):
if str(session['rating_key']).isdigit() and session['media_type'] in ('movie', 'episode', 'track'):
logging_enabled = True
else:
logger.debug(u"Tautulli ActivityProcessor :: Session %s ratingKey %s not logged. "
u"Does not meet logging criteria. Media type is '%s'" %
logger.debug("Tautulli ActivityProcessor :: Session %s ratingKey %s not logged. "
"Does not meet logging criteria. Media type is '%s'" %
(session['session_key'], session['rating_key'], session['media_type']))
return session['id']
@ -202,36 +202,36 @@ class ActivityProcessor(object):
if (session['media_type'] == 'movie' or session['media_type'] == 'episode') and \
(real_play_time < int(plexpy.CONFIG.LOGGING_IGNORE_INTERVAL)):
logging_enabled = False
logger.debug(u"Tautulli ActivityProcessor :: Play duration for session %s ratingKey %s is %s secs "
u"which is less than %s seconds, so we're not logging it." %
logger.debug("Tautulli ActivityProcessor :: Play duration for session %s ratingKey %s is %s secs "
"which is less than %s seconds, so we're not logging it." %
(session['session_key'], session['rating_key'], str(real_play_time),
plexpy.CONFIG.LOGGING_IGNORE_INTERVAL))
if not is_import and session['media_type'] == 'track':
if real_play_time < 15 and session['duration'] >= 30:
logging_enabled = False
logger.debug(u"Tautulli ActivityProcessor :: Play duration for session %s ratingKey %s is %s secs, "
u"looks like it was skipped so we're not logging it" %
logger.debug("Tautulli ActivityProcessor :: Play duration for session %s ratingKey %s is %s secs, "
"looks like it was skipped so we're not logging it" %
(session['session_key'], session['rating_key'], str(real_play_time)))
elif is_import and import_ignore_interval:
if (session['media_type'] == 'movie' or session['media_type'] == 'episode') and \
(real_play_time < int(import_ignore_interval)):
logging_enabled = False
logger.debug(u"Tautulli ActivityProcessor :: Play duration for ratingKey %s is %s secs which is less than %s "
u"seconds, so we're not logging it." %
logger.debug("Tautulli ActivityProcessor :: Play duration for ratingKey %s is %s secs which is less than %s "
"seconds, so we're not logging it." %
(session['rating_key'], str(real_play_time), import_ignore_interval))
if not is_import and not user_details['keep_history']:
logging_enabled = False
logger.debug(u"Tautulli ActivityProcessor :: History logging for user '%s' is disabled." % user_details['username'])
logger.debug("Tautulli ActivityProcessor :: History logging for user '%s' is disabled." % user_details['username'])
elif not is_import and not library_details['keep_history']:
logging_enabled = False
logger.debug(u"Tautulli ActivityProcessor :: History logging for library '%s' is disabled." % library_details['section_name'])
logger.debug("Tautulli ActivityProcessor :: History logging for library '%s' is disabled." % library_details['section_name'])
if logging_enabled:
# Fetch metadata first so we can return false if it fails
if not is_import:
logger.debug(u"Tautulli ActivityProcessor :: Fetching metadata for item ratingKey %s" % session['rating_key'])
logger.debug("Tautulli ActivityProcessor :: Fetching metadata for item ratingKey %s" % session['rating_key'])
pms_connect = pmsconnect.PmsConnect()
metadata = pms_connect.get_metadata_details(rating_key=str(session['rating_key']))
if not metadata:
@@ -245,7 +245,7 @@ class ActivityProcessor(object):
## TODO: Fix media info from imports. Temporary media info from import session.
media_info = session
# logger.debug(u"Tautulli ActivityProcessor :: Attempting to write sessionKey %s to session_history table..."
# logger.debug("Tautulli ActivityProcessor :: Attempting to write sessionKey %s to session_history table..."
# % session['session_key'])
keys = {'id': None}
values = {'started': session['started'],
@@ -273,7 +273,7 @@ class ActivityProcessor(object):
'relayed': session['relayed']
}
# logger.debug(u"Tautulli ActivityProcessor :: Writing sessionKey %s session_history transaction..."
# logger.debug("Tautulli ActivityProcessor :: Writing sessionKey %s session_history transaction..."
# % session['session_key'])
self.db.upsert(table_name='session_history', key_dict=keys, value_dict=values)
@@ -326,12 +326,12 @@ class ActivityProcessor(object):
self.db.action(query=query, args=args)
# logger.debug(u"Tautulli ActivityProcessor :: Successfully written history item, last id for session_history is %s"
# logger.debug("Tautulli ActivityProcessor :: Successfully written history item, last id for session_history is %s"
# % last_id)
# Write the session_history_media_info table
# logger.debug(u"Tautulli ActivityProcessor :: Attempting to write to sessionKey %s session_history_media_info table..."
# logger.debug("Tautulli ActivityProcessor :: Attempting to write to sessionKey %s session_history_media_info table..."
# % session['session_key'])
keys = {'id': last_id}
values = {'rating_key': session['rating_key'],
@@ -403,7 +403,7 @@ class ActivityProcessor(object):
'optimized_version_title': session['optimized_version_title']
}
# logger.debug(u"Tautulli ActivityProcessor :: Writing sessionKey %s session_history_media_info transaction..."
# logger.debug("Tautulli ActivityProcessor :: Writing sessionKey %s session_history_media_info transaction..."
# % session['session_key'])
self.db.upsert(table_name='session_history_media_info', key_dict=keys, value_dict=values)
@@ -414,7 +414,7 @@ class ActivityProcessor(object):
genres = ";".join(metadata['genres'])
labels = ";".join(metadata['labels'])
# logger.debug(u"Tautulli ActivityProcessor :: Attempting to write to sessionKey %s session_history_metadata table..."
# logger.debug("Tautulli ActivityProcessor :: Attempting to write to sessionKey %s session_history_metadata table..."
# % session['session_key'])
keys = {'id': last_id}
values = {'rating_key': session['rating_key'],
@@ -452,7 +452,7 @@ class ActivityProcessor(object):
'labels': labels
}
# logger.debug(u"Tautulli ActivityProcessor :: Writing sessionKey %s session_history_metadata transaction..."
# logger.debug("Tautulli ActivityProcessor :: Writing sessionKey %s session_history_metadata transaction..."
# % session['session_key'])
self.db.upsert(table_name='session_history_metadata', key_dict=keys, value_dict=values)

View file

@@ -142,9 +142,9 @@ class API2:
self._api_kwargs = kwargs
if self._api_msg:
logger.api_debug(u'Tautulli APIv2 :: %s.' % self._api_msg)
logger.api_debug('Tautulli APIv2 :: %s.' % self._api_msg)
logger.api_debug(u'Tautulli APIv2 :: Cleaned kwargs: %s' % self._api_kwargs)
logger.api_debug('Tautulli APIv2 :: Cleaned kwargs: %s' % self._api_kwargs)
return self._api_kwargs
@@ -182,7 +182,7 @@ class API2:
end = int(end)
if regex:
logger.api_debug(u"Tautulli APIv2 :: Filtering log using regex '%s'" % regex)
logger.api_debug("Tautulli APIv2 :: Filtering log using regex '%s'" % regex)
reg = re.compile(regex, flags=re.I)
with open(logfile, 'r') as f:
@@ -218,15 +218,15 @@ class API2:
templog = templog[::-1]
if end > 0 or start > 0:
logger.api_debug(u"Tautulli APIv2 :: Slicing the log from %s to %s" % (start, end))
logger.api_debug("Tautulli APIv2 :: Slicing the log from %s to %s" % (start, end))
templog = templog[start:end]
if sort:
logger.api_debug(u"Tautulli APIv2 :: Sorting log based on '%s'" % sort)
logger.api_debug("Tautulli APIv2 :: Sorting log based on '%s'" % sort)
templog = sorted(templog, key=lambda k: k[sort])
if search:
logger.api_debug(u"Tautulli APIv2 :: Searching log values for '%s'" % search)
logger.api_debug("Tautulli APIv2 :: Searching log values for '%s'" % search)
tt = [d for d in templog for k, v in d.items() if search.lower() in v.lower()]
if len(tt):
@@ -235,7 +235,7 @@ class API2:
if regex:
tt = []
for l in templog:
stringdict = ' '.join(u'{}{}'.format(k, v) for k, v in l.items())
stringdict = ' '.join('{}{}'.format(k, v) for k, v in l.items())
if reg.search(stringdict):
tt.append(l)
@@ -440,7 +440,7 @@ class API2:
self._api_result_type = 'error'
return
logger.api_debug(u'Tautulli APIv2 :: Sending notification.')
logger.api_debug('Tautulli APIv2 :: Sending notification.')
success = notification_handler.notify(notifier_id=notifier_id,
notify_action='api',
subject=subject,
@@ -484,7 +484,7 @@ class API2:
self._api_result_type = 'error'
return
logger.api_debug(u'Tautulli APIv2 :: Sending newsletter.')
logger.api_debug('Tautulli APIv2 :: Sending newsletter.')
success = newsletter_handler.notify(newsletter_id=newsletter_id,
notify_action='api',
subject=subject,
@@ -628,7 +628,7 @@ General optional parameters:
out = self._api_callback + '(' + out + ');'
# if we fail to generate the output fake an error
except Exception as e:
logger.api_exception(u'Tautulli APIv2 :: ' + traceback.format_exc())
logger.api_exception('Tautulli APIv2 :: ' + traceback.format_exc())
self._api_response_code = 500
out['message'] = traceback.format_exc()
out['result'] = 'error'
@@ -638,7 +638,7 @@ General optional parameters:
try:
out = xmltodict.unparse(out, pretty=True)
except Exception as e:
logger.api_error(u'Tautulli APIv2 :: Failed to parse xml result')
logger.api_error('Tautulli APIv2 :: Failed to parse xml result')
self._api_response_code = 500
try:
out['message'] = e
@@ -646,7 +646,7 @@ General optional parameters:
out = xmltodict.unparse(out, pretty=True)
except Exception as e:
logger.api_error(u'Tautulli APIv2 :: Failed to parse xml result error message %s' % e)
logger.api_error('Tautulli APIv2 :: Failed to parse xml result error message %s' % e)
out = '''<?xml version="1.0" encoding="utf-8"?>
<response>
<message>%s</message>
@@ -661,7 +661,7 @@ General optional parameters:
""" handles the stuff from the handler """
result = {}
logger.api_debug(u'Tautulli APIv2 :: API called with kwargs: %s' % kwargs)
logger.api_debug('Tautulli APIv2 :: API called with kwargs: %s' % kwargs)
self._api_validate(**kwargs)
@@ -679,7 +679,7 @@ General optional parameters:
result = call(**self._api_kwargs)
except Exception as e:
logger.api_error(u'Tautulli APIv2 :: Failed to run %s with %s: %s' % (self._api_cmd, self._api_kwargs, e))
logger.api_error('Tautulli APIv2 :: Failed to run %s with %s: %s' % (self._api_cmd, self._api_kwargs, e))
self._api_response_code = 500
if self._api_debug:
cherrypy.request.show_tracebacks = True

View file

@@ -661,13 +661,13 @@ def make_backup(cleanup=False, scheduler=False):
try:
os.remove(file_)
except OSError as e:
logger.error(u"Tautulli Config :: Failed to delete %s from the backup folder: %s" % (file_, e))
logger.error("Tautulli Config :: Failed to delete %s from the backup folder: %s" % (file_, e))
if backup_file in os.listdir(backup_folder):
logger.debug(u"Tautulli Config :: Successfully backed up %s to %s" % (plexpy.CONFIG_FILE, backup_file))
logger.debug("Tautulli Config :: Successfully backed up %s to %s" % (plexpy.CONFIG_FILE, backup_file))
return True
else:
logger.error(u"Tautulli Config :: Failed to backup %s to %s" % (plexpy.CONFIG_FILE, backup_file))
logger.error("Tautulli Config :: Failed to backup %s to %s" % (plexpy.CONFIG_FILE, backup_file))
return False
@@ -748,12 +748,12 @@ class Config(object):
new_config[section][ini_key] = self._config[section][ini_key]
# Write it to file
logger.info(u"Tautulli Config :: Writing configuration to file")
logger.info("Tautulli Config :: Writing configuration to file")
try:
new_config.write()
except IOError as e:
logger.error(u"Tautulli Config :: Error writing configuration file: %s", e)
logger.error("Tautulli Config :: Error writing configuration file: %s", e)
self._blacklist()

View file

@@ -39,7 +39,7 @@ def drop_session_db():
def clear_history_tables():
logger.debug(u"Tautulli Database :: Deleting all session_history records... No turning back now bub.")
logger.debug("Tautulli Database :: Deleting all session_history records... No turning back now bub.")
monitor_db = MonitorDatabase()
monitor_db.action('DELETE FROM session_history')
monitor_db.action('DELETE FROM session_history_media_info')
@@ -48,7 +48,7 @@ def clear_history_tables():
def delete_sessions():
logger.debug(u"Tautulli Database :: Clearing temporary sessions from database.")
logger.debug("Tautulli Database :: Clearing temporary sessions from database.")
monitor_db = MonitorDatabase()
try:
@@ -56,7 +56,7 @@ def delete_sessions():
monitor_db.action('VACUUM')
return True
except Exception as e:
logger.warn(u"Tautulli Database :: Unable to clear temporary sessions from database: %s." % e)
logger.warn("Tautulli Database :: Unable to clear temporary sessions from database: %s." % e)
return False
@@ -103,13 +103,13 @@ def make_backup(cleanup=False, scheduler=False):
try:
os.remove(file_)
except OSError as e:
logger.error(u"Tautulli Database :: Failed to delete %s from the backup folder: %s" % (file_, e))
logger.error("Tautulli Database :: Failed to delete %s from the backup folder: %s" % (file_, e))
if backup_file in os.listdir(backup_folder):
logger.debug(u"Tautulli Database :: Successfully backed up %s to %s" % (db_filename(), backup_file))
logger.debug("Tautulli Database :: Successfully backed up %s to %s" % (db_filename(), backup_file))
return True
else:
logger.error(u"Tautulli Database :: Failed to backup %s to %s" % (db_filename(), backup_file))
logger.error("Tautulli Database :: Failed to backup %s to %s" % (db_filename(), backup_file))
return False
@@ -162,15 +162,15 @@ class MonitorDatabase(object):
except sqlite3.OperationalError as e:
if "unable to open database file" in e or "database is locked" in e:
logger.warn(u"Tautulli Database :: Database Error: %s", e)
logger.warn("Tautulli Database :: Database Error: %s", e)
attempts += 1
time.sleep(1)
else:
logger.error(u"Tautulli Database :: Database error: %s", e)
logger.error("Tautulli Database :: Database error: %s", e)
raise
except sqlite3.DatabaseError as e:
logger.error(u"Tautulli Database :: Fatal Error executing %s :: %s", query, e)
logger.error("Tautulli Database :: Fatal Error executing %s :: %s", query, e)
raise
return sql_result
@@ -214,7 +214,7 @@ class MonitorDatabase(object):
try:
self.action(insert_query, value_dict.values() + key_dict.values())
except sqlite3.IntegrityError:
logger.info(u"Tautulli Database :: Queries failed: %s and %s", update_query, insert_query)
logger.info("Tautulli Database :: Queries failed: %s and %s", update_query, insert_query)
# We want to know if it was an update or insert
return trans_type

View file

@@ -176,7 +176,7 @@ class DataFactory(object):
['session_history.id', 'session_history_media_info.id']],
kwargs=kwargs)
except Exception as e:
logger.warn(u"Tautulli DataFactory :: Unable to execute database query for get_history: %s." % e)
logger.warn("Tautulli DataFactory :: Unable to execute database query for get_history: %s." % e)
return {'recordsFiltered': 0,
'recordsTotal': 0,
'draw': 0,
@@ -302,7 +302,7 @@ class DataFactory(object):
'LIMIT %s ' % (time_range, group_by, sort_type, stats_count)
result = monitor_db.select(query)
except Exception as e:
logger.warn(u"Tautulli DataFactory :: Unable to execute database query for get_home_stats: top_movies: %s." % e)
logger.warn("Tautulli DataFactory :: Unable to execute database query for get_home_stats: top_movies: %s." % e)
return None
for item in result:
@@ -353,7 +353,7 @@ class DataFactory(object):
'LIMIT %s ' % (time_range, group_by, sort_type, stats_count)
result = monitor_db.select(query)
except Exception as e:
logger.warn(u"Tautulli DataFactory :: Unable to execute database query for get_home_stats: popular_movies: %s." % e)
logger.warn("Tautulli DataFactory :: Unable to execute database query for get_home_stats: popular_movies: %s." % e)
return None
for item in result:
@@ -400,7 +400,7 @@ class DataFactory(object):
'LIMIT %s ' % (time_range, group_by, sort_type, stats_count)
result = monitor_db.select(query)
except Exception as e:
logger.warn(u"Tautulli DataFactory :: Unable to execute database query for get_home_stats: top_tv: %s." % e)
logger.warn("Tautulli DataFactory :: Unable to execute database query for get_home_stats: top_tv: %s." % e)
return None
for item in result:
@@ -450,7 +450,7 @@ class DataFactory(object):
'LIMIT %s ' % (time_range, group_by, sort_type, stats_count)
result = monitor_db.select(query)
except Exception as e:
logger.warn(u"Tautulli DataFactory :: Unable to execute database query for get_home_stats: popular_tv: %s." % e)
logger.warn("Tautulli DataFactory :: Unable to execute database query for get_home_stats: popular_tv: %s." % e)
return None
for item in result:
@@ -498,7 +498,7 @@ class DataFactory(object):
'LIMIT %s ' % (time_range, group_by, sort_type, stats_count)
result = monitor_db.select(query)
except Exception as e:
logger.warn(u"Tautulli DataFactory :: Unable to execute database query for get_home_stats: top_music: %s." % e)
logger.warn("Tautulli DataFactory :: Unable to execute database query for get_home_stats: top_music: %s." % e)
return None
for item in result:
@@ -549,7 +549,7 @@ class DataFactory(object):
'LIMIT %s ' % (time_range, group_by, sort_type, stats_count)
result = monitor_db.select(query)
except Exception as e:
logger.warn(u"Tautulli DataFactory :: Unable to execute database query for get_home_stats: popular_music: %s." % e)
logger.warn("Tautulli DataFactory :: Unable to execute database query for get_home_stats: popular_music: %s." % e)
return None
for item in result:
@@ -597,7 +597,7 @@ class DataFactory(object):
'LIMIT %s ' % (time_range, group_by, sort_type, stats_count)
result = monitor_db.select(query)
except Exception as e:
logger.warn(u"Tautulli DataFactory :: Unable to execute database query for get_home_stats: top_users: %s." % e)
logger.warn("Tautulli DataFactory :: Unable to execute database query for get_home_stats: top_users: %s." % e)
return None
for item in result:
@@ -649,7 +649,7 @@ class DataFactory(object):
'LIMIT %s ' % (time_range, group_by, sort_type, stats_count)
result = monitor_db.select(query)
except Exception as e:
logger.warn(u"Tautulli DataFactory :: Unable to execute database query for get_home_stats: top_platforms: %s." % e)
logger.warn("Tautulli DataFactory :: Unable to execute database query for get_home_stats: top_platforms: %s." % e)
return None
for item in result:
@@ -706,7 +706,7 @@ class DataFactory(object):
'LIMIT %s' % (time_range, group_by, movie_watched_percent, tv_watched_percent, stats_count)
result = monitor_db.select(query)
except Exception as e:
logger.warn(u"Tautulli DataFactory :: Unable to execute database query for get_home_stats: last_watched: %s." % e)
logger.warn("Tautulli DataFactory :: Unable to execute database query for get_home_stats: last_watched: %s." % e)
return None
for item in result:
@@ -812,7 +812,7 @@ class DataFactory(object):
if result:
most_concurrent.append(calc_most_concurrent(title, result))
except Exception as e:
logger.warn(u"Tautulli DataFactory :: Unable to execute database query for get_home_stats: most_concurrent: %s." % e)
logger.warn("Tautulli DataFactory :: Unable to execute database query for get_home_stats: most_concurrent: %s." % e)
return None
home_stats.append({'stat_id': stat,
@@ -840,7 +840,7 @@ class DataFactory(object):
'ORDER BY section_type, count DESC, parent_count DESC, child_count DESC ' % ','.join(library_cards)
result = monitor_db.select(query)
except Exception as e:
logger.warn(u"Tautulli DataFactory :: Unable to execute database query for get_library_stats: %s." % e)
logger.warn("Tautulli DataFactory :: Unable to execute database query for get_library_stats: %s." % e)
return None
for item in result:
@@ -1103,7 +1103,7 @@ class DataFactory(object):
'%s ' % where
result = monitor_db.select(query)
except Exception as e:
logger.warn(u"Tautulli DataFactory :: Unable to execute database query for get_total_duration: %s." % e)
logger.warn("Tautulli DataFactory :: Unable to execute database query for get_total_duration: %s." % e)
return None
total_duration = 0
@@ -1126,7 +1126,7 @@ class DataFactory(object):
query = 'SELECT ip_address FROM sessions WHERE session_key = %d %s' % (int(session_key), user_cond)
result = monitor_db.select(query)
except Exception as e:
logger.warn(u"Tautulli DataFactory :: Unable to execute database query for get_session_ip: %s." % e)
logger.warn("Tautulli DataFactory :: Unable to execute database query for get_session_ip: %s." % e)
return ip_address
else:
return ip_address
@@ -1187,14 +1187,14 @@ class DataFactory(object):
'JOIN image_hash_lookup ON cloudinary_lookup.img_hash = image_hash_lookup.img_hash ' \
'%s %s' % (where, order_by)
else:
logger.warn(u"Tautulli DataFactory :: Unable to execute database query for get_img_info: "
logger.warn("Tautulli DataFactory :: Unable to execute database query for get_img_info: "
"service not provided.")
return img_info
try:
img_info = monitor_db.select(query, args=args)
except Exception as e:
logger.warn(u"Tautulli DataFactory :: Unable to execute database query for get_img_info: %s." % e)
logger.warn("Tautulli DataFactory :: Unable to execute database query for get_img_info: %s." % e)
return img_info
@@ -1213,7 +1213,7 @@ class DataFactory(object):
values = {'cloudinary_title': img_title,
'cloudinary_url': img_url}
else:
logger.warn(u"Tautulli DataFactory :: Unable to execute database query for set_img_info: "
logger.warn("Tautulli DataFactory :: Unable to execute database query for set_img_info: "
"service not provided.")
return
@@ -1226,7 +1226,7 @@ class DataFactory(object):
service = helpers.get_img_service()
if not rating_key and not delete_all:
logger.error(u"Tautulli DataFactory :: Unable to delete hosted images: rating_key not provided.")
logger.error("Tautulli DataFactory :: Unable to delete hosted images: rating_key not provided.")
return False
where = ''
@@ -1249,7 +1249,7 @@ class DataFactory(object):
img_title=imgur_info['imgur_title'],
fallback=imgur_info['fallback'])
logger.info(u"Tautulli DataFactory :: Deleting Imgur info%s from the database."
logger.info("Tautulli DataFactory :: Deleting Imgur info%s from the database."
% log_msg)
result = monitor_db.action('DELETE FROM imgur_lookup WHERE img_hash '
'IN (SELECT img_hash FROM image_hash_lookup %s)' % where,
@@ -1268,14 +1268,14 @@ class DataFactory(object):
for cloudinary_info in results:
helpers.delete_from_cloudinary(rating_key=cloudinary_info['rating_key'])
logger.info(u"Tautulli DataFactory :: Deleting Cloudinary info%s from the database."
logger.info("Tautulli DataFactory :: Deleting Cloudinary info%s from the database."
% log_msg)
result = monitor_db.action('DELETE FROM cloudinary_lookup WHERE img_hash '
'IN (SELECT img_hash FROM image_hash_lookup %s)' % where,
args)
else:
logger.error(u"Tautulli DataFactory :: Unable to delete hosted images: invalid service '%s' provided."
logger.error("Tautulli DataFactory :: Unable to delete hosted images: invalid service '%s' provided."
% service)
return service
@@ -1348,7 +1348,7 @@ class DataFactory(object):
lookup_info['musicbrainz_id'] = musicbrainz_info['musicbrainz_id']
except Exception as e:
logger.warn(u"Tautulli DataFactory :: Unable to execute database query for get_lookup_info: %s." % e)
logger.warn("Tautulli DataFactory :: Unable to execute database query for get_lookup_info: %s." % e)
return lookup_info
@@ -1356,7 +1356,7 @@ class DataFactory(object):
monitor_db = database.MonitorDatabase()
if rating_key:
logger.info(u"Tautulli DataFactory :: Deleting lookup info for '%s' (rating_key %s) from the database."
logger.info("Tautulli DataFactory :: Deleting lookup info for '%s' (rating_key %s) from the database."
% (title, rating_key))
result_tvmaze = monitor_db.action('DELETE FROM tvmaze_lookup WHERE rating_key = ?', [rating_key])
result_themoviedb = monitor_db.action('DELETE FROM themoviedb_lookup WHERE rating_key = ?', [rating_key])
@@ -1451,7 +1451,7 @@ class DataFactory(object):
grandparent_rating_key = result[0]['grandparent_rating_key']
except Exception as e:
logger.warn(u"Tautulli DataFactory :: Unable to execute database query for get_rating_keys_list: %s." % e)
logger.warn("Tautulli DataFactory :: Unable to execute database query for get_rating_keys_list: %s." % e)
return {}
query = 'SELECT rating_key, parent_rating_key, grandparent_rating_key, title, parent_title, grandparent_title, ' \
@@ -1499,7 +1499,7 @@ class DataFactory(object):
monitor_db = database.MonitorDatabase()
if row_id.isdigit():
logger.info(u"Tautulli DataFactory :: Deleting row id %s from the session history database." % row_id)
logger.info("Tautulli DataFactory :: Deleting row id %s from the session history database." % row_id)
session_history_del = \
monitor_db.action('DELETE FROM session_history WHERE id = ?', [row_id])
session_history_media_info_del = \
@@ -1532,7 +1532,7 @@ class DataFactory(object):
mapping = get_pairs(old_key_list, new_key_list)
if mapping:
logger.info(u"Tautulli DataFactory :: Updating metadata in the database.")
logger.info("Tautulli DataFactory :: Updating metadata in the database.")
for old_key, new_key in mapping.iteritems():
metadata = pms_connect.get_metadata_details(new_key)
@@ -1581,7 +1581,7 @@ class DataFactory(object):
genres = ";".join(metadata['genres'])
labels = ";".join(metadata['labels'])
#logger.info(u"Tautulli DataFactory :: Updating metadata in the database for rating key: %s." % new_rating_key)
#logger.info("Tautulli DataFactory :: Updating metadata in the database for rating key: %s." % new_rating_key)
monitor_db = database.MonitorDatabase()
# Update the session_history_metadata table
@@ -1635,7 +1635,7 @@ class DataFactory(object):
join_evals=[],
kwargs=kwargs)
except Exception as e:
logger.warn(u"Tautulli DataFactory :: Unable to execute database query for get_notification_log: %s." % e)
logger.warn("Tautulli DataFactory :: Unable to execute database query for get_notification_log: %s." % e)
return {'recordsFiltered': 0,
'recordsTotal': 0,
'draw': 0,
@@ -1680,12 +1680,12 @@ class DataFactory(object):
monitor_db = database.MonitorDatabase()
try:
logger.info(u"Tautulli DataFactory :: Clearing notification logs from database.")
logger.info("Tautulli DataFactory :: Clearing notification logs from database.")
monitor_db.action('DELETE FROM notify_log')
monitor_db.action('VACUUM')
return True
except Exception as e:
logger.warn(u"Tautulli DataFactory :: Unable to execute database query for delete_notification_log: %s." % e)
logger.warn("Tautulli DataFactory :: Unable to execute database query for delete_notification_log: %s." % e)
return False
def get_newsletter_log(self, kwargs=None):
@@ -1714,7 +1714,7 @@ class DataFactory(object):
join_evals=[],
kwargs=kwargs)
except Exception as e:
logger.warn(u"Tautulli DataFactory :: Unable to execute database query for get_newsletter_log: %s." % e)
logger.warn("Tautulli DataFactory :: Unable to execute database query for get_newsletter_log: %s." % e)
return {'recordsFiltered': 0,
'recordsTotal': 0,
'draw': 0,
@@ -1753,12 +1753,12 @@ class DataFactory(object):
monitor_db = database.MonitorDatabase()
try:
logger.info(u"Tautulli DataFactory :: Clearing newsletter logs from database.")
logger.info("Tautulli DataFactory :: Clearing newsletter logs from database.")
monitor_db.action('DELETE FROM newsletter_log')
monitor_db.action('VACUUM')
return True
except Exception as e:
logger.warn(u"Tautulli DataFactory :: Unable to execute database query for delete_newsletter_log: %s." % e)
logger.warn("Tautulli DataFactory :: Unable to execute database query for delete_newsletter_log: %s." % e)
return False
def get_user_devices(self, user_id=''):
@@ -1769,7 +1769,7 @@ class DataFactory(object):
query = 'SELECT machine_id FROM session_history WHERE user_id = ? GROUP BY machine_id'
result = monitor_db.select(query=query, args=[user_id])
except Exception as e:
logger.warn(u"Tautulli DataFactory :: Unable to execute database query for get_user_devices: %s." % e)
logger.warn("Tautulli DataFactory :: Unable to execute database query for get_user_devices: %s." % e)
return []
else:
return []
@@ -1784,7 +1784,7 @@ class DataFactory(object):
query = 'SELECT * FROM recently_added WHERE rating_key = ?'
result = monitor_db.select(query=query, args=[rating_key])
except Exception as e:
logger.warn(u"Tautulli DataFactory :: Unable to execute database query for get_recently_added_item: %s." % e)
logger.warn("Tautulli DataFactory :: Unable to execute database query for get_recently_added_item: %s." % e)
return []
else:
return []
@@ -1810,7 +1810,7 @@ class DataFactory(object):
try:
monitor_db.upsert(table_name='recently_added', key_dict=keys, value_dict=values)
except Exception as e:
logger.warn(u"Tautulli DataFactory :: Unable to execute database query for set_recently_added_item: %s." % e)
logger.warn("Tautulli DataFactory :: Unable to execute database query for set_recently_added_item: %s." % e)
return False
return True

View file

@@ -84,7 +84,7 @@ class DataTables(object):
query = 'SELECT * FROM (SELECT %s FROM %s %s %s %s %s) %s %s' \
% (extracted_columns['column_string'], table_name, join, c_where, group, union, where, order)
# logger.debug(u"Query: %s" % query)
# logger.debug("Query: %s" % query)
# Execute the query
filtered = self.ssp_db.select(query, args=args)

View file

@@ -73,7 +73,7 @@ class Graphs(object):
result = monitor_db.select(query)
except Exception as e:
logger.warn(u"Tautulli Graphs :: Unable to execute database query for get_total_plays_per_day: %s." % e)
logger.warn("Tautulli Graphs :: Unable to execute database query for get_total_plays_per_day: %s." % e)
return None
# create our date range as some days may not have any data
@@ -180,7 +180,7 @@ class Graphs(object):
result = monitor_db.select(query)
except Exception as e:
logger.warn(u"Tautulli Graphs :: Unable to execute database query for get_total_plays_per_dayofweek: %s." % e)
logger.warn("Tautulli Graphs :: Unable to execute database query for get_total_plays_per_dayofweek: %s." % e)
return None
if plexpy.CONFIG.WEEK_START_MONDAY:
@@ -272,7 +272,7 @@ class Graphs(object):
result = monitor_db.select(query)
except Exception as e:
logger.warn(u"Tautulli Graphs :: Unable to execute database query for get_total_plays_per_hourofday: %s." % e)
logger.warn("Tautulli Graphs :: Unable to execute database query for get_total_plays_per_hourofday: %s." % e)
return None
hours_list = ['00','01','02','03','04','05',
@@ -364,7 +364,7 @@ class Graphs(object):
result = monitor_db.select(query)
except Exception as e:
logger.warn(u"Tautulli Graphs :: Unable to execute database query for get_total_plays_per_month: %s." % e)
logger.warn("Tautulli Graphs :: Unable to execute database query for get_total_plays_per_month: %s." % e)
return None
# create our date range as some months may not have any data
@@ -461,7 +461,7 @@ class Graphs(object):
result = monitor_db.select(query)
except Exception as e:
logger.warn(u"Tautulli Graphs :: Unable to execute database query for get_total_plays_by_top_10_platforms: %s." % e)
logger.warn("Tautulli Graphs :: Unable to execute database query for get_total_plays_by_top_10_platforms: %s." % e)
return None
categories = []
@@ -543,7 +543,7 @@ class Graphs(object):
result = monitor_db.select(query)
except Exception as e:
logger.warn(u"Tautulli Graphs :: Unable to execute database query for get_total_plays_by_top_10_users: %s." % e)
logger.warn("Tautulli Graphs :: Unable to execute database query for get_total_plays_by_top_10_users: %s." % e)
return None
categories = []
@@ -634,7 +634,7 @@ class Graphs(object):
result = monitor_db.select(query)
except Exception as e:
logger.warn(u"Tautulli Graphs :: Unable to execute database query for get_total_plays_per_stream_type: %s." % e)
logger.warn("Tautulli Graphs :: Unable to execute database query for get_total_plays_per_stream_type: %s." % e)
return None
# create our date range as some days may not have any data
@@ -740,7 +740,7 @@ class Graphs(object):
result = monitor_db.select(query)
except Exception as e:
logger.warn(u"Tautulli Graphs :: Unable to execute database query for get_total_plays_by_source_resolution: %s." % e)
logger.warn("Tautulli Graphs :: Unable to execute database query for get_total_plays_by_source_resolution: %s." % e)
return None
categories = []
@@ -850,7 +850,7 @@ class Graphs(object):
result = monitor_db.select(query)
except Exception as e:
logger.warn(u"Tautulli Graphs :: Unable to execute database query for get_total_plays_by_stream_resolution: %s." % e)
logger.warn("Tautulli Graphs :: Unable to execute database query for get_total_plays_by_stream_resolution: %s." % e)
return None
categories = []
@@ -939,7 +939,7 @@ class Graphs(object):
result = monitor_db.select(query)
except Exception as e:
logger.warn(u"Tautulli Graphs :: Unable to execute database query for get_stream_type_by_top_10_platforms: %s." % e)
logger.warn("Tautulli Graphs :: Unable to execute database query for get_stream_type_by_top_10_platforms: %s." % e)
return None
categories = []
@@ -1037,7 +1037,7 @@ class Graphs(object):
result = monitor_db.select(query)
except Exception as e:
logger.warn(u"Tautulli Graphs :: Unable to execute database query for get_stream_type_by_top_10_users: %s." % e)
logger.warn("Tautulli Graphs :: Unable to execute database query for get_stream_type_by_top_10_users: %s." % e)
return None
categories = []

View file

@@ -429,7 +429,7 @@ def create_https_certificates(ssl_cert, ssl_key):
altNames = ','.join(domains + ips)
# Create the self-signed Tautulli certificate
logger.debug(u"Generating self-signed SSL certificate.")
logger.debug("Generating self-signed SSL certificate.")
pkey = createKeyPair(TYPE_RSA, 2048)
cert = createSelfSignedCertificate(("Tautulli", pkey), serial, (0, 60 * 60 * 24 * 365 * 10), altNames) # ten years
@@ -568,9 +568,9 @@ def get_ip(host):
elif not re.fullmatch(r'[0-9]+(?:\.[0-9]+){3}(?!\d*-[a-z0-9]{6})', host):
try:
ip_address = socket.getaddrinfo(host, None)[0][4][0]
logger.debug(u"IP Checker :: Resolved %s to %s." % (host, ip_address))
logger.debug("IP Checker :: Resolved %s to %s." % (host, ip_address))
except:
logger.error(u"IP Checker :: Bad IP or hostname provided: %s." % host)
logger.error("IP Checker :: Bad IP or hostname provided: %s." % host)
return ip_address
@@ -594,27 +594,27 @@ def install_geoip_db():
geolite2_db = plexpy.CONFIG.GEOIP_DB or os.path.join(plexpy.DATA_DIR, geolite2_db)
# Retrieve the GeoLite2 gzip file
logger.debug(u"Tautulli Helpers :: Downloading GeoLite2 gzip file from MaxMind...")
logger.debug("Tautulli Helpers :: Downloading GeoLite2 gzip file from MaxMind...")
try:
maxmind = urllib.URLopener()
maxmind.retrieve(maxmind_url + geolite2_gz, temp_gz)
md5_checksum = urllib2.urlopen(maxmind_url + geolite2_md5).read()
except Exception as e:
logger.error(u"Tautulli Helpers :: Failed to download GeoLite2 gzip file from MaxMind: %s" % e)
logger.error("Tautulli Helpers :: Failed to download GeoLite2 gzip file from MaxMind: %s" % e)
return False
# Extract the GeoLite2 database file
logger.debug(u"Tautulli Helpers :: Extracting GeoLite2 database...")
logger.debug("Tautulli Helpers :: Extracting GeoLite2 database...")
try:
with gzip.open(temp_gz, 'rb') as gz:
with open(geolite2_db, 'wb') as db:
db.write(gz.read())
except Exception as e:
logger.error(u"Tautulli Helpers :: Failed to extract the GeoLite2 database: %s" % e)
logger.error("Tautulli Helpers :: Failed to extract the GeoLite2 database: %s" % e)
return False
# Check MD5 hash for GeoLite2 database file
logger.debug(u"Tautulli Helpers :: Checking MD5 checksum for GeoLite2 database...")
logger.debug("Tautulli Helpers :: Checking MD5 checksum for GeoLite2 database...")
try:
hash_md5 = hashlib.md5()
with open(geolite2_db, 'rb') as f:
@@ -623,21 +623,21 @@ def install_geoip_db():
md5_hash = hash_md5.hexdigest()
if md5_hash != md5_checksum:
logger.error(u"Tautulli Helpers :: MD5 checksum doesn't match for GeoLite2 database. "
logger.error("Tautulli Helpers :: MD5 checksum doesn't match for GeoLite2 database. "
"Checksum: %s, file hash: %s" % (md5_checksum, md5_hash))
return False
except Exception as e:
logger.error(u"Tautulli Helpers :: Failed to generate MD5 checksum for GeoLite2 database: %s" % e)
logger.error("Tautulli Helpers :: Failed to generate MD5 checksum for GeoLite2 database: %s" % e)
return False
# Delete temportary GeoLite2 gzip file
logger.debug(u"Tautulli Helpers :: Deleting temporary GeoLite2 gzip file...")
logger.debug("Tautulli Helpers :: Deleting temporary GeoLite2 gzip file...")
try:
os.remove(temp_gz)
except Exception as e:
logger.warn(u"Tautulli Helpers :: Failed to remove temporary GeoLite2 gzip file: %s" % e)
logger.warn("Tautulli Helpers :: Failed to remove temporary GeoLite2 gzip file: %s" % e)
logger.debug(u"Tautulli Helpers :: GeoLite2 database installed successfully.")
logger.debug("Tautulli Helpers :: GeoLite2 database installed successfully.")
plexpy.CONFIG.__setattr__('GEOIP_DB', geolite2_db)
plexpy.CONFIG.write()
@@ -645,16 +645,16 @@ def install_geoip_db():
def uninstall_geoip_db():
logger.debug(u"Tautulli Helpers :: Uninstalling the GeoLite2 database...")
logger.debug("Tautulli Helpers :: Uninstalling the GeoLite2 database...")
try:
os.remove(plexpy.CONFIG.GEOIP_DB)
plexpy.CONFIG.__setattr__('GEOIP_DB', '')
plexpy.CONFIG.write()
except Exception as e:
logger.error(u"Tautulli Helpers :: Failed to uninstall the GeoLite2 database: %s" % e)
logger.error("Tautulli Helpers :: Failed to uninstall the GeoLite2 database: %s" % e)
return False
logger.debug(u"Tautulli Helpers :: GeoLite2 database uninstalled successfully.")
logger.debug("Tautulli Helpers :: GeoLite2 database uninstalled successfully.")
return True
@@ -758,7 +758,7 @@ def upload_to_imgur(img_data, img_title='', rating_key='', fallback=''):
img_url = delete_hash = ''
if not plexpy.CONFIG.IMGUR_CLIENT_ID:
logger.error(u"Tautulli Helpers :: Cannot upload image to Imgur. No Imgur client id specified in the settings.")
logger.error("Tautulli Helpers :: Cannot upload image to Imgur. No Imgur client id specified in the settings.")
return img_url, delete_hash
headers = {'Authorization': 'Client-ID %s' % plexpy.CONFIG.IMGUR_CLIENT_ID}
@@ -771,18 +771,18 @@ def upload_to_imgur(img_data, img_title='', rating_key='', fallback=''):
headers=headers, data=data)
if response and not err_msg:
logger.debug(u"Tautulli Helpers :: Image '{}' ({}) uploaded to Imgur.".format(img_title, fallback))
logger.debug("Tautulli Helpers :: Image '{}' ({}) uploaded to Imgur.".format(img_title, fallback))
imgur_response_data = response.json().get('data')
img_url = imgur_response_data.get('link', '').replace('http://', 'https://')
delete_hash = imgur_response_data.get('deletehash', '')
else:
if err_msg:
logger.error(u"Tautulli Helpers :: Unable to upload image '{}' ({}) to Imgur: {}".format(img_title, fallback, err_msg))
logger.error("Tautulli Helpers :: Unable to upload image '{}' ({}) to Imgur: {}".format(img_title, fallback, err_msg))
else:
logger.error(u"Tautulli Helpers :: Unable to upload image '{}' ({}) to Imgur.".format(img_title, fallback))
logger.error("Tautulli Helpers :: Unable to upload image '{}' ({}) to Imgur.".format(img_title, fallback))
if req_msg:
logger.debug(u"Tautulli Helpers :: Request response: {}".format(req_msg))
logger.debug("Tautulli Helpers :: Request response: {}".format(req_msg))
return img_url, delete_hash
@@ -790,7 +790,7 @@ def upload_to_imgur(img_data, img_title='', rating_key='', fallback=''):
def delete_from_imgur(delete_hash, img_title='', fallback=''):
""" Deletes an image from Imgur """
if not plexpy.CONFIG.IMGUR_CLIENT_ID:
logger.error(u"Tautulli Helpers :: Cannot delete image from Imgur. No Imgur client id specified in the settings.")
logger.error("Tautulli Helpers :: Cannot delete image from Imgur. No Imgur client id specified in the settings.")
return False
headers = {'Authorization': 'Client-ID %s' % plexpy.CONFIG.IMGUR_CLIENT_ID}
@@ -799,13 +799,13 @@ def delete_from_imgur(delete_hash, img_title='', fallback=''):
headers=headers)
if response and not err_msg:
logger.debug(u"Tautulli Helpers :: Image '{}' ({}) deleted from Imgur.".format(img_title, fallback))
logger.debug("Tautulli Helpers :: Image '{}' ({}) deleted from Imgur.".format(img_title, fallback))
return True
else:
if err_msg:
logger.error(u"Tautulli Helpers :: Unable to delete image '{}' ({}) from Imgur: {}".format(img_title, fallback, err_msg))
logger.error("Tautulli Helpers :: Unable to delete image '{}' ({}) from Imgur: {}".format(img_title, fallback, err_msg))
else:
logger.error(u"Tautulli Helpers :: Unable to delete image '{}' ({}) from Imgur.".format(img_title, fallback))
logger.error("Tautulli Helpers :: Unable to delete image '{}' ({}) from Imgur.".format(img_title, fallback))
return False
@@ -814,7 +814,7 @@ def upload_to_cloudinary(img_data, img_title='', rating_key='', fallback=''):
img_url = ''
if not plexpy.CONFIG.CLOUDINARY_CLOUD_NAME or not plexpy.CONFIG.CLOUDINARY_API_KEY or not plexpy.CONFIG.CLOUDINARY_API_SECRET:
logger.error(u"Tautulli Helpers :: Cannot upload image to Cloudinary. Cloudinary settings not specified in the settings.")
logger.error("Tautulli Helpers :: Cannot upload image to Cloudinary. Cloudinary settings not specified in the settings.")
return img_url
cloudinary.config(
@@ -828,10 +828,10 @@ def upload_to_cloudinary(img_data, img_title='', rating_key='', fallback=''):
public_id='{}_{}'.format(fallback, rating_key),
tags=['tautulli', fallback, str(rating_key)],
context={'title': img_title.encode('utf-8'), 'rating_key': str(rating_key), 'fallback': fallback})
logger.debug(u"Tautulli Helpers :: Image '{}' ({}) uploaded to Cloudinary.".format(img_title, fallback))
logger.debug("Tautulli Helpers :: Image '{}' ({}) uploaded to Cloudinary.".format(img_title, fallback))
img_url = response.get('url', '')
except Exception as e:
logger.error(u"Tautulli Helpers :: Unable to upload image '{}' ({}) to Cloudinary: {}".format(img_title, fallback, e))
logger.error("Tautulli Helpers :: Unable to upload image '{}' ({}) to Cloudinary: {}".format(img_title, fallback, e))
return img_url
@@ -839,7 +839,7 @@ def upload_to_cloudinary(img_data, img_title='', rating_key='', fallback=''):
def delete_from_cloudinary(rating_key=None, delete_all=False):
""" Deletes an image from Cloudinary """
if not plexpy.CONFIG.CLOUDINARY_CLOUD_NAME or not plexpy.CONFIG.CLOUDINARY_API_KEY or not plexpy.CONFIG.CLOUDINARY_API_SECRET:
logger.error(u"Tautulli Helpers :: Cannot delete image from Cloudinary. Cloudinary settings not specified in the settings.")
logger.error("Tautulli Helpers :: Cannot delete image from Cloudinary. Cloudinary settings not specified in the settings.")
return False
cloudinary.config(
@@ -850,12 +850,12 @@ def delete_from_cloudinary(rating_key=None, delete_all=False):
if delete_all:
delete_resources_by_tag('tautulli')
logger.debug(u"Tautulli Helpers :: Deleted all images from Cloudinary.")
logger.debug("Tautulli Helpers :: Deleted all images from Cloudinary.")
elif rating_key:
delete_resources_by_tag(str(rating_key))
logger.debug(u"Tautulli Helpers :: Deleted images from Cloudinary with rating_key {}.".format(rating_key))
logger.debug("Tautulli Helpers :: Deleted images from Cloudinary with rating_key {}.".format(rating_key))
else:
logger.debug(u"Tautulli Helpers :: Unable to delete images from Cloudinary: No rating_key provided.")
logger.debug("Tautulli Helpers :: Unable to delete images from Cloudinary: No rating_key provided.")
return True
@@ -865,7 +865,7 @@ def cloudinary_transform(rating_key=None, width=1000, height=1500, opacity=100,
url = ''
if not plexpy.CONFIG.CLOUDINARY_CLOUD_NAME or not plexpy.CONFIG.CLOUDINARY_API_KEY or not plexpy.CONFIG.CLOUDINARY_API_SECRET:
logger.error(u"Tautulli Helpers :: Cannot transform image on Cloudinary. Cloudinary settings not specified in the settings.")
logger.error("Tautulli Helpers :: Cannot transform image on Cloudinary. Cloudinary settings not specified in the settings.")
return url
cloudinary.config(
@@ -895,9 +895,9 @@ def cloudinary_transform(rating_key=None, width=1000, height=1500, opacity=100,
try:
url, options = cloudinary_url('{}_{}'.format(fallback, rating_key), **img_options)
logger.debug(u"Tautulli Helpers :: Image '{}' ({}) transformed on Cloudinary.".format(img_title, fallback))
logger.debug("Tautulli Helpers :: Image '{}' ({}) transformed on Cloudinary.".format(img_title, fallback))
except Exception as e:
logger.error(u"Tautulli Helpers :: Unable to transform image '{}' ({}) on Cloudinary: {}".format(img_title, fallback, e))
logger.error("Tautulli Helpers :: Unable to transform image '{}' ({}) on Cloudinary: {}".format(img_title, fallback, e))
return url
@@ -910,7 +910,7 @@ def cache_image(url, image=None):
# Create image directory if it doesn't exist
imgdir = os.path.join(plexpy.CONFIG.CACHE_DIR, 'images/')
if not os.path.exists(imgdir):
logger.debug(u"Tautulli Helpers :: Creating image cache directory at %s" % imgdir)
logger.debug("Tautulli Helpers :: Creating image cache directory at %s" % imgdir)
os.makedirs(imgdir)
# Create a hash of the url to use as the filename
@@ -923,7 +923,7 @@ def cache_image(url, image=None):
with open(imagefile, 'wb') as cache_file:
cache_file.write(image)
except IOError as e:
logger.error(u"Tautulli Helpers :: Failed to cache image %s: %s" % (imagefile, e))
logger.error("Tautulli Helpers :: Failed to cache image %s: %s" % (imagefile, e))
# Try to return the image from the cache directory
if os.path.isfile(imagefile):

View file

@@ -86,7 +86,7 @@ class HTTPHandler(object):
self.timeout = timeout or self.timeout
if self.request_type not in self.valid_request_types:
logger.debug(u"HTTP request made but unsupported request type given.")
logger.debug("HTTP request made but unsupported request type given.")
return None
if uri:
@@ -104,7 +104,7 @@ class HTTPHandler(object):
return responses[0]
else:
logger.debug(u"HTTP request made but no enpoint given.")
logger.debug("HTTP request made but no enpoint given.")
return None
def _http_requests_pool(self, urls, workers=10, chunk=None):
@@ -134,7 +134,7 @@ class HTTPHandler(object):
yield work
except Exception as e:
if not self._silent:
logger.error(u"Failed to yield request: %s" % e)
logger.error("Failed to yield request: %s" % e)
finally:
pool.close()
pool.join()
@@ -145,15 +145,15 @@ class HTTPHandler(object):
r = session.request(self.request_type, url, headers=self.headers, timeout=self.timeout)
except IOError as e:
if not self._silent:
logger.warn(u"Failed to access uri endpoint %s with error %s" % (self.uri, e))
logger.warn("Failed to access uri endpoint %s with error %s" % (self.uri, e))
return None
except Exception as e:
if not self._silent:
logger.warn(u"Failed to access uri endpoint %s. Is your server maybe accepting SSL connections only? %s" % (self.uri, e))
logger.warn("Failed to access uri endpoint %s. Is your server maybe accepting SSL connections only? %s" % (self.uri, e))
return None
except:
if not self._silent:
logger.warn(u"Failed to access uri endpoint %s with Uncaught exception." % self.uri)
logger.warn("Failed to access uri endpoint %s with Uncaught exception." % self.uri)
return None
response_status = r.status
@@ -164,7 +164,7 @@ class HTTPHandler(object):
return self._http_format_output(response_content, response_headers)
else:
if not self._silent:
logger.warn(u"Failed to access uri endpoint %s. Status code %r" % (self.uri, response_status))
logger.warn("Failed to access uri endpoint %s. Status code %r" % (self.uri, response_status))
return None
def _http_format_output(self, response_content, response_headers):
@@ -191,5 +191,5 @@ class HTTPHandler(object):
except Exception as e:
if not self._silent:
logger.warn(u"Failed format response from uri %s to %s error %s" % (self.uri, self.output_format, e))
logger.warn("Failed format response from uri %s to %s error %s" % (self.uri, self.output_format, e))
return None

View file

@@ -28,11 +28,11 @@ import session
def refresh_libraries():
logger.info(u"Tautulli Libraries :: Requesting libraries list refresh...")
logger.info("Tautulli Libraries :: Requesting libraries list refresh...")
server_id = plexpy.CONFIG.PMS_IDENTIFIER
if not server_id:
logger.error(u"Tautulli Libraries :: No PMS identifier, cannot refresh libraries. Verify server in settings.")
logger.error("Tautulli Libraries :: No PMS identifier, cannot refresh libraries. Verify server in settings.")
return
library_sections = pmsconnect.PmsConnect().get_library_details()
@@ -81,10 +81,10 @@ def refresh_libraries():
# # Start library labels update on it's own thread
# threading.Thread(target=libraries.update_labels).start()
logger.info(u"Tautulli Libraries :: Libraries list refreshed.")
logger.info("Tautulli Libraries :: Libraries list refreshed.")
return True
else:
logger.warn(u"Tautulli Libraries :: Unable to refresh libraries list.")
logger.warn("Tautulli Libraries :: Unable to refresh libraries list.")
return False
@@ -100,9 +100,9 @@ def update_section_ids():
query = 'SELECT section_id, section_type FROM library_sections'
library_results = monitor_db.select(query=query)
except Exception as e:
logger.warn(u"Tautulli Libraries :: Unable to execute database query for update_section_ids: %s." % e)
logger.warn("Tautulli Libraries :: Unable to execute database query for update_section_ids: %s." % e)
logger.warn(u"Tautulli Libraries :: Unable to update section_id's in database.")
logger.warn("Tautulli Libraries :: Unable to update section_id's in database.")
plexpy.CONFIG.UPDATE_SECTION_IDS = 1
plexpy.CONFIG.write()
return None
@@ -112,7 +112,7 @@ def update_section_ids():
plexpy.CONFIG.write()
return None
logger.debug(u"Tautulli Libraries :: Updating section_id's in database.")
logger.debug("Tautulli Libraries :: Updating section_id's in database.")
# Get rating_key: section_id mapping pairs
key_mappings = {}
@@ -129,7 +129,7 @@ def update_section_ids():
children_list = library_children['children_list']
key_mappings.update({child['rating_key']: child['section_id'] for child in children_list})
else:
logger.warn(u"Tautulli Libraries :: Unable to get a list of library items for section_id %s." % section_id)
logger.warn("Tautulli Libraries :: Unable to get a list of library items for section_id %s." % section_id)
error_keys = set()
for item in history_results:
@@ -147,10 +147,10 @@ def update_section_ids():
error_keys.add(item['rating_key'])
if error_keys:
logger.info(u"Tautulli Libraries :: Updated all section_id's in database except for rating_keys: %s." %
logger.info("Tautulli Libraries :: Updated all section_id's in database except for rating_keys: %s." %
', '.join(str(key) for key in error_keys))
else:
logger.info(u"Tautulli Libraries :: Updated all section_id's in database.")
logger.info("Tautulli Libraries :: Updated all section_id's in database.")
plexpy.CONFIG.UPDATE_SECTION_IDS = 0
plexpy.CONFIG.write()
@@ -166,9 +166,9 @@ def update_labels():
query = 'SELECT section_id, section_type FROM library_sections'
library_results = monitor_db.select(query=query)
except Exception as e:
logger.warn(u"Tautulli Libraries :: Unable to execute database query for update_labels: %s." % e)
logger.warn("Tautulli Libraries :: Unable to execute database query for update_labels: %s." % e)
logger.warn(u"Tautulli Libraries :: Unable to update labels in database.")
logger.warn("Tautulli Libraries :: Unable to update labels in database.")
plexpy.CONFIG.UPDATE_LABELS = 1
plexpy.CONFIG.write()
return None
@@ -178,7 +178,7 @@ def update_labels():
plexpy.CONFIG.write()
return None
logger.debug(u"Tautulli Libraries :: Updating labels in database.")
logger.debug("Tautulli Libraries :: Updating labels in database.")
# Get rating_key: section_id mapping pairs
key_mappings = {}
@@ -209,7 +209,7 @@ def update_labels():
key_mappings[rating_key] = [label['label_title']]
else:
logger.warn(u"Tautulli Libraries :: Unable to get a list of library items for section_id %s."
logger.warn("Tautulli Libraries :: Unable to get a list of library items for section_id %s."
% section_id)
error_keys = set()
@@ -223,10 +223,10 @@ def update_labels():
error_keys.add(rating_key)
if error_keys:
logger.info(u"Tautulli Libraries :: Updated all labels in database except for rating_keys: %s." %
logger.info("Tautulli Libraries :: Updated all labels in database except for rating_keys: %s." %
', '.join(str(key) for key in error_keys))
else:
logger.info(u"Tautulli Libraries :: Updated all labels in database.")
logger.info("Tautulli Libraries :: Updated all labels in database.")
plexpy.CONFIG.UPDATE_LABELS = 0
plexpy.CONFIG.write()
@@ -305,7 +305,7 @@ class Libraries(object):
['session_history_metadata.id', 'session_history_media_info.id']],
kwargs=kwargs)
except Exception as e:
logger.warn(u"Tautulli Libraries :: Unable to execute database query for get_list: %s." % e)
logger.warn("Tautulli Libraries :: Unable to execute database query for get_list: %s." % e)
return default_return
result = query['result']
@@ -374,19 +374,19 @@ class Libraries(object):
return default_return
if section_id and not str(section_id).isdigit():
logger.warn(u"Tautulli Libraries :: Datatable media info called but invalid section_id provided.")
logger.warn("Tautulli Libraries :: Datatable media info called but invalid section_id provided.")
return default_return
elif rating_key and not str(rating_key).isdigit():
logger.warn(u"Tautulli Libraries :: Datatable media info called but invalid rating_key provided.")
logger.warn("Tautulli Libraries :: Datatable media info called but invalid rating_key provided.")
return default_return
elif not section_id and not rating_key:
logger.warn(u"Tautulli Libraries :: Datatable media info called but no input provided.")
logger.warn("Tautulli Libraries :: Datatable media info called but no input provided.")
return default_return
# Get the library details
library_details = self.get_details(section_id=section_id)
if library_details['section_id'] == None:
logger.debug(u"Tautulli Libraries :: Library section_id %s not found." % section_id)
logger.debug("Tautulli Libraries :: Library section_id %s not found." % section_id)
return default_return
if not section_type:
@@ -416,7 +416,7 @@ class Libraries(object):
'GROUP BY session_history.%s ' % (count_by, group_by)
result = monitor_db.select(query, args=[section_id])
except Exception as e:
logger.warn(u"Tautulli Libraries :: Unable to execute database query for get_datatables_media_info2: %s." % e)
logger.warn("Tautulli Libraries :: Unable to execute database query for get_datatables_media_info2: %s." % e)
return default_return
watched_list = {}
@@ -433,8 +433,8 @@ class Libraries(object):
rows = json.load(inFile)
library_count = len(rows)
except IOError as e:
#logger.debug(u"Tautulli Libraries :: No JSON file for rating_key %s." % rating_key)
#logger.debug(u"Tautulli Libraries :: Refreshing data and creating new JSON file for rating_key %s." % rating_key)
#logger.debug("Tautulli Libraries :: No JSON file for rating_key %s." % rating_key)
#logger.debug("Tautulli Libraries :: Refreshing data and creating new JSON file for rating_key %s." % rating_key)
pass
elif section_id:
try:
@@ -443,8 +443,8 @@ class Libraries(object):
rows = json.load(inFile)
library_count = len(rows)
except IOError as e:
#logger.debug(u"Tautulli Libraries :: No JSON file for library section_id %s." % section_id)
#logger.debug(u"Tautulli Libraries :: Refreshing data and creating new JSON file for section_id %s." % section_id)
#logger.debug("Tautulli Libraries :: No JSON file for library section_id %s." % section_id)
#logger.debug("Tautulli Libraries :: Refreshing data and creating new JSON file for section_id %s." % section_id)
pass
# If no cache was imported, get all library children items
@@ -464,7 +464,7 @@ class Libraries(object):
library_count = library_children['library_count']
children_list = library_children['children_list']
else:
logger.warn(u"Tautulli Libraries :: Unable to get a list of library items.")
logger.warn("Tautulli Libraries :: Unable to get a list of library items.")
return default_return
new_rows = []
@@ -509,14 +509,14 @@ class Libraries(object):
with open(outFilePath, 'w') as outFile:
json.dump(rows, outFile)
except IOError as e:
logger.debug(u"Tautulli Libraries :: Unable to create cache file for rating_key %s." % rating_key)
logger.debug("Tautulli Libraries :: Unable to create cache file for rating_key %s." % rating_key)
elif section_id:
try:
outFilePath = os.path.join(plexpy.CONFIG.CACHE_DIR,'media_info_%s.json' % section_id)
with open(outFilePath, 'w') as outFile:
json.dump(rows, outFile)
except IOError as e:
logger.debug(u"Tautulli Libraries :: Unable to create cache file for section_id %s." % section_id)
logger.debug("Tautulli Libraries :: Unable to create cache file for section_id %s." % section_id)
# Update the last_played and play_count
for item in rows:
@@ -586,16 +586,16 @@ class Libraries(object):
return False
if section_id and not str(section_id).isdigit():
logger.warn(u"Tautulli Libraries :: Datatable media info file size called but invalid section_id provided.")
logger.warn("Tautulli Libraries :: Datatable media info file size called but invalid section_id provided.")
return False
elif rating_key and not str(rating_key).isdigit():
logger.warn(u"Tautulli Libraries :: Datatable media info file size called but invalid rating_key provided.")
logger.warn("Tautulli Libraries :: Datatable media info file size called but invalid rating_key provided.")
return False
# Get the library details
library_details = self.get_details(section_id=section_id)
if library_details['section_id'] == None:
logger.debug(u"Tautulli Libraries :: Library section_id %s not found." % section_id)
logger.debug("Tautulli Libraries :: Library section_id %s not found." % section_id)
return False
if library_details['section_type'] == 'photo':
return False
@@ -603,24 +603,24 @@ class Libraries(object):
rows = []
# Import media info cache from json file
if rating_key:
#logger.debug(u"Tautulli Libraries :: Getting file sizes for rating_key %s." % rating_key)
#logger.debug("Tautulli Libraries :: Getting file sizes for rating_key %s." % rating_key)
try:
inFilePath = os.path.join(plexpy.CONFIG.CACHE_DIR,'media_info_%s-%s.json' % (section_id, rating_key))
with open(inFilePath, 'r') as inFile:
rows = json.load(inFile)
except IOError as e:
#logger.debug(u"Tautulli Libraries :: No JSON file for rating_key %s." % rating_key)
#logger.debug(u"Tautulli Libraries :: Refreshing data and creating new JSON file for rating_key %s." % rating_key)
#logger.debug("Tautulli Libraries :: No JSON file for rating_key %s." % rating_key)
#logger.debug("Tautulli Libraries :: Refreshing data and creating new JSON file for rating_key %s." % rating_key)
pass
elif section_id:
logger.debug(u"Tautulli Libraries :: Getting file sizes for section_id %s." % section_id)
logger.debug("Tautulli Libraries :: Getting file sizes for section_id %s." % section_id)
try:
inFilePath = os.path.join(plexpy.CONFIG.CACHE_DIR,'media_info_%s.json' % section_id)
with open(inFilePath, 'r') as inFile:
rows = json.load(inFile)
except IOError as e:
#logger.debug(u"Tautulli Libraries :: No JSON file for library section_id %s." % section_id)
#logger.debug(u"Tautulli Libraries :: Refreshing data and creating new JSON file for section_id %s." % section_id)
#logger.debug("Tautulli Libraries :: No JSON file for library section_id %s." % section_id)
#logger.debug("Tautulli Libraries :: Refreshing data and creating new JSON file for section_id %s." % section_id)
pass
# Get the total file size for each item
@ -653,20 +653,20 @@ class Libraries(object):
with open(outFilePath, 'w') as outFile:
json.dump(rows, outFile)
except IOError as e:
logger.debug(u"Tautulli Libraries :: Unable to create cache file with file sizes for rating_key %s." % rating_key)
logger.debug("Tautulli Libraries :: Unable to create cache file with file sizes for rating_key %s." % rating_key)
elif section_id:
try:
outFilePath = os.path.join(plexpy.CONFIG.CACHE_DIR,'media_info_%s.json' % section_id)
with open(outFilePath, 'w') as outFile:
json.dump(rows, outFile)
except IOError as e:
logger.debug(u"Tautulli Libraries :: Unable to create cache file with file sizes for section_id %s." % section_id)
logger.debug("Tautulli Libraries :: Unable to create cache file with file sizes for section_id %s." % section_id)
if rating_key:
#logger.debug(u"Tautulli Libraries :: File sizes updated for rating_key %s." % rating_key)
#logger.debug("Tautulli Libraries :: File sizes updated for rating_key %s." % rating_key)
pass
elif section_id:
logger.debug(u"Tautulli Libraries :: File sizes updated for section_id %s." % section_id)
logger.debug("Tautulli Libraries :: File sizes updated for section_id %s." % section_id)
return True
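For illustration, a minimal sketch of the media-info cache pattern used in the hunks above, with a hypothetical cache directory and section_id: read the per-section JSON file if it exists, fall back to an empty list when it is missing, and write the refreshed table back afterwards.

import json
import os

cache_dir = '/tmp/tautulli-cache'   # hypothetical stand-in for plexpy.CONFIG.CACHE_DIR
section_id = 1                      # hypothetical library section

rows = []
in_file_path = os.path.join(cache_dir, 'media_info_%s.json' % section_id)
try:
    with open(in_file_path, 'r') as in_file:
        rows = json.load(in_file)   # reuse the cached media info table if present
except IOError:
    pass                            # no cache yet; rows stays empty and is rebuilt

# ... rebuild rows from the server when the cache was missing ...

try:
    with open(in_file_path, 'w') as out_file:
        json.dump(rows, out_file)   # write the (possibly refreshed) cache back
except IOError as e:
    print('Unable to create cache file for section_id %s: %s' % (section_id, e))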
@ -682,7 +682,7 @@ class Libraries(object):
try:
monitor_db.upsert('library_sections', value_dict, key_dict)
except Exception as e:
logger.warn(u"Tautulli Libraries :: Unable to execute database query for set_config: %s." % e)
logger.warn("Tautulli Libraries :: Unable to execute database query for set_config: %s." % e)
def get_details(self, section_id=None):
default_return = {'section_id': 0,
@ -716,7 +716,7 @@ class Libraries(object):
else:
result = []
except Exception as e:
logger.warn(u"Tautulli Libraries :: Unable to execute database query for get_details: %s." % e)
logger.warn("Tautulli Libraries :: Unable to execute database query for get_details: %s." % e)
result = []
library_details = {}
@ -750,7 +750,7 @@ class Libraries(object):
return library_details
else:
logger.warn(u"Tautulli Libraries :: Unable to retrieve library %s from database. Requesting library list refresh."
logger.warn("Tautulli Libraries :: Unable to retrieve library %s from database. Requesting library list refresh."
% section_id)
# Let's first refresh the libraries list to make sure the library isn't newly added and not in the db yet
refresh_libraries()
@ -761,7 +761,7 @@ class Libraries(object):
return library_details
else:
logger.warn(u"Tautulli Users :: Unable to retrieve library %s from database. Returning 'Local' library."
logger.warn("Tautulli Users :: Unable to retrieve library %s from database. Returning 'Local' library."
% section_id)
# If there is no library data we must return something
return default_return
@ -806,7 +806,7 @@ class Libraries(object):
else:
result = []
except Exception as e:
logger.warn(u"Tautulli Libraries :: Unable to execute database query for get_watch_time_stats: %s." % e)
logger.warn("Tautulli Libraries :: Unable to execute database query for get_watch_time_stats: %s." % e)
result = []
for item in result:
@ -854,7 +854,7 @@ class Libraries(object):
else:
result = []
except Exception as e:
logger.warn(u"Tautulli Libraries :: Unable to execute database query for get_user_stats: %s." % e)
logger.warn("Tautulli Libraries :: Unable to execute database query for get_user_stats: %s." % e)
result = []
for item in result:
@ -893,7 +893,7 @@ class Libraries(object):
else:
result = []
except Exception as e:
logger.warn(u"Tautulli Libraries :: Unable to execute database query for get_recently_watched: %s." % e)
logger.warn("Tautulli Libraries :: Unable to execute database query for get_recently_watched: %s." % e)
result = []
for row in result:
@ -935,7 +935,7 @@ class Libraries(object):
'FROM library_sections WHERE deleted_section = 0'
result = monitor_db.select(query=query)
except Exception as e:
logger.warn(u"Tautulli Libraries :: Unable to execute database query for get_sections: %s." % e)
logger.warn("Tautulli Libraries :: Unable to execute database query for get_sections: %s." % e)
return None
libraries = []
@ -954,7 +954,7 @@ class Libraries(object):
try:
if section_id.isdigit():
logger.info(u"Tautulli Libraries :: Deleting all history for library id %s from database." % section_id)
logger.info("Tautulli Libraries :: Deleting all history for library id %s from database." % section_id)
session_history_media_info_del = \
monitor_db.action('DELETE FROM '
'session_history_media_info '
@ -978,7 +978,7 @@ class Libraries(object):
else:
return 'Unable to delete items, section_id not valid.'
except Exception as e:
logger.warn(u"Tautulli Libraries :: Unable to execute database query for delete_all_history: %s." % e)
logger.warn("Tautulli Libraries :: Unable to execute database query for delete_all_history: %s." % e)
def delete(self, section_id=None):
monitor_db = database.MonitorDatabase()
@ -986,7 +986,7 @@ class Libraries(object):
try:
if section_id.isdigit():
self.delete_all_history(section_id)
logger.info(u"Tautulli Libraries :: Deleting library with id %s from database." % section_id)
logger.info("Tautulli Libraries :: Deleting library with id %s from database." % section_id)
monitor_db.action('UPDATE library_sections SET deleted_section = 1 WHERE section_id = ?', [section_id])
monitor_db.action('UPDATE library_sections SET keep_history = 0 WHERE section_id = ?', [section_id])
monitor_db.action('UPDATE library_sections SET do_notify = 0 WHERE section_id = ?', [section_id])
@ -1002,7 +1002,7 @@ class Libraries(object):
else:
return 'Unable to delete library, section_id not valid.'
except Exception as e:
logger.warn(u"Tautulli Libraries :: Unable to execute database query for delete: %s." % e)
logger.warn("Tautulli Libraries :: Unable to execute database query for delete: %s." % e)
def undelete(self, section_id=None, section_name=None):
monitor_db = database.MonitorDatabase()
@ -1012,7 +1012,7 @@ class Libraries(object):
query = 'SELECT * FROM library_sections WHERE section_id = ?'
result = monitor_db.select(query=query, args=[section_id])
if result:
logger.info(u"Tautulli Libraries :: Re-adding library with id %s to database." % section_id)
logger.info("Tautulli Libraries :: Re-adding library with id %s to database." % section_id)
monitor_db.action('UPDATE library_sections SET deleted_section = 0 WHERE section_id = ?', [section_id])
monitor_db.action('UPDATE library_sections SET keep_history = 1 WHERE section_id = ?', [section_id])
monitor_db.action('UPDATE library_sections SET do_notify = 1 WHERE section_id = ?', [section_id])
@ -1025,7 +1025,7 @@ class Libraries(object):
query = 'SELECT * FROM library_sections WHERE section_name = ?'
result = monitor_db.select(query=query, args=[section_name])
if result:
logger.info(u"Tautulli Libraries :: Re-adding library with name %s to database." % section_name)
logger.info("Tautulli Libraries :: Re-adding library with name %s to database." % section_name)
monitor_db.action('UPDATE library_sections SET deleted_section = 0 WHERE section_name = ?', [section_name])
monitor_db.action('UPDATE library_sections SET keep_history = 1 WHERE section_name = ?', [section_name])
monitor_db.action('UPDATE library_sections SET do_notify = 1 WHERE section_name = ?', [section_name])
@ -1035,7 +1035,7 @@ class Libraries(object):
return False
except Exception as e:
logger.warn(u"Tautulli Libraries :: Unable to execute database query for undelete: %s." % e)
logger.warn("Tautulli Libraries :: Unable to execute database query for undelete: %s." % e)
def delete_media_info_cache(self, section_id=None):
import os
@ -1045,12 +1045,12 @@ class Libraries(object):
[os.remove(os.path.join(plexpy.CONFIG.CACHE_DIR, f)) for f in os.listdir(plexpy.CONFIG.CACHE_DIR)
if f.startswith('media_info_%s' % section_id) and f.endswith('.json')]
logger.debug(u"Tautulli Libraries :: Deleted media info table cache for section_id %s." % section_id)
logger.debug("Tautulli Libraries :: Deleted media info table cache for section_id %s." % section_id)
return 'Deleted media info table cache for library with id %s.' % section_id
else:
return 'Unable to delete media info table cache, section_id not valid.'
except Exception as e:
logger.warn(u"Tautulli Libraries :: Unable to delete media info table cache: %s." % e)
logger.warn("Tautulli Libraries :: Unable to delete media info table cache: %s." % e)
def delete_duplicate_libraries(self):
monitor_db = database.MonitorDatabase()
@ -1061,16 +1061,16 @@ class Libraries(object):
server_id = plexpy.CONFIG.PMS_IDENTIFIER
try:
logger.debug(u"Tautulli Libraries :: Deleting libraries where server_id does not match %s." % server_id)
logger.debug("Tautulli Libraries :: Deleting libraries where server_id does not match %s." % server_id)
monitor_db.action('DELETE FROM library_sections WHERE server_id != ?', [server_id])
return 'Deleted duplicate libraries from the database.'
except Exception as e:
logger.warn(u"Tautulli Libraries :: Unable to delete duplicate libraries: %s." % e)
logger.warn("Tautulli Libraries :: Unable to delete duplicate libraries: %s." % e)
def update_libraries_db_notify():
logger.info(u"Tautulli Libraries :: Upgrading library notification toggles...")
logger.info("Tautulli Libraries :: Upgrading library notification toggles...")
# Set flag first in case something fails we don't want to keep re-adding the notifiers
plexpy.CONFIG.__setattr__('UPDATE_LIBRARIES_DB_NOTIFY', 0)
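A minimal check of the string change applied throughout these hunks, assuming the log messages stay ASCII-only: the formatted text is identical with or without the u prefix, so the logger output does not change.

# -*- coding: utf-8 -*-
# section_id is a hypothetical value; the message text mirrors the hunks above.
section_id = 1

with_prefix = u"Tautulli Libraries :: Deleted media info table cache for section_id %s." % section_id
without_prefix = "Tautulli Libraries :: Deleted media info table cache for section_id %s." % section_id

# ASCII-only unicode and str compare equal on Python 2, and both literals are
# already str (unicode) on Python 3, so the rendered message is unchanged.
assert with_prefix == without_prefix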

View file

@ -64,13 +64,13 @@ def add_mobile_device(device_id=None, device_name=None, device_token=None, frien
try:
result = db.upsert(table_name='mobile_devices', key_dict=keys, value_dict=values)
except Exception as e:
logger.warn(u"Tautulli MobileApp :: Failed to register mobile device in the database: %s." % e)
logger.warn("Tautulli MobileApp :: Failed to register mobile device in the database: %s." % e)
return
if result == 'insert':
logger.info(u"Tautulli MobileApp :: Registered mobile device '%s' in the database." % device_name)
logger.info("Tautulli MobileApp :: Registered mobile device '%s' in the database." % device_name)
else:
logger.debug(u"Tautulli MobileApp :: Re-registered mobile device '%s' in the database." % device_name)
logger.debug("Tautulli MobileApp :: Re-registered mobile device '%s' in the database." % device_name)
return True
@ -79,7 +79,7 @@ def get_mobile_device_config(mobile_device_id=None):
if str(mobile_device_id).isdigit():
mobile_device_id = int(mobile_device_id)
else:
logger.error(u"Tautulli MobileApp :: Unable to retrieve mobile device config: invalid mobile_device_id %s." % mobile_device_id)
logger.error("Tautulli MobileApp :: Unable to retrieve mobile device config: invalid mobile_device_id %s." % mobile_device_id)
return None
db = database.MonitorDatabase()
@ -93,7 +93,7 @@ def set_mobile_device_config(mobile_device_id=None, **kwargs):
if str(mobile_device_id).isdigit():
mobile_device_id = int(mobile_device_id)
else:
logger.error(u"Tautulli MobileApp :: Unable to set exisiting mobile device: invalid mobile_device_id %s." % mobile_device_id)
logger.error("Tautulli MobileApp :: Unable to set exisiting mobile device: invalid mobile_device_id %s." % mobile_device_id)
return False
keys = {'id': mobile_device_id}
@ -105,10 +105,10 @@ def set_mobile_device_config(mobile_device_id=None, **kwargs):
db = database.MonitorDatabase()
try:
db.upsert(table_name='mobile_devices', key_dict=keys, value_dict=values)
logger.info(u"Tautulli MobileApp :: Updated mobile device agent: mobile_device_id %s." % mobile_device_id)
logger.info("Tautulli MobileApp :: Updated mobile device agent: mobile_device_id %s." % mobile_device_id)
return True
except Exception as e:
logger.warn(u"Tautulli MobileApp :: Unable to update mobile device: %s." % e)
logger.warn("Tautulli MobileApp :: Unable to update mobile device: %s." % e)
return False
@ -116,7 +116,7 @@ def delete_mobile_device(mobile_device_id=None):
db = database.MonitorDatabase()
if mobile_device_id:
logger.debug(u"Tautulli MobileApp :: Deleting device_id %s from the database." % mobile_device_id)
logger.debug("Tautulli MobileApp :: Deleting device_id %s from the database." % mobile_device_id)
result = db.action('DELETE FROM mobile_devices WHERE id = ?', args=[mobile_device_id])
return True
else:
@ -132,7 +132,7 @@ def set_last_seen(device_token=None):
result = db.action('UPDATE mobile_devices SET last_seen = ? WHERE device_token = ?',
args=[last_seen, device_token])
except Exception as e:
logger.warn(u"Tautulli MobileApp :: Failed to set last_seen time for device: %s." % e)
logger.warn("Tautulli MobileApp :: Failed to set last_seen time for device: %s." % e)
return

View file

@ -30,7 +30,7 @@ NEWSLETTER_SCHED = None
def add_newsletter_each(newsletter_id=None, notify_action=None, **kwargs):
if not notify_action:
logger.debug(u"Tautulli NewsletterHandler :: Notify called but no action received.")
logger.debug("Tautulli NewsletterHandler :: Notify called but no action received.")
return
data = {'newsletter': True,
@ -58,19 +58,19 @@ def schedule_newsletter_job(newsletter_job_id, name='', func=None, remove_job=Fa
if NEWSLETTER_SCHED.get_job(newsletter_job_id):
if remove_job:
NEWSLETTER_SCHED.remove_job(newsletter_job_id)
logger.info(u"Tautulli NewsletterHandler :: Removed scheduled newsletter: %s" % name)
logger.info("Tautulli NewsletterHandler :: Removed scheduled newsletter: %s" % name)
else:
NEWSLETTER_SCHED.reschedule_job(
newsletter_job_id, args=args, trigger=CronTrigger.from_crontab(cron))
logger.info(u"Tautulli NewsletterHandler :: Re-scheduled newsletter: %s" % name)
logger.info("Tautulli NewsletterHandler :: Re-scheduled newsletter: %s" % name)
elif not remove_job:
NEWSLETTER_SCHED.add_job(
func, args=args, id=newsletter_job_id, trigger=CronTrigger.from_crontab(cron))
logger.info(u"Tautulli NewsletterHandler :: Scheduled newsletter: %s" % name)
logger.info("Tautulli NewsletterHandler :: Scheduled newsletter: %s" % name)
def notify(newsletter_id=None, notify_action=None, **kwargs):
logger.info(u"Tautulli NewsletterHandler :: Preparing newsletter for newsletter_id %s." % newsletter_id)
logger.info("Tautulli NewsletterHandler :: Preparing newsletter for newsletter_id %s." % newsletter_id)
newsletter_config = newsletters.get_newsletter_config(newsletter_id=newsletter_id)
@ -149,7 +149,7 @@ def set_notify_state(newsletter, notify_action, subject, body, message, filename
db.upsert(table_name='newsletter_log', key_dict=keys, value_dict=values)
return db.last_insert_id()
else:
logger.error(u"Tautulli NewsletterHandler :: Unable to set notify state.")
logger.error("Tautulli NewsletterHandler :: Unable to set notify state.")
def set_notify_success(newsletter_log_id):
@ -202,6 +202,6 @@ def get_newsletter(newsletter_uuid=None, newsletter_id_name=None):
newsletter = n_file.read()
return newsletter
except OSError as e:
logger.error(u"Tautulli NewsletterHandler :: Failed to retrieve newsletter '%s': %s" % (newsletter_uuid, e))
logger.error("Tautulli NewsletterHandler :: Failed to retrieve newsletter '%s': %s" % (newsletter_uuid, e))
else:
logger.warn(u"Tautulli NewsletterHandler :: Newsletter file '%s' is missing." % newsletter_file)
logger.warn("Tautulli NewsletterHandler :: Newsletter file '%s' is missing." % newsletter_file)

View file

@ -117,7 +117,7 @@ def delete_newsletter(newsletter_id=None):
db = database.MonitorDatabase()
if str(newsletter_id).isdigit():
logger.debug(u"Tautulli Newsletters :: Deleting newsletter_id %s from the database."
logger.debug("Tautulli Newsletters :: Deleting newsletter_id %s from the database."
% newsletter_id)
result = db.action('DELETE FROM newsletters WHERE id = ?', args=[newsletter_id])
return True
@ -129,7 +129,7 @@ def get_newsletter_config(newsletter_id=None, mask_passwords=False):
if str(newsletter_id).isdigit():
newsletter_id = int(newsletter_id)
else:
logger.error(u"Tautulli Newsletters :: Unable to retrieve newsletter config: invalid newsletter_id %s."
logger.error("Tautulli Newsletters :: Unable to retrieve newsletter config: invalid newsletter_id %s."
% newsletter_id)
return None
@ -150,7 +150,7 @@ def get_newsletter_config(newsletter_id=None, mask_passwords=False):
config=config, email_config=email_config,
subject=subject, body=body, message=message)
except Exception as e:
logger.error(u"Tautulli Newsletters :: Failed to get newsletter config options: %s." % e)
logger.error("Tautulli Newsletters :: Failed to get newsletter config options: %s." % e)
return
if mask_passwords:
@ -171,14 +171,14 @@ def add_newsletter_config(agent_id=None, **kwargs):
if str(agent_id).isdigit():
agent_id = int(agent_id)
else:
logger.error(u"Tautulli Newsletters :: Unable to add new newsletter: invalid agent_id %s."
logger.error("Tautulli Newsletters :: Unable to add new newsletter: invalid agent_id %s."
% agent_id)
return False
agent = next((a for a in available_newsletter_agents() if a['id'] == agent_id), None)
if not agent:
logger.error(u"Tautulli Newsletters :: Unable to retrieve new newsletter agent: invalid agent_id %s."
logger.error("Tautulli Newsletters :: Unable to retrieve new newsletter agent: invalid agent_id %s."
% agent_id)
return False
@ -201,12 +201,12 @@ def add_newsletter_config(agent_id=None, **kwargs):
try:
db.upsert(table_name='newsletters', key_dict=keys, value_dict=values)
newsletter_id = db.last_insert_id()
logger.info(u"Tautulli Newsletters :: Added new newsletter agent: %s (newsletter_id %s)."
logger.info("Tautulli Newsletters :: Added new newsletter agent: %s (newsletter_id %s)."
% (agent['label'], newsletter_id))
blacklist_logger()
return newsletter_id
except Exception as e:
logger.warn(u"Tautulli Newsletters :: Unable to add newsletter agent: %s." % e)
logger.warn("Tautulli Newsletters :: Unable to add newsletter agent: %s." % e)
return False
@ -214,14 +214,14 @@ def set_newsletter_config(newsletter_id=None, agent_id=None, **kwargs):
if str(agent_id).isdigit():
agent_id = int(agent_id)
else:
logger.error(u"Tautulli Newsletters :: Unable to set existing newsletter: invalid agent_id %s."
logger.error("Tautulli Newsletters :: Unable to set existing newsletter: invalid agent_id %s."
% agent_id)
return False
agent = next((a for a in available_newsletter_agents() if a['id'] == agent_id), None)
if not agent:
logger.error(u"Tautulli Newsletters :: Unable to retrieve existing newsletter agent: invalid agent_id %s."
logger.error("Tautulli Newsletters :: Unable to retrieve existing newsletter agent: invalid agent_id %s."
% agent_id)
return False
@ -266,13 +266,13 @@ def set_newsletter_config(newsletter_id=None, agent_id=None, **kwargs):
db = database.MonitorDatabase()
try:
db.upsert(table_name='newsletters', key_dict=keys, value_dict=values)
logger.info(u"Tautulli Newsletters :: Updated newsletter agent: %s (newsletter_id %s)."
logger.info("Tautulli Newsletters :: Updated newsletter agent: %s (newsletter_id %s)."
% (agent['label'], newsletter_id))
newsletter_handler.schedule_newsletters(newsletter_id=newsletter_id)
blacklist_logger()
return True
except Exception as e:
logger.warn(u"Tautulli Newsletters :: Unable to update newsletter agent: %s." % e)
logger.warn("Tautulli Newsletters :: Unable to update newsletter agent: %s." % e)
return False
@ -287,7 +287,7 @@ def send_newsletter(newsletter_id=None, subject=None, body=None, message=None, n
message=message)
return agent.send()
else:
logger.debug(u"Tautulli Newsletters :: Notification requested but no newsletter_id received.")
logger.debug("Tautulli Newsletters :: Notification requested but no newsletter_id received.")
def blacklist_logger():
@ -492,11 +492,11 @@ class Newsletter(object):
self.newsletter = self.generate_newsletter()
if self.template_error:
logger.error(u"Tautulli Newsletters :: %s newsletter failed to render template. Newsletter not sent." % self.NAME)
logger.error("Tautulli Newsletters :: %s newsletter failed to render template. Newsletter not sent." % self.NAME)
return False
if not self._has_data():
logger.warn(u"Tautulli Newsletters :: %s newsletter has no data. Newsletter not sent." % self.NAME)
logger.warn("Tautulli Newsletters :: %s newsletter has no data. Newsletter not sent." % self.NAME)
return False
self._save()
@ -522,9 +522,9 @@ class Newsletter(object):
n_file.write(line + '\r\n')
#n_file.write(line.strip())
logger.info(u"Tautulli Newsletters :: %s newsletter saved to '%s'" % (self.NAME, newsletter_file))
logger.info("Tautulli Newsletters :: %s newsletter saved to '%s'" % (self.NAME, newsletter_file))
except OSError as e:
logger.error(u"Tautulli Newsletters :: Failed to save %s newsletter to '%s': %s"
logger.error("Tautulli Newsletters :: Failed to save %s newsletter to '%s': %s"
% (self.NAME, newsletter_file, e))
def _send(self):
@ -608,34 +608,28 @@ class Newsletter(object):
try:
subject = custom_formatter.format(unicode(self.subject), **self.parameters)
except LookupError as e:
logger.error(
u"Tautulli Newsletter :: Unable to parse parameter %s in newsletter subject. Using fallback." % e)
logger.error("Tautulli Newsletter :: Unable to parse parameter %s in newsletter subject. Using fallback." % e)
subject = unicode(self._DEFAULT_SUBJECT).format(**self.parameters)
except Exception as e:
logger.error(
u"Tautulli Newsletter :: Unable to parse custom newsletter subject: %s. Using fallback." % e)
logger.error("Tautulli Newsletter :: Unable to parse custom newsletter subject: %s. Using fallback." % e)
subject = unicode(self._DEFAULT_SUBJECT).format(**self.parameters)
try:
body = custom_formatter.format(unicode(self.body), **self.parameters)
except LookupError as e:
logger.error(
u"Tautulli Newsletter :: Unable to parse parameter %s in newsletter body. Using fallback." % e)
logger.error("Tautulli Newsletter :: Unable to parse parameter %s in newsletter body. Using fallback." % e)
body = unicode(self._DEFAULT_BODY).format(**self.parameters)
except Exception as e:
logger.error(
u"Tautulli Newsletter :: Unable to parse custom newsletter body: %s. Using fallback." % e)
logger.error("Tautulli Newsletter :: Unable to parse custom newsletter body: %s. Using fallback." % e)
body = unicode(self._DEFAULT_BODY).format(**self.parameters)
try:
message = custom_formatter.format(unicode(self.message), **self.parameters)
except LookupError as e:
logger.error(
u"Tautulli Newsletter :: Unable to parse parameter %s in newsletter message. Using fallback." % e)
logger.error("Tautulli Newsletter :: Unable to parse parameter %s in newsletter message. Using fallback." % e)
message = unicode(self._DEFAULT_MESSAGE).format(**self.parameters)
except Exception as e:
logger.error(
u"Tautulli Newsletter :: Unable to parse custom newsletter message: %s. Using fallback." % e)
logger.error("Tautulli Newsletter :: Unable to parse custom newsletter message: %s. Using fallback." % e)
message = unicode(self._DEFAULT_MESSAGE).format(**self.parameters)
return subject, body, message
@ -647,12 +641,10 @@ class Newsletter(object):
try:
filename = custom_formatter.format(unicode(self.filename), **self.parameters)
except LookupError as e:
logger.error(
u"Tautulli Newsletter :: Unable to parse parameter %s in newsletter filename. Using fallback." % e)
logger.error("Tautulli Newsletter :: Unable to parse parameter %s in newsletter filename. Using fallback." % e)
filename = unicode(self._DEFAULT_FILENAME).format(**self.parameters)
except Exception as e:
logger.error(
u"Tautulli Newsletter :: Unable to parse custom newsletter subject: %s. Using fallback." % e)
logger.error("Tautulli Newsletter :: Unable to parse custom newsletter subject: %s. Using fallback." % e)
filename = unicode(self._DEFAULT_FILENAME).format(**self.parameters)
return filename
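The subject, body, message, and filename blocks above all use the same fallback pattern: format the user-supplied template, and on a missing placeholder (a LookupError) or any other failure fall back to the default template. A minimal sketch, with string.Formatter standing in for the custom formatter and hypothetical template and parameter values:

import string

formatter = string.Formatter()                  # stand-in for custom_formatter
parameters = {'server_name': 'Plex'}            # hypothetical newsletter parameters
default_subject = 'Newsletter from {server_name}'
user_subject = 'News from {server_name} for {week}'  # {week} is not a known parameter

try:
    subject = formatter.format(user_subject, **parameters)
except LookupError:
    # unknown placeholder (KeyError/IndexError), e.g. {week} -> use the default
    subject = default_subject.format(**parameters)
except Exception:
    # any other template failure -> also fall back to the default
    subject = default_subject.format(**parameters)

print(subject)  # -> 'Newsletter from Plex'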
@ -809,7 +801,7 @@ class RecentlyAdded(Newsletter):
from notification_handler import get_img_info, set_hash_image_info
if not self.config['incl_libraries']:
logger.warn(u"Tautulli Newsletters :: Failed to retrieve %s newsletter data: no libraries selected." % self.NAME)
logger.warn("Tautulli Newsletters :: Failed to retrieve %s newsletter data: no libraries selected." % self.NAME)
media_types = set()
for s in self._get_sections():

View file

@ -62,15 +62,15 @@ def process_queue():
else:
add_notifier_each(**params)
except Exception as e:
logger.exception(u"Tautulli NotificationHandler :: Notification thread exception: %s" % e)
logger.exception("Tautulli NotificationHandler :: Notification thread exception: %s" % e)
queue.task_done()
logger.info(u"Tautulli NotificationHandler :: Notification thread exiting...")
logger.info("Tautulli NotificationHandler :: Notification thread exiting...")
def start_threads(num_threads=1):
logger.info(u"Tautulli NotificationHandler :: Starting background notification handler ({} threads).".format(num_threads))
logger.info("Tautulli NotificationHandler :: Starting background notification handler ({} threads).".format(num_threads))
for x in range(num_threads):
thread = threading.Thread(target=process_queue)
thread.daemon = True
@ -79,7 +79,7 @@ def start_threads(num_threads=1):
def add_notifier_each(notifier_id=None, notify_action=None, stream_data=None, timeline_data=None, manual_trigger=False, **kwargs):
if not notify_action:
logger.debug(u"Tautulli NotificationHandler :: Notify called but no action received.")
logger.debug("Tautulli NotificationHandler :: Notify called but no action received.")
return
if notifier_id:
@ -111,7 +111,7 @@ def add_notifier_each(notifier_id=None, notify_action=None, stream_data=None, ti
**kwargs)
if not parameters:
logger.error(u"Tautulli NotificationHandler :: Failed to build notification parameters.")
logger.error("Tautulli NotificationHandler :: Failed to build notification parameters.")
return
for notifier in notifiers_enabled:
@ -127,7 +127,7 @@ def add_notifier_each(notifier_id=None, notify_action=None, stream_data=None, ti
data.update(kwargs)
plexpy.NOTIFY_QUEUE.put(data)
else:
logger.debug(u"Tautulli NotificationHandler :: Custom notification conditions not satisfied, skipping notifier_id %s." % notifier['id'])
logger.debug("Tautulli NotificationHandler :: Custom notification conditions not satisfied, skipping notifier_id %s." % notifier['id'])
# Add on_concurrent and on_newdevice to queue if action is on_play
if notify_action == 'on_play':
@ -147,11 +147,11 @@ def notify_conditions(notify_action=None, stream_data=None, timeline_data=None):
# library_details = library_data.get_details(section_id=stream_data['section_id'])
# if not user_details['do_notify']:
# logger.debug(u"Tautulli NotificationHandler :: Notifications for user '%s' are disabled." % user_details['username'])
# logger.debug("Tautulli NotificationHandler :: Notifications for user '%s' are disabled." % user_details['username'])
# return False
#
# elif not library_details['do_notify'] and notify_action not in ('on_concurrent', 'on_newdevice'):
# logger.debug(u"Tautulli NotificationHandler :: Notifications for library '%s' are disabled." % library_details['section_name'])
# logger.debug("Tautulli NotificationHandler :: Notifications for library '%s' are disabled." % library_details['section_name'])
# return False
if notify_action == 'on_concurrent':
@ -201,7 +201,7 @@ def notify_conditions(notify_action=None, stream_data=None, timeline_data=None):
# library_details = library_data.get_details(section_id=timeline_data['section_id'])
#
# if not library_details['do_notify_created']:
# # logger.debug(u"Tautulli NotificationHandler :: Notifications for library '%s' is disabled." % library_details['section_name'])
# # logger.debug("Tautulli NotificationHandler :: Notifications for library '%s' is disabled." % library_details['section_name'])
# return False
return True
@ -218,7 +218,7 @@ def notify_custom_conditions(notifier_id=None, parameters=None):
custom_conditions = notifier_config['custom_conditions']
if custom_conditions_logic or any(c for c in custom_conditions if c['value']):
logger.debug(u"Tautulli NotificationHandler :: Checking custom notification conditions for notifier_id %s."
logger.debug("Tautulli NotificationHandler :: Checking custom notification conditions for notifier_id %s."
% notifier_id)
logic_groups = None
@ -227,7 +227,7 @@ def notify_custom_conditions(notifier_id=None, parameters=None):
# Parse and validate the custom conditions logic
logic_groups = helpers.parse_condition_logic_string(custom_conditions_logic, len(custom_conditions))
except ValueError as e:
logger.error(u"Tautulli NotificationHandler :: Unable to parse custom condition logic '%s': %s."
logger.error("Tautulli NotificationHandler :: Unable to parse custom condition logic '%s': %s."
% (custom_conditions_logic, e))
return False
@ -244,7 +244,7 @@ def notify_custom_conditions(notifier_id=None, parameters=None):
if not parameter or not operator or not values:
evaluated = True
evaluated_conditions.append(evaluated)
logger.debug(u"Tautulli NotificationHandler :: {%s} Blank condition > %s" % (i+1, evaluated))
logger.debug("Tautulli NotificationHandler :: {%s} Blank condition > %s" % (i+1, evaluated))
continue
# Make sure the condition values is in a list
@ -263,7 +263,7 @@ def notify_custom_conditions(notifier_id=None, parameters=None):
values = [helpers.cast_to_float(v) for v in values]
except ValueError as e:
logger.error(u"Tautulli NotificationHandler :: {%s} Unable to cast condition '%s', values '%s', to type '%s'."
logger.error("Tautulli NotificationHandler :: {%s} Unable to cast condition '%s', values '%s', to type '%s'."
% (i+1, parameter, values, parameter_type))
return False
@ -279,7 +279,7 @@ def notify_custom_conditions(notifier_id=None, parameters=None):
parameter_value = helpers.cast_to_float(parameter_value)
except ValueError as e:
logger.error(u"Tautulli NotificationHandler :: {%s} Unable to cast parameter '%s', value '%s', to type '%s'."
logger.error("Tautulli NotificationHandler :: {%s} Unable to cast parameter '%s', value '%s', to type '%s'."
% (i+1, parameter, parameter_value, parameter_type))
return False
@ -310,28 +310,28 @@ def notify_custom_conditions(notifier_id=None, parameters=None):
else:
evaluated = None
logger.warn(u"Tautulli NotificationHandler :: {%s} Invalid condition operator '%s' > %s."
logger.warn("Tautulli NotificationHandler :: {%s} Invalid condition operator '%s' > %s."
% (i+1, operator, evaluated))
evaluated_conditions.append(evaluated)
logger.debug(u"Tautulli NotificationHandler :: {%s} %s | %s | %s > '%s' > %s"
logger.debug("Tautulli NotificationHandler :: {%s} %s | %s | %s > '%s' > %s"
% (i+1, parameter, operator, ' or '.join(["'%s'" % v for v in values]), parameter_value, evaluated))
if logic_groups:
# Format and evaluate the logic string
try:
evaluated_logic = helpers.eval_logic_groups_to_bool(logic_groups, evaluated_conditions)
logger.debug(u"Tautulli NotificationHandler :: Condition logic: %s > %s"
logger.debug("Tautulli NotificationHandler :: Condition logic: %s > %s"
% (custom_conditions_logic, evaluated_logic))
except Exception as e:
logger.error(u"Tautulli NotificationHandler :: Unable to evaluate custom condition logic: %s." % e)
logger.error("Tautulli NotificationHandler :: Unable to evaluate custom condition logic: %s." % e)
return False
else:
evaluated_logic = all(evaluated_conditions[1:])
logger.debug(u"Tautulli NotificationHandler :: Condition logic [blank]: %s > %s"
logger.debug("Tautulli NotificationHandler :: Condition logic [blank]: %s > %s"
% (' and '.join(['{%s}' % (i+1) for i in range(len(custom_conditions))]), evaluated_logic))
logger.debug(u"Tautulli NotificationHandler :: Custom conditions evaluated to '{}'. Conditions: {}.".format(
logger.debug("Tautulli NotificationHandler :: Custom conditions evaluated to '{}'. Conditions: {}.".format(
evaluated_logic, evaluated_conditions[1:]))
return evaluated_logic
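A minimal sketch of the custom-condition evaluation that ends here, using hypothetical conditions, operators, and notification parameters; the real helpers parse a logic string such as "{1} and {2}" before combining results, and a blank logic string means every condition must pass.

# Hypothetical conditions and parameters for illustration only.
conditions = [
    {'parameter': 'media_type', 'operator': 'is', 'values': ['movie']},
    {'parameter': 'rating', 'operator': 'is greater than', 'values': ['7.5']},
]
parameters = {'media_type': 'movie', 'rating': '8.1'}

evaluated_conditions = []
for condition in conditions:
    parameter_value = parameters.get(condition['parameter'])
    operator = condition['operator']
    values = condition['values']

    if operator == 'is':
        evaluated = parameter_value in values
    elif operator == 'is greater than':
        evaluated = float(parameter_value) > float(values[0])  # numeric comparison on floats
    else:
        evaluated = None  # invalid operator

    evaluated_conditions.append(evaluated)

# Blank condition logic: every condition must evaluate to True.
evaluated_logic = all(evaluated_conditions)
print(evaluated_logic)  # -> True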
@ -340,7 +340,7 @@ def notify_custom_conditions(notifier_id=None, parameters=None):
def notify(notifier_id=None, notify_action=None, stream_data=None, timeline_data=None, parameters=None, **kwargs):
logger.info(u"Tautulli NotificationHandler :: Preparing notification for notifier_id %s." % notifier_id)
logger.info("Tautulli NotificationHandler :: Preparing notification for notifier_id %s." % notifier_id)
notifier_config = notifiers.get_notifier_config(notifier_id=notifier_id)
@ -456,7 +456,7 @@ def set_notify_state(notifier, notify_action, subject='', body='', script_args='
monitor_db.upsert(table_name='notify_log', key_dict=keys, value_dict=values)
return monitor_db.last_insert_id()
else:
logger.error(u"Tautulli NotificationHandler :: Unable to set notify state.")
logger.error("Tautulli NotificationHandler :: Unable to set notify state.")
def set_notify_success(notification_id):
@ -1049,10 +1049,10 @@ def build_notify_text(subject='', body='', notify_action=None, parameters=None,
# Make sure subject and body text are strings
if not isinstance(subject, basestring):
logger.error(u"Tautulli NotificationHandler :: Invalid subject text. Using fallback.")
logger.error("Tautulli NotificationHandler :: Invalid subject text. Using fallback.")
subject = default_subject
if not isinstance(body, basestring):
logger.error(u"Tautulli NotificationHandler :: Invalid body text. Using fallback.")
logger.error("Tautulli NotificationHandler :: Invalid body text. Using fallback.")
body = default_body
media_type = parameters.get('media_type')
@ -1093,10 +1093,10 @@ def build_notify_text(subject='', body='', notify_action=None, parameters=None,
try:
script_args = [str_formatter(arg) for arg in helpers.split_args(subject)]
except LookupError as e:
logger.error(u"Tautulli NotificationHandler :: Unable to parse parameter %s in script argument. Using fallback." % e)
logger.error("Tautulli NotificationHandler :: Unable to parse parameter %s in script argument. Using fallback." % e)
script_args = []
except Exception as e:
logger.error(u"Tautulli NotificationHandler :: Unable to parse custom script arguments: %s. Using fallback." % e)
logger.error("Tautulli NotificationHandler :: Unable to parse custom script arguments: %s. Using fallback." % e)
script_args = []
elif agent_id == 25:
@ -1104,53 +1104,51 @@ def build_notify_text(subject='', body='', notify_action=None, parameters=None,
try:
subject = json.loads(subject)
except ValueError as e:
logger.error(u"Tautulli NotificationHandler :: Unable to parse custom webhook json header data: %s. Using fallback." % e)
logger.error("Tautulli NotificationHandler :: Unable to parse custom webhook json header data: %s. Using fallback." % e)
subject = ''
if subject:
try:
subject = json.dumps(helpers.traverse_map(subject, str_formatter))
except LookupError as e:
logger.error(u"Tautulli NotificationHandler :: Unable to parse parameter %s in webhook header data. Using fallback." % e)
logger.error("Tautulli NotificationHandler :: Unable to parse parameter %s in webhook header data. Using fallback." % e)
subject = ''
except Exception as e:
logger.error(u"Tautulli NotificationHandler :: Unable to parse custom webhook header data: %s. Using fallback." % e)
logger.error("Tautulli NotificationHandler :: Unable to parse custom webhook header data: %s. Using fallback." % e)
subject = ''
if body:
try:
body = json.loads(body)
except ValueError as e:
logger.error(u"Tautulli NotificationHandler :: Unable to parse custom webhook json body data: %s. Using fallback." % e)
logger.error("Tautulli NotificationHandler :: Unable to parse custom webhook json body data: %s. Using fallback." % e)
body = ''
if body:
try:
body = json.dumps(helpers.traverse_map(body, str_formatter))
except LookupError as e:
logger.error(u"Tautulli NotificationHandler :: Unable to parse parameter %s in webhook body data. Using fallback." % e)
logger.error("Tautulli NotificationHandler :: Unable to parse parameter %s in webhook body data. Using fallback." % e)
body = ''
except Exception as e:
logger.error(u"Tautulli NotificationHandler :: Unable to parse custom webhook body data: %s. Using fallback." % e)
logger.error("Tautulli NotificationHandler :: Unable to parse custom webhook body data: %s. Using fallback." % e)
body = ''
else:
try:
subject = str_formatter(subject)
except LookupError as e:
logger.error(
u"Tautulli NotificationHandler :: Unable to parse parameter %s in notification subject. Using fallback." % e)
logger.error("Tautulli NotificationHandler :: Unable to parse parameter %s in notification subject. Using fallback." % e)
subject = unicode(default_subject).format(**parameters)
except Exception as e:
logger.error(
u"Tautulli NotificationHandler :: Unable to parse custom notification subject: %s. Using fallback." % e)
logger.error("Tautulli NotificationHandler :: Unable to parse custom notification subject: %s. Using fallback." % e)
subject = unicode(default_subject).format(**parameters)
try:
body = str_formatter(body)
except LookupError as e:
logger.error(u"Tautulli NotificationHandler :: Unable to parse parameter %s in notification body. Using fallback." % e)
logger.error("Tautulli NotificationHandler :: Unable to parse parameter %s in notification body. Using fallback." % e)
body = unicode(default_body).format(**parameters)
except Exception as e:
logger.error(u"Tautulli NotificationHandler :: Unable to parse custom notification body: %s. Using fallback." % e)
logger.error("Tautulli NotificationHandler :: Unable to parse custom notification body: %s. Using fallback." % e)
body = unicode(default_body).format(**parameters)
return subject, body, script_args
@ -1364,16 +1362,16 @@ def lookup_tvmaze_by_id(rating_key=None, thetvdb_id=None, imdb_id=None):
'WHERE rating_key = ?'
tvmaze_info = db.select_single(query, args=[rating_key])
except Exception as e:
logger.warn(u"Tautulli NotificationHandler :: Unable to execute database query for lookup_tvmaze_by_tvdb_id: %s." % e)
logger.warn("Tautulli NotificationHandler :: Unable to execute database query for lookup_tvmaze_by_tvdb_id: %s." % e)
return {}
if not tvmaze_info:
tvmaze_info = {}
if thetvdb_id:
logger.debug(u"Tautulli NotificationHandler :: Looking up TVmaze info for thetvdb_id '{}'.".format(thetvdb_id))
logger.debug("Tautulli NotificationHandler :: Looking up TVmaze info for thetvdb_id '{}'.".format(thetvdb_id))
else:
logger.debug(u"Tautulli NotificationHandler :: Looking up TVmaze info for imdb_id '{}'.".format(imdb_id))
logger.debug("Tautulli NotificationHandler :: Looking up TVmaze info for imdb_id '{}'.".format(imdb_id))
params = {'thetvdb': thetvdb_id} if thetvdb_id else {'imdb': imdb_id}
response, err_msg, req_msg = request.request_response2('http://api.tvmaze.com/lookup/shows', params=params)
@ -1398,10 +1396,10 @@ def lookup_tvmaze_by_id(rating_key=None, thetvdb_id=None, imdb_id=None):
else:
if err_msg:
logger.error(u"Tautulli NotificationHandler :: {}".format(err_msg))
logger.error("Tautulli NotificationHandler :: {}".format(err_msg))
if req_msg:
logger.debug(u"Tautulli NotificationHandler :: Request response: {}".format(req_msg))
logger.debug("Tautulli NotificationHandler :: Request response: {}".format(req_msg))
return tvmaze_info
@ -1414,16 +1412,16 @@ def lookup_themoviedb_by_id(rating_key=None, thetvdb_id=None, imdb_id=None):
'WHERE rating_key = ?'
themoviedb_info = db.select_single(query, args=[rating_key])
except Exception as e:
logger.warn(u"Tautulli NotificationHandler :: Unable to execute database query for lookup_themoviedb_by_imdb_id: %s." % e)
logger.warn("Tautulli NotificationHandler :: Unable to execute database query for lookup_themoviedb_by_imdb_id: %s." % e)
return {}
if not themoviedb_info:
themoviedb_info = {}
if thetvdb_id:
logger.debug(u"Tautulli NotificationHandler :: Looking up The Movie Database info for thetvdb_id '{}'.".format(thetvdb_id))
logger.debug("Tautulli NotificationHandler :: Looking up The Movie Database info for thetvdb_id '{}'.".format(thetvdb_id))
else:
logger.debug(u"Tautulli NotificationHandler :: Looking up The Movie Database info for imdb_id '{}'.".format(imdb_id))
logger.debug("Tautulli NotificationHandler :: Looking up The Movie Database info for imdb_id '{}'.".format(imdb_id))
params = {'api_key': plexpy.CONFIG.THEMOVIEDB_APIKEY,
'external_source': 'tvdb_id' if thetvdb_id else 'imdb_id'
@ -1461,10 +1459,10 @@ def lookup_themoviedb_by_id(rating_key=None, thetvdb_id=None, imdb_id=None):
else:
if err_msg:
logger.error(u"Tautulli NotificationHandler :: {}".format(err_msg))
logger.error("Tautulli NotificationHandler :: {}".format(err_msg))
if req_msg:
logger.debug(u"Tautulli NotificationHandler :: Request response: {}".format(req_msg))
logger.debug("Tautulli NotificationHandler :: Request response: {}".format(req_msg))
return themoviedb_info
@ -1480,7 +1478,7 @@ def get_themoviedb_info(rating_key=None, media_type=None, themoviedb_id=None):
'WHERE rating_key = ?'
result = db.select_single(query, args=[rating_key])
except Exception as e:
logger.warn(u"Tautulli NotificationHandler :: Unable to execute database query for get_themoviedb_info: %s." % e)
logger.warn("Tautulli NotificationHandler :: Unable to execute database query for get_themoviedb_info: %s." % e)
return {}
if result:
@ -1491,7 +1489,7 @@ def get_themoviedb_info(rating_key=None, media_type=None, themoviedb_id=None):
themoviedb_json = {}
logger.debug(u"Tautulli NotificationHandler :: Looking up The Movie Database info for themoviedb_id '{}'.".format(themoviedb_id))
logger.debug("Tautulli NotificationHandler :: Looking up The Movie Database info for themoviedb_id '{}'.".format(themoviedb_id))
params = {'api_key': plexpy.CONFIG.THEMOVIEDB_APIKEY}
response, err_msg, req_msg = request.request_response2('https://api.themoviedb.org/3/{}/{}'.format(media_type, themoviedb_id), params=params)
@ -1514,10 +1512,10 @@ def get_themoviedb_info(rating_key=None, media_type=None, themoviedb_id=None):
else:
if err_msg:
logger.error(u"Tautulli NotificationHandler :: {}".format(err_msg))
logger.error("Tautulli NotificationHandler :: {}".format(err_msg))
if req_msg:
logger.debug(u"Tautulli NotificationHandler :: Request response: {}".format(req_msg))
logger.debug("Tautulli NotificationHandler :: Request response: {}".format(req_msg))
return themoviedb_json
@ -1531,7 +1529,7 @@ def lookup_musicbrainz_info(musicbrainz_type=None, rating_key=None, artist=None,
'WHERE rating_key = ?'
musicbrainz_info = db.select_single(query, args=[rating_key])
except Exception as e:
logger.warn(u"Tautulli NotificationHandler :: Unable to execute database query for lookup_musicbrainz: %s." % e)
logger.warn("Tautulli NotificationHandler :: Unable to execute database query for lookup_musicbrainz: %s." % e)
return {}
if not musicbrainz_info:
@ -1542,23 +1540,23 @@ def lookup_musicbrainz_info(musicbrainz_type=None, rating_key=None, artist=None,
)
if musicbrainz_type == 'artist':
logger.debug(u"Tautulli NotificationHandler :: Looking up MusicBrainz info for "
u"{} '{}'.".format(musicbrainz_type, artist))
logger.debug("Tautulli NotificationHandler :: Looking up MusicBrainz info for "
"{} '{}'.".format(musicbrainz_type, artist))
result = musicbrainzngs.search_artists(artist=artist, strict=True, limit=1)
if result['artist-list']:
musicbrainz_info = result['artist-list'][0]
elif musicbrainz_type == 'release':
logger.debug(u"Tautulli NotificationHandler :: Looking up MusicBrainz info for "
u"{} '{} - {}'.".format(musicbrainz_type, artist, release))
logger.debug("Tautulli NotificationHandler :: Looking up MusicBrainz info for "
"{} '{} - {}'.".format(musicbrainz_type, artist, release))
result = musicbrainzngs.search_releases(artist=artist, release=release, tracks=tracks,
strict=True, limit=1)
if result['release-list']:
musicbrainz_info = result['release-list'][0]
elif musicbrainz_type == 'recording':
logger.debug(u"Tautulli NotificationHandler :: Looking up MusicBrainz info for "
u"{} '{} - {} - {}'.".format(musicbrainz_type, artist, release, recording))
logger.debug("Tautulli NotificationHandler :: Looking up MusicBrainz info for "
"{} '{} - {} - {}'.".format(musicbrainz_type, artist, release, recording))
result = musicbrainzngs.search_recordings(artist=artist, release=release, recording=recording,
tracks=tracks, tnum=tnum,
strict=True, limit=1)
@ -1580,7 +1578,7 @@ def lookup_musicbrainz_info(musicbrainz_type=None, rating_key=None, artist=None,
musicbrainz_info.pop('musicbrainz_json')
else:
logger.warning(u"Tautulli NotificationHandler :: No match found on MusicBrainz.")
logger.warn("Tautulli NotificationHandler :: No match found on MusicBrainz.")
return musicbrainz_info

View file

@ -447,7 +447,7 @@ def delete_notifier(notifier_id=None):
db = database.MonitorDatabase()
if str(notifier_id).isdigit():
logger.debug(u"Tautulli Notifiers :: Deleting notifier_id %s from the database."
logger.debug("Tautulli Notifiers :: Deleting notifier_id %s from the database."
% notifier_id)
result = db.action('DELETE FROM notifiers WHERE id = ?', args=[notifier_id])
return True
@ -459,7 +459,7 @@ def get_notifier_config(notifier_id=None, mask_passwords=False):
if str(notifier_id).isdigit():
notifier_id = int(notifier_id)
else:
logger.error(u"Tautulli Notifiers :: Unable to retrieve notifier config: invalid notifier_id %s."
logger.error("Tautulli Notifiers :: Unable to retrieve notifier config: invalid notifier_id %s."
% notifier_id)
return None
@ -473,7 +473,7 @@ def get_notifier_config(notifier_id=None, mask_passwords=False):
config = json.loads(result.pop('notifier_config', '{}'))
notifier_agent = get_agent_class(agent_id=result['agent_id'], config=config)
except Exception as e:
logger.error(u"Tautulli Notifiers :: Failed to get notifier config options: %s." % e)
logger.error("Tautulli Notifiers :: Failed to get notifier config options: %s." % e)
return
if mask_passwords:
@ -517,14 +517,14 @@ def add_notifier_config(agent_id=None, **kwargs):
if str(agent_id).isdigit():
agent_id = int(agent_id)
else:
logger.error(u"Tautulli Notifiers :: Unable to add new notifier: invalid agent_id %s."
logger.error("Tautulli Notifiers :: Unable to add new notifier: invalid agent_id %s."
% agent_id)
return False
agent = next((a for a in available_notification_agents() if a['id'] == agent_id), None)
if not agent:
logger.error(u"Tautulli Notifiers :: Unable to retrieve new notification agent: invalid agent_id %s."
logger.error("Tautulli Notifiers :: Unable to retrieve new notification agent: invalid agent_id %s."
% agent_id)
return False
@ -553,12 +553,12 @@ def add_notifier_config(agent_id=None, **kwargs):
try:
db.upsert(table_name='notifiers', key_dict=keys, value_dict=values)
notifier_id = db.last_insert_id()
logger.info(u"Tautulli Notifiers :: Added new notification agent: %s (notifier_id %s)."
logger.info("Tautulli Notifiers :: Added new notification agent: %s (notifier_id %s)."
% (agent['label'], notifier_id))
blacklist_logger()
return notifier_id
except Exception as e:
logger.warn(u"Tautulli Notifiers :: Unable to add notification agent: %s." % e)
logger.warn("Tautulli Notifiers :: Unable to add notification agent: %s." % e)
return False
@ -566,14 +566,14 @@ def set_notifier_config(notifier_id=None, agent_id=None, **kwargs):
if str(agent_id).isdigit():
agent_id = int(agent_id)
else:
logger.error(u"Tautulli Notifiers :: Unable to set existing notifier: invalid agent_id %s."
logger.error("Tautulli Notifiers :: Unable to set existing notifier: invalid agent_id %s."
% agent_id)
return False
agent = next((a for a in available_notification_agents() if a['id'] == agent_id), None)
if not agent:
logger.error(u"Tautulli Notifiers :: Unable to retrieve existing notification agent: invalid agent_id %s."
logger.error("Tautulli Notifiers :: Unable to retrieve existing notification agent: invalid agent_id %s."
% agent_id)
return False
@ -614,7 +614,7 @@ def set_notifier_config(notifier_id=None, agent_id=None, **kwargs):
db = database.MonitorDatabase()
try:
db.upsert(table_name='notifiers', key_dict=keys, value_dict=values)
logger.info(u"Tautulli Notifiers :: Updated notification agent: %s (notifier_id %s)."
logger.info("Tautulli Notifiers :: Updated notification agent: %s (notifier_id %s)."
% (agent['label'], notifier_id))
blacklist_logger()
@ -623,7 +623,7 @@ def set_notifier_config(notifier_id=None, agent_id=None, **kwargs):
return True
except Exception as e:
logger.warn(u"Tautulli Notifiers :: Unable to update notification agent: %s." % e)
logger.warn("Tautulli Notifiers :: Unable to update notification agent: %s." % e)
return False
@ -638,7 +638,7 @@ def send_notification(notifier_id=None, subject='', body='', notify_action='', n
notification_id=notification_id,
**kwargs)
else:
logger.debug(u"Tautulli Notifiers :: Notification requested but no notifier_id received.")
logger.debug("Tautulli Notifiers :: Notification requested but no notifier_id received.")
def blacklist_logger():
@ -813,10 +813,10 @@ class Notifier(object):
def notify(self, subject='', body='', action='', **kwargs):
if self.NAME not in ('Script', 'Webhook'):
if not subject and self.config.get('incl_subject', True):
logger.error(u"Tautulli Notifiers :: %s notification subject cannot be blank." % self.NAME)
logger.error("Tautulli Notifiers :: %s notification subject cannot be blank." % self.NAME)
return
elif not body:
logger.error(u"Tautulli Notifiers :: %s notification body cannot be blank." % self.NAME)
logger.error("Tautulli Notifiers :: %s notification body cannot be blank." % self.NAME)
return
return self.agent_notify(subject=subject, body=body, action=action, **kwargs)
@ -825,11 +825,11 @@ class Notifier(object):
pass
def make_request(self, url, method='POST', **kwargs):
logger.info(u"Tautulli Notifiers :: Sending {name} notification...".format(name=self.NAME))
logger.info("Tautulli Notifiers :: Sending {name} notification...".format(name=self.NAME))
response, err_msg, req_msg = request.request_response2(url, method, **kwargs)
if response and not err_msg:
logger.info(u"Tautulli Notifiers :: {name} notification sent.".format(name=self.NAME))
logger.info("Tautulli Notifiers :: {name} notification sent.".format(name=self.NAME))
return True
else:
@ -837,13 +837,13 @@ class Notifier(object):
if response is not None and response.status_code >= 400 and response.status_code < 500:
verify_msg = " Verify you notification agent settings are correct."
logger.error(u"Tautulli Notifiers :: {name} notification failed.{msg}".format(msg=verify_msg, name=self.NAME))
logger.error("Tautulli Notifiers :: {name} notification failed.{msg}".format(msg=verify_msg, name=self.NAME))
if err_msg:
logger.error(u"Tautulli Notifiers :: {}".format(err_msg))
logger.error("Tautulli Notifiers :: {}".format(err_msg))
if req_msg:
logger.debug(u"Tautulli Notifiers :: Request response: {}".format(req_msg))
logger.debug("Tautulli Notifiers :: Request response: {}".format(req_msg))
return False
@ -876,7 +876,7 @@ class ANDROIDAPP(Notifier):
# Check mobile device is still registered
device = mobile_app.get_mobile_devices(device_id=self.config['device_id'])
if not device:
logger.warn(u"Tautulli Notifiers :: Unable to send Android app notification: device not registered.")
logger.warn("Tautulli Notifiers :: Unable to send Android app notification: device not registered.")
return
else:
device = device[0]
@ -927,7 +927,7 @@ class ANDROIDAPP(Notifier):
'salt': base64.b64encode(salt)}
}
else:
logger.warn(u"Tautulli Notifiers :: PyCryptodome library is missing. "
logger.warn("Tautulli Notifiers :: PyCryptodome library is missing. "
"Android app notifications will be sent unecrypted. "
"Install the library to encrypt the notifications.")
@ -951,7 +951,7 @@ class ANDROIDAPP(Notifier):
query = 'SELECT * FROM mobile_devices'
result = db.select(query=query)
except Exception as e:
logger.warn(u"Tautulli Notifiers :: Unable to retrieve Android app devices list: %s." % e)
logger.warn("Tautulli Notifiers :: Unable to retrieve Android app devices list: %s." % e)
return {'': ''}
devices = {}
@ -1107,7 +1107,7 @@ class BROWSER(Notifier):
}
def agent_notify(self, subject='', body='', action='', **kwargs):
logger.info(u"Tautulli Notifiers :: {name} notification sent.".format(name=self.NAME))
logger.info("Tautulli Notifiers :: {name} notification sent.".format(name=self.NAME))
return True
def _return_config_options(self):
@ -1380,11 +1380,11 @@ class EMAIL(Notifier):
mailserver.login(str(self.config['smtp_user']), str(self.config['smtp_password']))
mailserver.sendmail(self.config['from'], recipients, msg.as_string())
logger.info(u"Tautulli Notifiers :: {name} notification sent.".format(name=self.NAME))
logger.info("Tautulli Notifiers :: {name} notification sent.".format(name=self.NAME))
success = True
except Exception as e:
logger.error(u"Tautulli Notifiers :: {name} notification failed: {e}".format(
logger.error("Tautulli Notifiers :: {name} notification failed: {e}".format(
name=self.NAME, e=str(e).decode('utf-8')))
finally:
@ -1517,7 +1517,7 @@ class FACEBOOK(Notifier):
perms=['publish_to_groups'])
def _get_credentials(self, code=''):
logger.info(u"Tautulli Notifiers :: Requesting access token from {name}.".format(name=self.NAME))
logger.info("Tautulli Notifiers :: Requesting access token from {name}.".format(name=self.NAME))
app_id = plexpy.CONFIG.FACEBOOK_APP_ID
app_secret = plexpy.CONFIG.FACEBOOK_APP_SECRET
@ -1539,7 +1539,7 @@ class FACEBOOK(Notifier):
plexpy.CONFIG.FACEBOOK_TOKEN = response['access_token']
except Exception as e:
logger.error(u"Tautulli Notifiers :: Error requesting {name} access token: {e}".format(name=self.NAME, e=e))
logger.error("Tautulli Notifiers :: Error requesting {name} access token: {e}".format(name=self.NAME, e=e))
plexpy.CONFIG.FACEBOOK_TOKEN = ''
# Clear out temporary config values
@ -1555,14 +1555,14 @@ class FACEBOOK(Notifier):
try:
api.put_object(parent_object=self.config['group_id'], connection_name='feed', **data)
logger.info(u"Tautulli Notifiers :: {name} notification sent.".format(name=self.NAME))
logger.info("Tautulli Notifiers :: {name} notification sent.".format(name=self.NAME))
return True
except Exception as e:
logger.error(u"Tautulli Notifiers :: Error sending {name} post: {e}".format(name=self.NAME, e=e))
logger.error("Tautulli Notifiers :: Error sending {name} post: {e}".format(name=self.NAME, e=e))
return False
else:
logger.error(u"Tautulli Notifiers :: Error sending {name} post: No {name} Group ID provided.".format(name=self.NAME))
logger.error("Tautulli Notifiers :: Error sending {name} post: No {name} Group ID provided.".format(name=self.NAME))
return False
def agent_notify(self, subject='', body='', action='', **kwargs):
@ -1700,7 +1700,7 @@ class GROUPME(Notifier):
poster_content = result[0]
else:
poster_content = ''
logger.error(u"Tautulli Notifiers :: Unable to retrieve image for {name}.".format(name=self.NAME))
logger.error("Tautulli Notifiers :: Unable to retrieve image for {name}.".format(name=self.NAME))
if poster_content:
headers = {'X-Access-Token': self.config['access_token'],
@ -1709,14 +1709,14 @@ class GROUPME(Notifier):
r = requests.post('https://image.groupme.com/pictures', headers=headers, data=poster_content)
if r.status_code == 200:
logger.info(u"Tautulli Notifiers :: {name} poster sent.".format(name=self.NAME))
logger.info("Tautulli Notifiers :: {name} poster sent.".format(name=self.NAME))
r_content = r.json()
data['attachments'] = [{'type': 'image',
'url': r_content['payload']['picture_url']}]
else:
logger.error(u"Tautulli Notifiers :: {name} poster failed: "
u"[{r.status_code}] {r.reason}".format(name=self.NAME, r=r))
logger.debug(u"Tautulli Notifiers :: Request response: {}".format(request.server_message(r, True)))
logger.error("Tautulli Notifiers :: {name} poster failed: "
"[{r.status_code}] {r.reason}".format(name=self.NAME, r=r))
logger.debug("Tautulli Notifiers :: Request response: {}".format(request.server_message(r, True)))
return self.make_request('https://api.groupme.com/v3/bots/post', json=data)
@ -1788,10 +1788,10 @@ class GROWL(Notifier):
try:
growl.register()
except gntp.notifier.errors.NetworkError:
logger.error(u"Tautulli Notifiers :: {name} notification failed: network error".format(name=self.NAME))
logger.error("Tautulli Notifiers :: {name} notification failed: network error".format(name=self.NAME))
return False
except gntp.notifier.errors.AuthError:
logger.error(u"Tautulli Notifiers :: {name} notification failed: authentication error".format(name=self.NAME))
logger.error("Tautulli Notifiers :: {name} notification failed: authentication error".format(name=self.NAME))
return False
# Fix message
@ -1811,10 +1811,10 @@ class GROWL(Notifier):
description=body,
icon=image
)
logger.info(u"Tautulli Notifiers :: {name} notification sent.".format(name=self.NAME))
logger.info("Tautulli Notifiers :: {name} notification sent.".format(name=self.NAME))
return True
except gntp.notifier.errors.NetworkError:
logger.error(u"Tautulli Notifiers :: {name} notification failed: network error".format(name=self.NAME))
logger.error("Tautulli Notifiers :: {name} notification failed: network error".format(name=self.NAME))
return False
def _return_config_options(self):
@ -2113,15 +2113,15 @@ class JOIN(Notifier):
if r.status_code == 200:
response_data = r.json()
if response_data.get('success'):
logger.info(u"Tautulli Notifiers :: {name} notification sent.".format(name=self.NAME))
logger.info("Tautulli Notifiers :: {name} notification sent.".format(name=self.NAME))
return True
else:
error_msg = response_data.get('errorMessage')
logger.error(u"Tautulli Notifiers :: {name} notification failed: {msg}".format(name=self.NAME, msg=error_msg))
logger.error("Tautulli Notifiers :: {name} notification failed: {msg}".format(name=self.NAME, msg=error_msg))
return False
else:
logger.error(u"Tautulli Notifiers :: {name} notification failed: [{r.status_code}] {r.reason}".format(name=self.NAME, r=r))
logger.debug(u"Tautulli Notifiers :: Request response: {}".format(request.server_message(r, True)))
logger.error("Tautulli Notifiers :: {name} notification failed: [{r.status_code}] {r.reason}".format(name=self.NAME, r=r))
logger.debug("Tautulli Notifiers :: Request response: {}".format(request.server_message(r, True)))
return False
def get_devices(self):
@ -2141,14 +2141,14 @@ class JOIN(Notifier):
devices.update({d['deviceName']: d['deviceName'] for d in response_devices})
else:
error_msg = response_data.get('errorMessage')
logger.error(u"Tautulli Notifiers :: Unable to retrieve {name} devices list: {msg}".format(name=self.NAME, msg=error_msg))
logger.error("Tautulli Notifiers :: Unable to retrieve {name} devices list: {msg}".format(name=self.NAME, msg=error_msg))
else:
logger.error(u"Tautulli Notifiers :: Unable to retrieve {name} devices list: [{r.status_code}] {r.reason}".format(name=self.NAME, r=r))
logger.debug(u"Tautulli Notifiers :: Request response: {}".format(request.server_message(r, True)))
logger.error("Tautulli Notifiers :: Unable to retrieve {name} devices list: [{r.status_code}] {r.reason}".format(name=self.NAME, r=r))
logger.debug("Tautulli Notifiers :: Request response: {}".format(request.server_message(r, True)))
except Exception as e:
logger.error(u"Tautulli Notifiers :: Unable to retrieve {name} devices list: {msg}".format(name=self.NAME, msg=e))
logger.error("Tautulli Notifiers :: Unable to retrieve {name} devices list: {msg}".format(name=self.NAME, msg=e))
return devices
@ -2235,7 +2235,7 @@ class MQTT(Notifier):
def agent_notify(self, subject='', body='', action='', **kwargs):
if not self.config['topic']:
logger.error(u"Tautulli Notifiers :: MQTT topic not specified.")
logger.error("Tautulli Notifiers :: MQTT topic not specified.")
return
data = {'subject': subject.encode('utf-8'),
@ -2349,11 +2349,11 @@ class NMA(Notifier):
response = p.push(title, subject, body, priority=self.config['priority'], batch_mode=batch)
if response[self.config['api_key']][u'code'] == u'200':
logger.info(u"Tautulli Notifiers :: {name} notification sent.".format(name=self.NAME))
if response[self.config['api_key']]['code'] == '200':
logger.info("Tautulli Notifiers :: {name} notification sent.".format(name=self.NAME))
return True
else:
logger.error(u"Tautulli Notifiers :: {name} notification failed.".format(name=self.NAME))
logger.error("Tautulli Notifiers :: {name} notification failed.".format(name=self.NAME))
return False
def _return_config_options(self):
@ -2390,7 +2390,7 @@ class OSX(Notifier):
self.objc = __import__("objc")
self.AppKit = __import__("AppKit")
except:
# logger.error(u"Tautulli Notifiers :: Cannot load OSX Notifications agent.")
# logger.error("Tautulli Notifiers :: Cannot load OSX Notifications agent.")
pass
def validate(self):
@ -2449,13 +2449,13 @@ class OSX(Notifier):
notification_center = NSUserNotificationCenter.defaultUserNotificationCenter()
notification_center.deliverNotification_(notification)
logger.info(u"Tautulli Notifiers :: {name} notification sent.".format(name=self.NAME))
logger.info("Tautulli Notifiers :: {name} notification sent.".format(name=self.NAME))
del pool
return True
except Exception as e:
logger.error(u"Tautulli Notifiers :: {name} failed: {e}".format(name=self.NAME, e=e))
logger.error("Tautulli Notifiers :: {name} failed: {e}".format(name=self.NAME, e=e))
return False
def _return_config_options(self):
@ -2527,7 +2527,7 @@ class PLEX(Notifier):
image = os.path.join(plexpy.DATA_DIR, os.path.abspath("data/interfaces/default/images/logo-circle.png"))
for host in hosts:
logger.info(u"Tautulli Notifiers :: Sending notification command to {name} @ {host}".format(name=self.NAME, host=host))
logger.info("Tautulli Notifiers :: Sending notification command to {name} @ {host}".format(name=self.NAME, host=host))
try:
version = self._sendjson(host, 'Application.GetProperties', {'properties': ['version']})['version']['major']
@ -2543,10 +2543,10 @@ class PLEX(Notifier):
if not request:
raise Exception
else:
logger.info(u"Tautulli Notifiers :: {name} notification sent.".format(name=self.NAME))
logger.info("Tautulli Notifiers :: {name} notification sent.".format(name=self.NAME))
except Exception as e:
logger.error(u"Tautulli Notifiers :: {name} notification failed: {e}".format(name=self.NAME, e=e))
logger.error("Tautulli Notifiers :: {name} notification failed: {e}".format(name=self.NAME, e=e))
return False
return True
@ -2694,7 +2694,7 @@ class PUSHBULLET(Notifier):
poster_content = result[0]
else:
poster_content = ''
logger.error(u"Tautulli Notifiers :: Unable to retrieve image for {name}.".format(name=self.NAME))
logger.error("Tautulli Notifiers :: Unable to retrieve image for {name}.".format(name=self.NAME))
if poster_content:
poster_filename = 'poster_{}.png'.format(pretty_metadata.parameters['rating_key'])
@ -2713,9 +2713,9 @@ class PUSHBULLET(Notifier):
file_response.pop('data', None)
data.update(file_response)
else:
logger.error(u"Tautulli Notifiers :: Unable to upload image to {name}: "
u"[{r.status_code}] {r.reason}".format(name=self.NAME, r=r))
logger.debug(u"Tautulli Notifiers :: Request response: {}".format(request.server_message(r, True)))
logger.error("Tautulli Notifiers :: Unable to upload image to {name}: "
"[{r.status_code}] {r.reason}".format(name=self.NAME, r=r))
logger.debug("Tautulli Notifiers :: Request response: {}".format(request.server_message(r, True)))
return self.make_request('https://api.pushbullet.com/v2/pushes', headers=headers, json=data)
@ -2734,12 +2734,12 @@ class PUSHBULLET(Notifier):
pushbullet_devices = response_data.get('devices', [])
devices.update({d['iden']: d['nickname'] for d in pushbullet_devices if d['active']})
else:
logger.error(u"Tautulli Notifiers :: Unable to retrieve {name} devices list: "
u"[{r.status_code}] {r.reason}".format(name=self.NAME, r=r))
logger.debug(u"Tautulli Notifiers :: Request response: {}".format(request.server_message(r, True)))
logger.error("Tautulli Notifiers :: Unable to retrieve {name} devices list: "
"[{r.status_code}] {r.reason}".format(name=self.NAME, r=r))
logger.debug("Tautulli Notifiers :: Request response: {}".format(request.server_message(r, True)))
except Exception as e:
logger.error(u"Tautulli Notifiers :: Unable to retrieve {name} devices list: {msg}".format(name=self.NAME, msg=e))
logger.error("Tautulli Notifiers :: Unable to retrieve {name} devices list: {msg}".format(name=self.NAME, msg=e))
return devices
@ -2851,7 +2851,7 @@ class PUSHOVER(Notifier):
poster_content = result[0]
else:
poster_content = ''
logger.error(u"Tautulli Notifiers :: Unable to retrieve image for {name}.".format(name=self.NAME))
logger.error("Tautulli Notifiers :: Unable to retrieve image for {name}.".format(name=self.NAME))
if poster_content:
poster_filename = 'poster_{}.png'.format(pretty_metadata.parameters['rating_key'])
@ -2901,9 +2901,9 @@ class PUSHOVER(Notifier):
# print sounds
# return sounds
# else:
# logger.error(u"Tautulli Notifiers :: Unable to retrieve {name} sounds list: "
# u"[{r.status_code}] {r.reason}".format(name=self.NAME, r=r))
# logger.debug(u"Tautulli Notifiers :: Request response: {}".format(request.server_message(r, True)))
# logger.error("Tautulli Notifiers :: Unable to retrieve {name} sounds list: "
# "[{r.status_code}] {r.reason}".format(name=self.NAME, r=r))
# logger.debug("Tautulli Notifiers :: Request response: {}".format(request.server_message(r, True)))
# return {'': ''}
#
# else:
@ -3089,13 +3089,13 @@ class SCRIPTS(Notifier):
timer.start()
output, error = process.communicate()
status = process.returncode
logger.debug(u"Tautulli Notifiers :: Subprocess returned with status code %s." % status)
logger.debug("Tautulli Notifiers :: Subprocess returned with status code %s." % status)
finally:
if timer:
timer.cancel()
except OSError as e:
logger.error(u"Tautulli Notifiers :: Failed to run script: %s" % e)
logger.error("Tautulli Notifiers :: Failed to run script: %s" % e)
return False
if error:
@ -3107,13 +3107,13 @@ class SCRIPTS(Notifier):
logger.debug("Tautulli Notifiers :: Script returned: \n %s" % out)
if not self.script_killed:
logger.info(u"Tautulli Notifiers :: Script notification sent.")
logger.info("Tautulli Notifiers :: Script notification sent.")
return True
def kill_script(self, process):
process.kill()
self.script_killed = True
logger.warn(u"Tautulli Notifiers :: Script exceeded timeout limit of %d seconds. "
logger.warn("Tautulli Notifiers :: Script exceeded timeout limit of %d seconds. "
"Script killed." % self.config['timeout'])
def agent_notify(self, subject='', body='', action='', **kwargs):
@ -3124,12 +3124,12 @@ class SCRIPTS(Notifier):
action(string): 'play'
"""
if not self.config['script_folder']:
logger.error(u"Tautulli Notifiers :: No script folder specified.")
logger.error("Tautulli Notifiers :: No script folder specified.")
return
script_args = helpers.split_args(kwargs.get('script_args', subject))
logger.debug(u"Tautulli Notifiers :: Trying to run notify script, action: %s, arguments: %s"
logger.debug("Tautulli Notifiers :: Trying to run notify script, action: %s, arguments: %s"
% (action, script_args))
script = kwargs.get('script', self.config.get('script', ''))
@ -3137,10 +3137,10 @@ class SCRIPTS(Notifier):
# Don't try to run the script if the action does not have one
if action and not script:
logger.debug(u"Tautulli Notifiers :: No script selected for action %s, exiting..." % action)
logger.debug("Tautulli Notifiers :: No script selected for action %s, exiting..." % action)
return
elif not script:
logger.debug(u"Tautulli Notifiers :: No script selected, exiting...")
logger.debug("Tautulli Notifiers :: No script selected, exiting...")
return
name, ext = os.path.splitext(script)
@ -3178,8 +3178,8 @@ class SCRIPTS(Notifier):
script.extend(script_args)
logger.debug(u"Tautulli Notifiers :: Full script is: %s" % script)
logger.debug(u"Tautulli Notifiers :: Executing script in a new thread.")
logger.debug("Tautulli Notifiers :: Full script is: %s" % script)
logger.debug("Tautulli Notifiers :: Executing script in a new thread.")
thread = threading.Thread(target=self.run_script, args=(script, user_id)).start()
return True
@ -3433,7 +3433,7 @@ class TELEGRAM(Notifier):
poster_content = result[0]
else:
poster_content = ''
logger.error(u"Tautulli Notifiers :: Unable to retrieve image for {name}.".format(name=self.NAME))
logger.error("Tautulli Notifiers :: Unable to retrieve image for {name}.".format(name=self.NAME))
if poster_content:
poster_filename = 'poster_{}.png'.format(pretty_metadata.parameters['rating_key'])
@ -3531,16 +3531,16 @@ class TWITTER(Notifier):
access_token = self.config['access_token']
access_token_secret = self.config['access_token_secret']
# logger.info(u"Tautulli Notifiers :: Sending tweet: " + message)
# logger.info("Tautulli Notifiers :: Sending tweet: " + message)
api = twitter.Api(consumer_key, consumer_secret, access_token, access_token_secret)
try:
api.PostUpdate(message, media=attachment)
logger.info(u"Tautulli Notifiers :: {name} notification sent.".format(name=self.NAME))
logger.info("Tautulli Notifiers :: {name} notification sent.".format(name=self.NAME))
return True
except Exception as e:
logger.error(u"Tautulli Notifiers :: {name} notification failed: {e}".format(name=self.NAME, e=e))
logger.error("Tautulli Notifiers :: {name} notification failed: {e}".format(name=self.NAME, e=e))
return False
def agent_notify(self, subject='', body='', action='', **kwargs):
@ -3618,7 +3618,7 @@ class WEBHOOK(Notifier):
try:
webhook_headers = json.loads(subject)
except ValueError as e:
logger.error(u"Tautulli Notifiers :: Invalid {name} json header data: {e}".format(name=self.NAME, e=e))
logger.error("Tautulli Notifiers :: Invalid {name} json header data: {e}".format(name=self.NAME, e=e))
return False
else:
webhook_headers = None
@ -3627,7 +3627,7 @@ class WEBHOOK(Notifier):
try:
webhook_body = json.loads(body)
except ValueError as e:
logger.error(u"Tautulli Notifiers :: Invalid {name} json body data: {e}".format(name=self.NAME, e=e))
logger.error("Tautulli Notifiers :: Invalid {name} json body data: {e}".format(name=self.NAME, e=e))
return False
else:
webhook_body = None
@ -3711,7 +3711,7 @@ class XBMC(Notifier):
image = os.path.join(plexpy.DATA_DIR, os.path.abspath("data/interfaces/default/images/logo-circle.png"))
for host in hosts:
logger.info(u"Tautulli Notifiers :: Sending notification command to XMBC @ " + host)
logger.info("Tautulli Notifiers :: Sending notification command to XMBC @ " + host)
try:
version = self._sendjson(host, 'Application.GetProperties', {'properties': ['version']})['version']['major']
@ -3727,10 +3727,10 @@ class XBMC(Notifier):
if not request:
raise Exception
else:
logger.info(u"Tautulli Notifiers :: {name} notification sent.".format(name=self.NAME))
logger.info("Tautulli Notifiers :: {name} notification sent.".format(name=self.NAME))
except Exception as e:
logger.error(u"Tautulli Notifiers :: {name} notification failed: {e}".format(name=self.NAME, e=e))
logger.error("Tautulli Notifiers :: {name} notification failed: {e}".format(name=self.NAME, e=e))
return False
return True
@ -3870,7 +3870,7 @@ class ZAPIER(Notifier):
def upgrade_config_to_db():
logger.info(u"Tautulli Notifiers :: Upgrading to new notification system...")
logger.info("Tautulli Notifiers :: Upgrading to new notification system...")
# Set flag first in case something fails we don't want to keep re-adding the notifiers
plexpy.CONFIG.__setattr__('UPDATE_NOTIFIERS_DB', 0)


@ -32,13 +32,13 @@ def extract_plexivity_xml(xml=None):
try:
xml_parse = minidom.parseString(clean_xml)
except:
logger.warn(u"Tautulli Importer :: Error parsing XML for Plexivity database.")
logger.warn("Tautulli Importer :: Error parsing XML for Plexivity database.")
return None
# I think Plexivity only tracked videos and not music?
xml_head = xml_parse.getElementsByTagName('Video')
if not xml_head:
logger.warn(u"Tautulli Importer :: Error parsing XML for Plexivity database.")
logger.warn("Tautulli Importer :: Error parsing XML for Plexivity database.")
return None
for a in xml_head:
@ -239,23 +239,23 @@ def validate_database(database=None, table_name=None):
try:
connection = sqlite3.connect(database, timeout=20)
except sqlite3.OperationalError:
logger.error(u"Tautulli Importer :: Invalid database specified.")
logger.error("Tautulli Importer :: Invalid database specified.")
return 'Invalid database specified.'
except ValueError:
logger.error(u"Tautulli Importer :: Invalid database specified.")
logger.error("Tautulli Importer :: Invalid database specified.")
return 'Invalid database specified.'
except:
logger.error(u"Tautulli Importer :: Uncaught exception.")
logger.error("Tautulli Importer :: Uncaught exception.")
return 'Uncaught exception.'
try:
connection.execute('SELECT xml from %s' % table_name)
connection.close()
except sqlite3.OperationalError:
logger.error(u"Tautulli Importer :: Invalid database specified.")
logger.error("Tautulli Importer :: Invalid database specified.")
return 'Invalid database specified.'
except:
logger.error(u"Tautulli Importer :: Uncaught exception.")
logger.error("Tautulli Importer :: Uncaught exception.")
return 'Uncaught exception.'
return 'success'
@ -266,19 +266,19 @@ def import_from_plexivity(database=None, table_name=None, import_ignore_interval
connection = sqlite3.connect(database, timeout=20)
connection.row_factory = sqlite3.Row
except sqlite3.OperationalError:
logger.error(u"Tautulli Importer :: Invalid filename.")
logger.error("Tautulli Importer :: Invalid filename.")
return None
except ValueError:
logger.error(u"Tautulli Importer :: Invalid filename.")
logger.error("Tautulli Importer :: Invalid filename.")
return None
try:
connection.execute('SELECT xml from %s' % table_name)
except sqlite3.OperationalError:
logger.error(u"Tautulli Importer :: Database specified does not contain the required fields.")
logger.error("Tautulli Importer :: Database specified does not contain the required fields.")
return None
logger.debug(u"Tautulli Importer :: Plexivity data import in progress...")
logger.debug("Tautulli Importer :: Plexivity data import in progress...")
ap = activity_processor.ActivityProcessor()
user_data = users.Users()
@ -287,7 +287,7 @@ def import_from_plexivity(database=None, table_name=None, import_ignore_interval
try:
users.refresh_users()
except:
logger.debug(u"Tautulli Importer :: Unable to refresh the users list. Aborting import.")
logger.debug("Tautulli Importer :: Unable to refresh the users list. Aborting import.")
return None
query = 'SELECT id AS id, ' \
@ -320,13 +320,13 @@ def import_from_plexivity(database=None, table_name=None, import_ignore_interval
# If we get back None from our xml extractor skip over the record and log error.
if not extracted_xml:
logger.error(u"Tautulli Importer :: Skipping record with id %s due to malformed xml."
logger.error("Tautulli Importer :: Skipping record with id %s due to malformed xml."
% str(row['id']))
continue
# Skip line if we don't have a ratingKey to work with
#if not row['rating_key']:
# logger.error(u"Tautulli Importer :: Skipping record due to null ratingKey.")
# logger.error("Tautulli Importer :: Skipping record due to null ratingKey.")
# continue
# If the user_id no longer exists in the friends list, pull it from the xml.
@ -430,13 +430,13 @@ def import_from_plexivity(database=None, table_name=None, import_ignore_interval
is_import=True,
import_ignore_interval=import_ignore_interval)
else:
logger.debug(u"Tautulli Importer :: Item has bad rating_key: %s" % session_history_metadata['rating_key'])
logger.debug("Tautulli Importer :: Item has bad rating_key: %s" % session_history_metadata['rating_key'])
logger.debug(u"Tautulli Importer :: Plexivity data import complete.")
logger.debug("Tautulli Importer :: Plexivity data import complete.")
import_users()
def import_users():
logger.debug(u"Tautulli Importer :: Importing Plexivity Users...")
logger.debug("Tautulli Importer :: Importing Plexivity Users...")
monitor_db = database.MonitorDatabase()
query = 'INSERT OR IGNORE INTO users (user_id, username) ' \
@ -445,6 +445,6 @@ def import_users():
try:
monitor_db.action(query)
logger.debug(u"Tautulli Importer :: Users imported.")
logger.debug("Tautulli Importer :: Users imported.")
except:
logger.debug(u"Tautulli Importer :: Failed to import users.")
logger.debug("Tautulli Importer :: Failed to import users.")


@ -31,7 +31,7 @@ import session
def get_server_resources(return_presence=False, return_server=False, **kwargs):
if not return_presence:
logger.info(u"Tautulli PlexTV :: Requesting resources for server...")
logger.info("Tautulli PlexTV :: Requesting resources for server...")
server = {'pms_name': plexpy.CONFIG.PMS_NAME,
'pms_version': plexpy.CONFIG.PMS_VERSION,
@ -97,17 +97,17 @@ def get_server_resources(return_presence=False, return_server=False, **kwargs):
conn = next((c for c in conns if c['address'] == server['pms_ip']
and c['port'] == str(server['pms_port'])), conns[0])
server['pms_url'] = conn['uri']
logger.info(u"Tautulli PlexTV :: Server URL retrieved.")
logger.info("Tautulli PlexTV :: Server URL retrieved.")
# get_server_urls() failed or PMS_URL not found, fallback url doesn't use SSL
if not server['pms_url']:
server['pms_url'] = fallback_url
logger.warn(u"Tautulli PlexTV :: Unable to retrieve server URLs. Using user-defined value without SSL.")
logger.warn("Tautulli PlexTV :: Unable to retrieve server URLs. Using user-defined value without SSL.")
# Not using SSL, remote has no effect
else:
server['pms_url'] = fallback_url
logger.info(u"Tautulli PlexTV :: Using user-defined URL.")
logger.info("Tautulli PlexTV :: Using user-defined URL.")
if return_server:
return server
@ -141,7 +141,7 @@ class PlexTV(object):
self.token = plexpy.CONFIG.PMS_TOKEN
if not self.token:
logger.error(u"Tautulli PlexTV :: PlexTV called, but no token provided.")
logger.error("Tautulli PlexTV :: PlexTV called, but no token provided.")
return
self.request_handler = http_handler.HTTPHandler(urls=self.urls,
@ -175,9 +175,9 @@ class PlexTV(object):
'user_id': xml_head[0].getAttribute('id')
}
else:
logger.warn(u"Tautulli PlexTV :: Could not get Plex authentication token.")
logger.warn("Tautulli PlexTV :: Could not get Plex authentication token.")
except Exception as e:
logger.warn(u"Tautulli PlexTV :: Unable to parse XML for get_token: %s." % e)
logger.warn("Tautulli PlexTV :: Unable to parse XML for get_token: %s." % e)
return None
return user
@ -186,27 +186,27 @@ class PlexTV(object):
def get_plexpy_pms_token(self, force=False):
if force:
logger.debug(u"Tautulli PlexTV :: Forcing refresh of Plex.tv token.")
logger.debug("Tautulli PlexTV :: Forcing refresh of Plex.tv token.")
devices_list = self.get_devices_list()
device_id = next((d for d in devices_list if d['device_identifier'] == plexpy.CONFIG.PMS_UUID), {}).get('device_id', None)
if device_id:
logger.debug(u"Tautulli PlexTV :: Removing Tautulli from Plex.tv devices.")
logger.debug("Tautulli PlexTV :: Removing Tautulli from Plex.tv devices.")
try:
self.delete_plextv_device(device_id=device_id)
except:
logger.error(u"Tautulli PlexTV :: Failed to remove Tautulli from Plex.tv devices.")
logger.error("Tautulli PlexTV :: Failed to remove Tautulli from Plex.tv devices.")
return None
else:
logger.warn(u"Tautulli PlexTV :: No existing Tautulli device found.")
logger.warn("Tautulli PlexTV :: No existing Tautulli device found.")
logger.info(u"Tautulli PlexTV :: Fetching a new Plex.tv token for Tautulli.")
logger.info("Tautulli PlexTV :: Fetching a new Plex.tv token for Tautulli.")
user = self.get_token()
if user:
token = user['auth_token']
plexpy.CONFIG.__setattr__('PMS_TOKEN', token)
plexpy.CONFIG.write()
logger.info(u"Tautulli PlexTV :: Updated Plex.tv token for Tautulli.")
logger.info("Tautulli PlexTV :: Updated Plex.tv token for Tautulli.")
return token
@ -217,7 +217,7 @@ class PlexTV(object):
try:
xml_head = servers.getElementsByTagName('Device')
except Exception as e:
logger.warn(u"Tautulli PlexTV :: Unable to parse XML for get_server_token: %s." % e)
logger.warn("Tautulli PlexTV :: Unable to parse XML for get_server_token: %s." % e)
return None
for a in xml_head:
@ -257,11 +257,11 @@ class PlexTV(object):
}
return pin
else:
logger.warn(u"Tautulli PlexTV :: Could not get Plex authentication pin.")
logger.warn("Tautulli PlexTV :: Could not get Plex authentication pin.")
return None
except Exception as e:
logger.warn(u"Tautulli PlexTV :: Unable to parse XML for get_pin: %s." % e)
logger.warn("Tautulli PlexTV :: Unable to parse XML for get_pin: %s." % e)
return None
else:
@ -388,7 +388,7 @@ class PlexTV(object):
try:
xml_head = own_account.getElementsByTagName('user')
except Exception as e:
logger.warn(u"Tautulli PlexTV :: Unable to parse own account XML for get_full_users_list: %s." % e)
logger.warn("Tautulli PlexTV :: Unable to parse own account XML for get_full_users_list: %s." % e)
return []
for a in xml_head:
@ -415,7 +415,7 @@ class PlexTV(object):
try:
xml_head = friends_list.getElementsByTagName('User')
except Exception as e:
logger.warn(u"Tautulli PlexTV :: Unable to parse friends list XML for get_full_users_list: %s." % e)
logger.warn("Tautulli PlexTV :: Unable to parse friends list XML for get_full_users_list: %s." % e)
return []
for a in xml_head:
@ -439,7 +439,7 @@ class PlexTV(object):
try:
xml_head = shared_servers.getElementsByTagName('SharedServer')
except Exception as e:
logger.warn(u"Tautulli PlexTV :: Unable to parse shared server list XML for get_full_users_list: %s." % e)
logger.warn("Tautulli PlexTV :: Unable to parse shared server list XML for get_full_users_list: %s." % e)
return []
user_map = {}
@ -484,7 +484,7 @@ class PlexTV(object):
try:
xml_head = sync_list.getElementsByTagName('SyncList')
except Exception as e:
logger.warn(u"Tautulli PlexTV :: Unable to parse XML for get_synced_items: %s." % e)
logger.warn("Tautulli PlexTV :: Unable to parse XML for get_synced_items: %s." % e)
return {}
for a in xml_head:
@ -596,13 +596,13 @@ class PlexTV(object):
return session.filter_session_info(synced_items, filter_key='user_id')
def delete_sync(self, client_id, sync_id):
logger.info(u"Tautulli PlexTV :: Deleting sync item '%s'." % sync_id)
logger.info("Tautulli PlexTV :: Deleting sync item '%s'." % sync_id)
self.delete_plextv_sync(client_id=client_id, sync_id=sync_id)
def get_server_connections(self, pms_identifier='', pms_ip='', pms_port=32400, include_https=True):
if not pms_identifier:
logger.error(u"Tautulli PlexTV :: Unable to retrieve server connections: no pms_identifier provided.")
logger.error("Tautulli PlexTV :: Unable to retrieve server connections: no pms_identifier provided.")
return {}
plextv_resources = self.get_plextv_resources(include_https=include_https,
@ -610,7 +610,7 @@ class PlexTV(object):
try:
xml_head = plextv_resources.getElementsByTagName('Device')
except Exception as e:
logger.warn(u"Tautulli PlexTV :: Unable to parse XML for get_server_urls: %s." % e)
logger.warn("Tautulli PlexTV :: Unable to parse XML for get_server_urls: %s." % e)
return {}
# Function to get all connections for a device
@ -671,7 +671,7 @@ class PlexTV(object):
try:
xml_head = servers.getElementsByTagName('Server')
except Exception as e:
logger.warn(u"Tautulli PlexTV :: Unable to parse XML for get_server_times: %s." % e)
logger.warn("Tautulli PlexTV :: Unable to parse XML for get_server_times: %s." % e)
return {}
for a in xml_head:
@ -713,7 +713,7 @@ class PlexTV(object):
try:
xml_head = servers.getElementsByTagName('MediaContainer')
except Exception as e:
logger.warn(u"Tautulli PlexTV :: Failed to get servers from plex: %s." % e)
logger.warn("Tautulli PlexTV :: Failed to get servers from plex: %s." % e)
return []
for a in xml_head:
@ -773,20 +773,20 @@ class PlexTV(object):
return clean_servers
def get_plex_downloads(self):
logger.debug(u"Tautulli PlexTV :: Retrieving current server version.")
logger.debug("Tautulli PlexTV :: Retrieving current server version.")
pms_connect = pmsconnect.PmsConnect()
pms_connect.set_server_version()
update_channel = pms_connect.get_server_update_channel()
logger.debug(u"Tautulli PlexTV :: Plex update channel is %s." % update_channel)
logger.debug("Tautulli PlexTV :: Plex update channel is %s." % update_channel)
plex_downloads = self.get_plextv_downloads(plexpass=(update_channel == 'beta'))
try:
available_downloads = json.loads(plex_downloads)
except Exception as e:
logger.warn(u"Tautulli PlexTV :: Unable to load JSON for get_plex_updates.")
logger.warn("Tautulli PlexTV :: Unable to load JSON for get_plex_updates.")
return {}
# Get the updates for the platform
@ -795,7 +795,7 @@ class PlexTV(object):
available_downloads.get('nas').get(pms_platform)
if not platform_downloads:
logger.error(u"Tautulli PlexTV :: Unable to retrieve Plex updates: Could not match server platform: %s."
logger.error("Tautulli PlexTV :: Unable to retrieve Plex updates: Could not match server platform: %s."
% pms_platform)
return {}
@ -803,11 +803,11 @@ class PlexTV(object):
v_new = helpers.cast_to_int("".join(v.zfill(4) for v in platform_downloads.get('version', '').split('-')[0].split('.')[:4]))
if not v_old:
logger.error(u"Tautulli PlexTV :: Unable to retrieve Plex updates: Invalid current server version: %s."
logger.error("Tautulli PlexTV :: Unable to retrieve Plex updates: Invalid current server version: %s."
% plexpy.CONFIG.PMS_VERSION)
return {}
if not v_new:
logger.error(u"Tautulli PlexTV :: Unable to retrieve Plex updates: Invalid new server version: %s."
logger.error("Tautulli PlexTV :: Unable to retrieve Plex updates: Invalid new server version: %s."
% platform_downloads.get('version'))
return {}
@ -838,7 +838,7 @@ class PlexTV(object):
try:
subscription = account_data.getElementsByTagName('subscription')
except Exception as e:
logger.warn(u"Tautulli PlexTV :: Unable to parse XML for get_plexpass_status: %s." % e)
logger.warn("Tautulli PlexTV :: Unable to parse XML for get_plexpass_status: %s." % e)
return False
if subscription and helpers.get_xml_attr(subscription[0], 'active') == '1':
@ -846,7 +846,7 @@ class PlexTV(object):
plexpy.CONFIG.write()
return True
else:
logger.debug(u"Tautulli PlexTV :: Plex Pass subscription not found.")
logger.debug("Tautulli PlexTV :: Plex Pass subscription not found.")
plexpy.CONFIG.__setattr__('PMS_PLEXPASS', 0)
plexpy.CONFIG.write()
return False
@ -857,7 +857,7 @@ class PlexTV(object):
try:
xml_head = devices.getElementsByTagName('Device')
except Exception as e:
logger.warn(u"Tautulli PlexTV :: Unable to parse XML for get_devices_list: %s." % e)
logger.warn("Tautulli PlexTV :: Unable to parse XML for get_devices_list: %s." % e)
return []
devices_list = []
@ -885,7 +885,7 @@ class PlexTV(object):
try:
status_info = cloud_status.getElementsByTagName('info')
except Exception as e:
logger.warn(u"Tautulli PlexTV :: Unable to parse XML for get_cloud_server_status: %s." % e)
logger.warn("Tautulli PlexTV :: Unable to parse XML for get_cloud_server_status: %s." % e)
return False
for info in status_info:
@ -903,7 +903,7 @@ class PlexTV(object):
try:
xml_head = account_data.getElementsByTagName('user')
except Exception as e:
logger.warn(u"Tautulli PlexTV :: Unable to parse XML for get_plex_account_details: %s." % e)
logger.warn("Tautulli PlexTV :: Unable to parse XML for get_plex_account_details: %s." % e)
return None
for a in xml_head:


@ -31,12 +31,12 @@ def extract_plexwatch_xml(xml=None):
try:
xml_parse = minidom.parseString(clean_xml)
except:
logger.warn(u"Tautulli Importer :: Error parsing XML for PlexWatch database.")
logger.warn("Tautulli Importer :: Error parsing XML for PlexWatch database.")
return None
xml_head = xml_parse.getElementsByTagName('opt')
if not xml_head:
logger.warn(u"Tautulli Importer :: Error parsing XML for PlexWatch database.")
logger.warn("Tautulli Importer :: Error parsing XML for PlexWatch database.")
return None
for a in xml_head:
@ -230,23 +230,23 @@ def validate_database(database=None, table_name=None):
try:
connection = sqlite3.connect(database, timeout=20)
except sqlite3.OperationalError:
logger.error(u"Tautulli Importer :: Invalid database specified.")
logger.error("Tautulli Importer :: Invalid database specified.")
return 'Invalid database specified.'
except ValueError:
logger.error(u"Tautulli Importer :: Invalid database specified.")
logger.error("Tautulli Importer :: Invalid database specified.")
return 'Invalid database specified.'
except:
logger.error(u"Tautulli Importer :: Uncaught exception.")
logger.error("Tautulli Importer :: Uncaught exception.")
return 'Uncaught exception.'
try:
connection.execute('SELECT ratingKey from %s' % table_name)
connection.close()
except sqlite3.OperationalError:
logger.error(u"Tautulli Importer :: Invalid database specified.")
logger.error("Tautulli Importer :: Invalid database specified.")
return 'Invalid database specified.'
except:
logger.error(u"Tautulli Importer :: Uncaught exception.")
logger.error("Tautulli Importer :: Uncaught exception.")
return 'Uncaught exception.'
return 'success'
@ -257,19 +257,19 @@ def import_from_plexwatch(database=None, table_name=None, import_ignore_interval
connection = sqlite3.connect(database, timeout=20)
connection.row_factory = sqlite3.Row
except sqlite3.OperationalError:
logger.error(u"Tautulli Importer :: Invalid filename.")
logger.error("Tautulli Importer :: Invalid filename.")
return None
except ValueError:
logger.error(u"Tautulli Importer :: Invalid filename.")
logger.error("Tautulli Importer :: Invalid filename.")
return None
try:
connection.execute('SELECT ratingKey from %s' % table_name)
except sqlite3.OperationalError:
logger.error(u"Tautulli Importer :: Database specified does not contain the required fields.")
logger.error("Tautulli Importer :: Database specified does not contain the required fields.")
return None
logger.debug(u"Tautulli Importer :: PlexWatch data import in progress...")
logger.debug("Tautulli Importer :: PlexWatch data import in progress...")
ap = activity_processor.ActivityProcessor()
user_data = users.Users()
@ -278,7 +278,7 @@ def import_from_plexwatch(database=None, table_name=None, import_ignore_interval
try:
users.refresh_users()
except:
logger.debug(u"Tautulli Importer :: Unable to refresh the users list. Aborting import.")
logger.debug("Tautulli Importer :: Unable to refresh the users list. Aborting import.")
return None
query = 'SELECT time AS started, ' \
@ -313,13 +313,13 @@ def import_from_plexwatch(database=None, table_name=None, import_ignore_interval
# If we get back None from our xml extractor skip over the record and log error.
if not extracted_xml:
logger.error(u"Tautulli Importer :: Skipping record with ratingKey %s due to malformed xml."
logger.error("Tautulli Importer :: Skipping record with ratingKey %s due to malformed xml."
% str(row['rating_key']))
continue
# Skip line if we don't have a ratingKey to work with
if not row['rating_key']:
logger.error(u"Tautulli Importer :: Skipping record due to null ratingKey.")
logger.error("Tautulli Importer :: Skipping record due to null ratingKey.")
continue
# If the user_id no longer exists in the friends list, pull it from the xml.
@ -423,13 +423,13 @@ def import_from_plexwatch(database=None, table_name=None, import_ignore_interval
is_import=True,
import_ignore_interval=import_ignore_interval)
else:
logger.debug(u"Tautulli Importer :: Item has bad rating_key: %s" % session_history_metadata['rating_key'])
logger.debug("Tautulli Importer :: Item has bad rating_key: %s" % session_history_metadata['rating_key'])
logger.debug(u"Tautulli Importer :: PlexWatch data import complete.")
logger.debug("Tautulli Importer :: PlexWatch data import complete.")
import_users()
def import_users():
logger.debug(u"Tautulli Importer :: Importing PlexWatch Users...")
logger.debug("Tautulli Importer :: Importing PlexWatch Users...")
monitor_db = database.MonitorDatabase()
query = 'INSERT OR IGNORE INTO users (user_id, username) ' \
@ -438,6 +438,6 @@ def import_users():
try:
monitor_db.action(query)
logger.debug(u"Tautulli Importer :: Users imported.")
logger.debug("Tautulli Importer :: Users imported.")
except:
logger.debug(u"Tautulli Importer :: Failed to import users.")
logger.debug("Tautulli Importer :: Failed to import users.")


@ -30,7 +30,7 @@ import users
def get_server_friendly_name():
logger.info(u"Tautulli Pmsconnect :: Requesting name from server...")
logger.info("Tautulli Pmsconnect :: Requesting name from server...")
server_name = PmsConnect().get_server_pref(pref='FriendlyName')
# If friendly name is blank
@ -44,7 +44,7 @@ def get_server_friendly_name():
if server_name and server_name != plexpy.CONFIG.PMS_NAME:
plexpy.CONFIG.__setattr__('PMS_NAME', server_name)
plexpy.CONFIG.write()
logger.info(u"Tautulli Pmsconnect :: Server name retrieved.")
logger.info("Tautulli Pmsconnect :: Server name retrieved.")
return server_name
@ -474,7 +474,7 @@ class PmsConnect(object):
try:
xml_head = recent.getElementsByTagName('MediaContainer')
except Exception as e:
logger.warn(u"Tautulli Pmsconnect :: Unable to parse XML for get_recently_added: %s." % e)
logger.warn("Tautulli Pmsconnect :: Unable to parse XML for get_recently_added: %s." % e)
return []
for a in xml_head:
@ -610,7 +610,7 @@ class PmsConnect(object):
try:
xml_head = metadata_xml.getElementsByTagName('MediaContainer')
except Exception as e:
logger.warn(u"Tautulli Pmsconnect :: Unable to parse XML for get_metadata_details: %s." % e)
logger.warn("Tautulli Pmsconnect :: Unable to parse XML for get_metadata_details: %s." % e)
return {}
for a in xml_head:
@ -627,7 +627,7 @@ class PmsConnect(object):
elif a.getElementsByTagName('Photo'):
metadata_main_list = a.getElementsByTagName('Photo')
else:
logger.debug(u"Tautulli Pmsconnect :: Metadata failed")
logger.debug("Tautulli Pmsconnect :: Metadata failed")
return {}
if sync_id and len(metadata_main_list) > 1:
@ -819,7 +819,7 @@ class PmsConnect(object):
'genres': show_details['genres'],
'labels': show_details['labels'],
'collections': show_details['collections'],
'full_title': u'{} - {}'.format(helpers.get_xml_attr(metadata_main, 'parentTitle'),
'full_title': '{} - {}'.format(helpers.get_xml_attr(metadata_main, 'parentTitle'),
helpers.get_xml_attr(metadata_main, 'title')),
'children_count': helpers.get_xml_attr(metadata_main, 'leafCount')
}
@ -885,7 +885,7 @@ class PmsConnect(object):
'genres': show_details['genres'],
'labels': show_details['labels'],
'collections': show_details['collections'],
'full_title': u'{} - {}'.format(helpers.get_xml_attr(metadata_main, 'grandparentTitle'),
'full_title': '{} - {}'.format(helpers.get_xml_attr(metadata_main, 'grandparentTitle'),
helpers.get_xml_attr(metadata_main, 'title')),
'children_count': helpers.get_xml_attr(metadata_main, 'leafCount')
}
@ -982,7 +982,7 @@ class PmsConnect(object):
'genres': genres,
'labels': labels,
'collections': collections,
'full_title': u'{} - {}'.format(helpers.get_xml_attr(metadata_main, 'parentTitle'),
'full_title': '{} - {}'.format(helpers.get_xml_attr(metadata_main, 'parentTitle'),
helpers.get_xml_attr(metadata_main, 'title')),
'children_count': helpers.get_xml_attr(metadata_main, 'leafCount')
}
@ -1034,7 +1034,7 @@ class PmsConnect(object):
'genres': album_details['genres'],
'labels': album_details['labels'],
'collections': album_details['collections'],
'full_title': u'{} - {}'.format(helpers.get_xml_attr(metadata_main, 'title'),
'full_title': '{} - {}'.format(helpers.get_xml_attr(metadata_main, 'title'),
track_artist),
'children_count': helpers.get_xml_attr(metadata_main, 'leafCount')
}
@ -1131,7 +1131,7 @@ class PmsConnect(object):
'genres': photo_album_details.get('genres', ''),
'labels': photo_album_details.get('labels', ''),
'collections': photo_album_details.get('collections', ''),
'full_title': u'{} - {}'.format(helpers.get_xml_attr(metadata_main, 'parentTitle') or library_name,
'full_title': '{} - {}'.format(helpers.get_xml_attr(metadata_main, 'parentTitle') or library_name,
helpers.get_xml_attr(metadata_main, 'title')),
'children_count': helpers.get_xml_attr(metadata_main, 'leafCount')
}
@ -1355,7 +1355,7 @@ class PmsConnect(object):
with open(out_file_path, 'w') as outFile:
json.dump(metadata, outFile)
except (IOError, ValueError) as e:
logger.error(u"Tautulli Pmsconnect :: Unable to create cache file for metadata (sessionKey %s): %s"
logger.error("Tautulli Pmsconnect :: Unable to create cache file for metadata (sessionKey %s): %s"
% (cache_key, e))
return metadata
@ -1375,7 +1375,7 @@ class PmsConnect(object):
try:
xml_head = metadata.getElementsByTagName('MediaContainer')
except Exception as e:
logger.warn(u"Tautulli Pmsconnect :: Unable to parse XML for get_metadata_children: %s." % e)
logger.warn("Tautulli Pmsconnect :: Unable to parse XML for get_metadata_children: %s." % e)
return []
metadata_list = []
@ -1425,7 +1425,7 @@ class PmsConnect(object):
try:
xml_head = libraries_data.getElementsByTagName('MediaContainer')
except Exception as e:
logger.warn(u"Tautulli Pmsconnect :: Unable to parse XML for get_library_metadata_details: %s." % e)
logger.warn("Tautulli Pmsconnect :: Unable to parse XML for get_library_metadata_details: %s." % e)
return []
metadata_list = []
@ -1470,7 +1470,7 @@ class PmsConnect(object):
try:
xml_head = session_data.getElementsByTagName('MediaContainer')
except Exception as e:
logger.warn(u"Tautulli Pmsconnect :: Unable to parse XML for get_current_activity: %s." % e)
logger.warn("Tautulli Pmsconnect :: Unable to parse XML for get_current_activity: %s." % e)
return []
session_list = []
@ -2070,7 +2070,7 @@ class PmsConnect(object):
plex_tv = plextv.PlexTV()
if not plex_tv.get_plexpass_status():
msg = 'No Plex Pass subscription'
logger.warn(u"Tautulli Pmsconnect :: Failed to terminate session: %s." % msg)
logger.warn("Tautulli Pmsconnect :: Failed to terminate session: %s." % msg)
return msg
message = message.encode('utf-8') or 'The server owner has ended the stream.'
@ -2092,16 +2092,16 @@ class PmsConnect(object):
if not session:
msg = 'Invalid session_key (%s) or session_id (%s)' % (session_key, session_id)
logger.warn(u"Tautulli Pmsconnect :: Failed to terminate session: %s." % msg)
logger.warn("Tautulli Pmsconnect :: Failed to terminate session: %s." % msg)
return msg
if session_id:
logger.info(u"Tautulli Pmsconnect :: Terminating session %s (session_id %s)." % (session_key, session_id))
logger.info("Tautulli Pmsconnect :: Terminating session %s (session_id %s)." % (session_key, session_id))
result = self.get_sessions_terminate(session_id=session_id, reason=message)
return True
else:
msg = 'Missing session_id'
logger.warn(u"Tautulli Pmsconnect :: Failed to terminate session: %s." % msg)
logger.warn("Tautulli Pmsconnect :: Failed to terminate session: %s." % msg)
return msg
def get_item_children(self, rating_key='', get_grandchildren=False):
@ -2118,7 +2118,7 @@ class PmsConnect(object):
try:
xml_head = children_data.getElementsByTagName('MediaContainer')
except Exception as e:
logger.warn(u"Tautulli Pmsconnect :: Unable to parse XML for get_item_children: %s." % e)
logger.warn("Tautulli Pmsconnect :: Unable to parse XML for get_item_children: %s." % e)
return []
children_list = []
@ -2126,7 +2126,7 @@ class PmsConnect(object):
for a in xml_head:
if a.getAttribute('size'):
if a.getAttribute('size') == '0':
logger.debug(u"Tautulli Pmsconnect :: No children data.")
logger.debug("Tautulli Pmsconnect :: No children data.")
children_list = {'children_count': '0',
'children_list': []
}
@ -2231,7 +2231,7 @@ class PmsConnect(object):
try:
xml_head = children_data.getElementsByTagName('MediaContainer')
except Exception as e:
logger.warn(u"Tautulli Pmsconnect :: Unable to parse XML for get_item_children_related: %s." % e)
logger.warn("Tautulli Pmsconnect :: Unable to parse XML for get_item_children_related: %s." % e)
return []
children_results_list = {'movie': [],
@ -2297,7 +2297,7 @@ class PmsConnect(object):
try:
xml_head = recent.getElementsByTagName('Server')
except Exception as e:
logger.warn(u"Tautulli Pmsconnect :: Unable to parse XML for get_server_list: %s." % e)
logger.warn("Tautulli Pmsconnect :: Unable to parse XML for get_server_list: %s." % e)
return []
server_info = []
@ -2324,7 +2324,7 @@ class PmsConnect(object):
try:
xml_head = identity.getElementsByTagName('MediaContainer')
except Exception as e:
logger.warn(u"Tautulli Pmsconnect :: Unable to parse XML for get_local_server_identity: %s." % e)
logger.warn("Tautulli Pmsconnect :: Unable to parse XML for get_local_server_identity: %s." % e)
return {}
server_identity = {}
@ -2349,7 +2349,7 @@ class PmsConnect(object):
try:
xml_head = prefs.getElementsByTagName('Setting')
except Exception as e:
logger.warn(u"Tautulli Pmsconnect :: Unable to parse XML for get_local_server_name: %s." % e)
logger.warn("Tautulli Pmsconnect :: Unable to parse XML for get_local_server_name: %s." % e)
return ''
pref_value = 'None'
@ -2360,7 +2360,7 @@ class PmsConnect(object):
return pref_value
else:
logger.debug(u"Tautulli Pmsconnect :: Server preferences queried but no parameter received.")
logger.debug("Tautulli Pmsconnect :: Server preferences queried but no parameter received.")
return None
def get_server_children(self):
@ -2374,7 +2374,7 @@ class PmsConnect(object):
try:
xml_head = libraries_data.getElementsByTagName('MediaContainer')
except Exception as e:
logger.warn(u"Tautulli Pmsconnect :: Unable to parse XML for get_libraries_list: %s." % e)
logger.warn("Tautulli Pmsconnect :: Unable to parse XML for get_libraries_list: %s." % e)
return []
libraries_list = []
@ -2382,7 +2382,7 @@ class PmsConnect(object):
for a in xml_head:
if a.getAttribute('size'):
if a.getAttribute('size') == '0':
logger.debug(u"Tautulli Pmsconnect :: No libraries data.")
logger.debug("Tautulli Pmsconnect :: No libraries data.")
libraries_list = {'libraries_count': '0',
'libraries_list': []
}
@ -2448,13 +2448,13 @@ class PmsConnect(object):
elif str(rating_key).isdigit():
library_data = self.get_metadata_children(str(rating_key), output_format='xml')
else:
logger.warn(u"Tautulli Pmsconnect :: get_library_children called by invalid section_id or rating_key provided.")
logger.warn("Tautulli Pmsconnect :: get_library_children called by invalid section_id or rating_key provided.")
return []
try:
xml_head = library_data.getElementsByTagName('MediaContainer')
except Exception as e:
logger.warn(u"Tautulli Pmsconnect :: Unable to parse XML for get_library_children_details: %s." % e)
logger.warn("Tautulli Pmsconnect :: Unable to parse XML for get_library_children_details: %s." % e)
return []
children_list = []
@ -2462,7 +2462,7 @@ class PmsConnect(object):
for a in xml_head:
if a.getAttribute('size'):
if a.getAttribute('size') == '0':
logger.debug(u"Tautulli Pmsconnect :: No library data.")
logger.debug("Tautulli Pmsconnect :: No library data.")
children_list = {'library_count': '0',
'children_list': []
}
@ -2604,7 +2604,7 @@ class PmsConnect(object):
try:
xml_head = labels_data.getElementsByTagName('MediaContainer')
except Exception as e:
logger.warn(u"Tautulli Pmsconnect :: Unable to parse XML for get_library_label_details: %s." % e)
logger.warn("Tautulli Pmsconnect :: Unable to parse XML for get_library_label_details: %s." % e)
return None
labels_list = []
@ -2612,7 +2612,7 @@ class PmsConnect(object):
for a in xml_head:
if a.getAttribute('size'):
if a.getAttribute('size') == '0':
logger.debug(u"Tautulli Pmsconnect :: No labels data.")
logger.debug("Tautulli Pmsconnect :: No labels data.")
return labels_list
if a.getElementsByTagName('Directory'):
@ -2674,7 +2674,7 @@ class PmsConnect(object):
return result[0], result[1]
else:
logger.error(u"Tautulli Pmsconnect :: Image proxy queried but no input received.")
logger.error("Tautulli Pmsconnect :: Image proxy queried but no input received.")
def get_search_results(self, query='', limit=''):
"""
@ -2687,7 +2687,7 @@ class PmsConnect(object):
try:
xml_head = search_results.getElementsByTagName('MediaContainer')
except Exception as e:
logger.warn(u"Tautulli Pmsconnect :: Unable to parse XML for get_search_result: %s." % e)
logger.warn("Tautulli Pmsconnect :: Unable to parse XML for get_search_result: %s." % e)
return []
search_results_list = {'movie': [],
@ -2770,7 +2770,7 @@ class PmsConnect(object):
section_id = metadata['section_id']
library_name = metadata['library_name']
except Exception as e:
logger.warn(u"Tautulli Pmsconnect :: Unable to get parent_rating_key for get_rating_keys_list: %s." % e)
logger.warn("Tautulli Pmsconnect :: Unable to get parent_rating_key for get_rating_keys_list: %s." % e)
return {}
elif media_type == 'episode' or media_type == 'track':
@ -2780,7 +2780,7 @@ class PmsConnect(object):
section_id = metadata['section_id']
library_name = metadata['library_name']
except Exception as e:
logger.warn(u"Tautulli Pmsconnect :: Unable to get grandparent_rating_key for get_rating_keys_list: %s." % e)
logger.warn("Tautulli Pmsconnect :: Unable to get grandparent_rating_key for get_rating_keys_list: %s." % e)
return {}
# get parent_rating_keys
@ -2789,7 +2789,7 @@ class PmsConnect(object):
try:
xml_head = metadata.getElementsByTagName('MediaContainer')
except Exception as e:
logger.warn(u"Tautulli Pmsconnect :: Unable to parse XML for get_rating_keys_list: %s." % e)
logger.warn("Tautulli Pmsconnect :: Unable to parse XML for get_rating_keys_list: %s." % e)
return {}
for a in xml_head:
@ -2817,7 +2817,7 @@ class PmsConnect(object):
try:
xml_head = metadata.getElementsByTagName('MediaContainer')
except Exception as e:
logger.warn(u"Tautulli Pmsconnect :: Unable to parse XML for get_rating_keys_list: %s." % e)
logger.warn("Tautulli Pmsconnect :: Unable to parse XML for get_rating_keys_list: %s." % e)
return {}
for a in xml_head:
@ -2865,7 +2865,7 @@ class PmsConnect(object):
try:
xml_head = account_data.getElementsByTagName('MyPlex')
except Exception as e:
logger.warn(u"Tautulli Pmsconnect :: Unable to parse XML for get_server_response: %s." % e)
logger.warn("Tautulli Pmsconnect :: Unable to parse XML for get_server_response: %s." % e)
return None
server_response = {}
@ -2887,13 +2887,13 @@ class PmsConnect(object):
try:
xml_head = updater_status.getElementsByTagName('MediaContainer')
except Exception as e:
logger.warn(u"Tautulli Pmsconnect :: Unable to parse XML for get_update_staus: %s." % e)
logger.warn("Tautulli Pmsconnect :: Unable to parse XML for get_update_staus: %s." % e)
# Catch the malformed XML on certain PMX version.
# XML parser helper returns empty list if there is an error parsing XML
if updater_status == []:
logger.warn(u"Plex API updater XML is broken on the current PMS version. Please update your PMS manually.")
logger.info(u"Tautulli is unable to check for Plex updates. Disabling check for Plex updates.")
logger.warn("Plex API updater XML is broken on the current PMS version. Please update your PMS manually.")
logger.info("Tautulli is unable to check for Plex updates. Disabling check for Plex updates.")
# Disable check for Plex updates
plexpy.CONFIG.MONITOR_PMS_UPDATES = 0


@ -28,7 +28,7 @@ import session
def refresh_users():
logger.info(u"Tautulli Users :: Requesting users list refresh...")
logger.info("Tautulli Users :: Requesting users list refresh...")
result = plextv.PlexTV().get_full_users_list()
if result:
@ -58,10 +58,10 @@ def refresh_users():
monitor_db.upsert('users', item, keys_dict)
logger.info(u"Tautulli Users :: Users list refreshed.")
logger.info("Tautulli Users :: Users list refreshed.")
return True
else:
logger.warn(u"Tautulli Users :: Unable to refresh users list.")
logger.warn("Tautulli Users :: Unable to refresh users list.")
return False
@ -137,7 +137,7 @@ class Users(object):
['session_history.id', 'session_history_media_info.id']],
kwargs=kwargs)
except Exception as e:
logger.warn(u"Tautulli Users :: Unable to execute database query for get_list: %s." % e)
logger.warn("Tautulli Users :: Unable to execute database query for get_list: %s." % e)
return default_return
users = query['result']
@ -248,7 +248,7 @@ class Users(object):
['session_history.id', 'session_history_media_info.id']],
kwargs=kwargs)
except Exception as e:
logger.warn(u"Tautulli Users :: Unable to execute database query for get_unique_ips: %s." % e)
logger.warn("Tautulli Users :: Unable to execute database query for get_unique_ips: %s." % e)
return default_return
results = query['result']
@ -308,7 +308,7 @@ class Users(object):
try:
monitor_db.upsert('users', value_dict, key_dict)
except Exception as e:
logger.warn(u"Tautulli Users :: Unable to execute database query for set_config: %s." % e)
logger.warn("Tautulli Users :: Unable to execute database query for set_config: %s." % e)
def get_details(self, user_id=None, user=None, email=None):
default_return = {'user_id': 0,
@ -358,7 +358,7 @@ class Users(object):
else:
result = []
except Exception as e:
logger.warn(u"Tautulli Users :: Unable to execute database query for get_details: %s." % e)
logger.warn("Tautulli Users :: Unable to execute database query for get_details: %s." % e)
result = []
user_details = {}
@ -403,7 +403,7 @@ class Users(object):
return user_details
else:
logger.warn(u"Tautulli Users :: Unable to retrieve user %s from database. Requesting user list refresh."
logger.warn("Tautulli Users :: Unable to retrieve user %s from database. Requesting user list refresh."
% user_id if user_id else user)
# Let's first refresh the user list to make sure the user isn't newly added and not in the db yet
refresh_users()
@ -414,7 +414,7 @@ class Users(object):
return user_details
else:
logger.warn(u"Tautulli Users :: Unable to retrieve user %s from database. Returning 'Local' user."
logger.warn("Tautulli Users :: Unable to retrieve user %s from database. Returning 'Local' user."
% user_id if user_id else user)
# If there is no user data we must return something
# Use "Local" user to retain compatibility with PlexWatch database value
@ -458,7 +458,7 @@ class Users(object):
else:
result = []
except Exception as e:
logger.warn(u"Tautulli Users :: Unable to execute database query for get_watch_time_stats: %s." % e)
logger.warn("Tautulli Users :: Unable to execute database query for get_watch_time_stats: %s." % e)
result = []
for item in result:
@ -503,7 +503,7 @@ class Users(object):
else:
result = []
except Exception as e:
logger.warn(u"Tautulli Users :: Unable to execute database query for get_player_stats: %s." % e)
logger.warn("Tautulli Users :: Unable to execute database query for get_player_stats: %s." % e)
result = []
for item in result:
@ -549,7 +549,7 @@ class Users(object):
else:
result = []
except Exception as e:
logger.warn(u"Tautulli Users :: Unable to execute database query for get_recently_watched: %s." % e)
logger.warn("Tautulli Users :: Unable to execute database query for get_recently_watched: %s." % e)
result = []
for row in result:
@ -591,7 +591,7 @@ class Users(object):
'FROM users WHERE deleted_user = 0'
result = monitor_db.select(query=query)
except Exception as e:
logger.warn(u"Tautulli Users :: Unable to execute database query for get_users: %s." % e)
logger.warn("Tautulli Users :: Unable to execute database query for get_users: %s." % e)
return None
users = []
@ -625,7 +625,7 @@ class Users(object):
try:
if str(user_id).isdigit():
logger.info(u"Tautulli Users :: Deleting all history for user id %s from database." % user_id)
logger.info("Tautulli Users :: Deleting all history for user id %s from database." % user_id)
session_history_media_info_del = \
monitor_db.action('DELETE FROM '
'session_history_media_info '
@ -649,7 +649,7 @@ class Users(object):
else:
return 'Unable to delete items. Input user_id not valid.'
except Exception as e:
logger.warn(u"Tautulli Users :: Unable to execute database query for delete_all_history: %s." % e)
logger.warn("Tautulli Users :: Unable to execute database query for delete_all_history: %s." % e)
def delete(self, user_id=None):
monitor_db = database.MonitorDatabase()
@ -657,7 +657,7 @@ class Users(object):
try:
if str(user_id).isdigit():
self.delete_all_history(user_id)
logger.info(u"Tautulli Users :: Deleting user with id %s from database." % user_id)
logger.info("Tautulli Users :: Deleting user with id %s from database." % user_id)
monitor_db.action('UPDATE users SET deleted_user = 1 WHERE user_id = ?', [user_id])
monitor_db.action('UPDATE users SET keep_history = 0 WHERE user_id = ?', [user_id])
monitor_db.action('UPDATE users SET do_notify = 0 WHERE user_id = ?', [user_id])
@ -666,7 +666,7 @@ class Users(object):
else:
return 'Unable to delete user, user_id not valid.'
except Exception as e:
logger.warn(u"Tautulli Users :: Unable to execute database query for delete: %s." % e)
logger.warn("Tautulli Users :: Unable to execute database query for delete: %s." % e)
def undelete(self, user_id=None, username=None):
monitor_db = database.MonitorDatabase()
@ -676,7 +676,7 @@ class Users(object):
query = 'SELECT * FROM users WHERE user_id = ?'
result = monitor_db.select(query=query, args=[user_id])
if result:
logger.info(u"Tautulli Users :: Re-adding user with id %s to database." % user_id)
logger.info("Tautulli Users :: Re-adding user with id %s to database." % user_id)
monitor_db.action('UPDATE users SET deleted_user = 0 WHERE user_id = ?', [user_id])
monitor_db.action('UPDATE users SET keep_history = 1 WHERE user_id = ?', [user_id])
monitor_db.action('UPDATE users SET do_notify = 1 WHERE user_id = ?', [user_id])
@ -688,7 +688,7 @@ class Users(object):
query = 'SELECT * FROM users WHERE username = ?'
result = monitor_db.select(query=query, args=[username])
if result:
logger.info(u"Tautulli Users :: Re-adding user with username %s to database." % username)
logger.info("Tautulli Users :: Re-adding user with username %s to database." % username)
monitor_db.action('UPDATE users SET deleted_user = 0 WHERE username = ?', [username])
monitor_db.action('UPDATE users SET keep_history = 1 WHERE username = ?', [username])
monitor_db.action('UPDATE users SET do_notify = 1 WHERE username = ?', [username])
@ -697,7 +697,7 @@ class Users(object):
return False
except Exception as e:
logger.warn(u"Tautulli Users :: Unable to execute database query for undelete: %s." % e)
logger.warn("Tautulli Users :: Unable to execute database query for undelete: %s." % e)
# Keep method for PlexWatch/Plexivity import
def get_user_id(self, user=None):
@ -731,7 +731,7 @@ class Users(object):
result = monitor_db.select(query)
except Exception as e:
logger.warn(u"Tautulli Users :: Unable to execute database query for get_user_names: %s." % e)
logger.warn("Tautulli Users :: Unable to execute database query for get_user_names: %s." % e)
return None
return session.friendly_name_to_username(result)
@ -768,7 +768,7 @@ class Users(object):
'WHERE user_id = ?'
result = monitor_db.select_single(query, args=[user_id])
except Exception as e:
logger.warn(u"Tautulli Users :: Unable to execute database query for get_filters: %s." % e)
logger.warn("Tautulli Users :: Unable to execute database query for get_filters: %s." % e)
result = {}
filters_list = {}
@ -804,7 +804,7 @@ class Users(object):
try:
monitor_db.upsert(table_name='user_login', key_dict=keys, value_dict=values)
except Exception as e:
logger.warn(u"Tautulli Users :: Unable to execute database query for set_login_log: %s." % e)
logger.warn("Tautulli Users :: Unable to execute database query for set_login_log: %s." % e)
def get_datatables_user_login(self, user_id=None, kwargs=None):
default_return = {'recordsFiltered': 0,
@ -845,7 +845,7 @@ class Users(object):
join_evals=[['user_login.user_id', 'users.user_id']],
kwargs=kwargs)
except Exception as e:
logger.warn(u"Tautulli Users :: Unable to execute database query for get_datatables_user_login: %s." % e)
logger.warn("Tautulli Users :: Unable to execute database query for get_datatables_user_login: %s." % e)
return default_return
results = query['result']
@ -880,10 +880,10 @@ class Users(object):
monitor_db = database.MonitorDatabase()
try:
logger.info(u"Tautulli Users :: Clearing login logs from database.")
logger.info("Tautulli Users :: Clearing login logs from database.")
monitor_db.action('DELETE FROM user_login')
monitor_db.action('VACUUM')
return True
except Exception as e:
logger.warn(u"Tautulli Users :: Unable to execute database query for delete_login_log: %s." % e)
logger.warn("Tautulli Users :: Unable to execute database query for delete_login_log: %s." % e)
return False
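Every hunk above makes the same mechanical change: the redundant u prefix is dropped from each log string while the message text stays identical. A minimal sketch (standard-library logging only, not Tautulli's plexpy.logger wrapper) of why the two spellings are interchangeable:

from __future__ import unicode_literals

import logging

# Illustrative only: bare literals are unicode on Python 3, and the
# unicode_literals future import makes them unicode on Python 2 as well,
# so the u prefix adds nothing.
logging.basicConfig(level=logging.WARNING)
logger = logging.getLogger("example")

message = "Tautulli Users :: Unable to execute database query for %s."

# Both literals yield the same unicode object, so the logged text is identical.
assert message == u"Tautulli Users :: Unable to execute database query for %s."

logger.warning(message % "get_users")

Because the literals compare equal, the prefix removal is purely cosmetic and the log output is identical on both Python 2 and Python 3.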

View file

@ -40,8 +40,8 @@ def start_thread():
# Check for any existing sessions on start up
activity_pinger.check_active_sessions(ws_request=True)
except Exception as e:
logger.error(u"Tautulli WebSocket :: Failed to check for active sessions: %s." % e)
logger.warn(u"Tautulli WebSocket :: Attempt to fix by flushing temporary sessions...")
logger.error("Tautulli WebSocket :: Failed to check for active sessions: %s." % e)
logger.warn("Tautulli WebSocket :: Attempt to fix by flushing temporary sessions...")
database.delete_sessions()
# Start the websocket listener on its own thread
@ -55,7 +55,7 @@ def on_connect():
plexpy.PLEX_SERVER_UP = True
if not plexpy.PLEX_SERVER_UP:
logger.info(u"Tautulli WebSocket :: The Plex Media Server is back up.")
logger.info("Tautulli WebSocket :: The Plex Media Server is back up.")
plexpy.NOTIFY_QUEUE.put({'notify_action': 'on_intup'})
plexpy.PLEX_SERVER_UP = True
@ -69,7 +69,7 @@ def on_disconnect():
plexpy.PLEX_SERVER_UP = False
if plexpy.PLEX_SERVER_UP:
logger.info(u"Tautulli WebSocket :: Unable to get a response from the server, Plex server is down.")
logger.info("Tautulli WebSocket :: Unable to get a response from the server, Plex server is down.")
plexpy.NOTIFY_QUEUE.put({'notify_action': 'on_intdown'})
plexpy.PLEX_SERVER_UP = False
@ -79,7 +79,7 @@ def on_disconnect():
def reconnect():
close()
logger.info(u"Tautulli WebSocket :: Reconnecting websocket...")
logger.info("Tautulli WebSocket :: Reconnecting websocket...")
start_thread()
@ -90,14 +90,14 @@ def shutdown():
def close():
logger.info(u"Tautulli WebSocket :: Disconnecting websocket...")
logger.info("Tautulli WebSocket :: Disconnecting websocket...")
plexpy.WEBSOCKET.close()
plexpy.WS_CONNECTED = False
def send_ping():
if plexpy.WS_CONNECTED:
# logger.debug(u"Tautulli WebSocket :: Sending ping.")
# logger.debug("Tautulli WebSocket :: Sending ping.")
plexpy.WEBSOCKET.ping("Hi?")
global pong_timer
@ -110,7 +110,7 @@ def wait_pong():
global pong_count
pong_count += 1
logger.warning(u"Tautulli WebSocket :: Failed to receive pong from websocket, ping attempt %s." % str(pong_count))
logger.warn("Tautulli WebSocket :: Failed to receive pong from websocket, ping attempt %s." % str(pong_count))
if pong_count >= plexpy.CONFIG.WEBSOCKET_CONNECTION_ATTEMPTS:
pong_count = 0
@ -118,7 +118,7 @@ def wait_pong():
def receive_pong():
# logger.debug(u"Tautulli WebSocket :: Received pong.")
# logger.debug("Tautulli WebSocket :: Received pong.")
global pong_timer
global pong_count
if pong_timer:
@ -150,10 +150,10 @@ def run():
reconnects = 0
# Try and open the websocket connection
logger.info(u"Tautulli WebSocket :: Opening %swebsocket." % secure)
logger.info("Tautulli WebSocket :: Opening %swebsocket." % secure)
try:
plexpy.WEBSOCKET = create_connection(uri, header=header)
logger.info(u"Tautulli WebSocket :: Ready")
logger.info("Tautulli WebSocket :: Ready")
plexpy.WS_CONNECTED = True
except (websocket.WebSocketException, IOError, Exception) as e:
logger.error("Tautulli WebSocket :: %s." % e)
@ -173,7 +173,7 @@ def run():
break
if reconnects == 0:
logger.warn(u"Tautulli WebSocket :: Connection has closed.")
logger.warn("Tautulli WebSocket :: Connection has closed.")
if not plexpy.CONFIG.PMS_IS_CLOUD and reconnects < plexpy.CONFIG.WEBSOCKET_CONNECTION_ATTEMPTS:
reconnects += 1
@ -182,11 +182,11 @@ def run():
if reconnects > 1:
time.sleep(plexpy.CONFIG.WEBSOCKET_CONNECTION_TIMEOUT)
logger.warn(u"Tautulli WebSocket :: Reconnection attempt %s." % str(reconnects))
logger.warn("Tautulli WebSocket :: Reconnection attempt %s." % str(reconnects))
try:
plexpy.WEBSOCKET = create_connection(uri, header=header)
logger.info(u"Tautulli WebSocket :: Ready")
logger.info("Tautulli WebSocket :: Ready")
plexpy.WS_CONNECTED = True
except (websocket.WebSocketException, IOError, Exception) as e:
logger.error("Tautulli WebSocket :: %s." % e)
@ -206,7 +206,7 @@ def run():
if not plexpy.WS_CONNECTED and not ws_shutdown:
on_disconnect()
logger.debug(u"Tautulli WebSocket :: Leaving thread.")
logger.debug("Tautulli WebSocket :: Leaving thread.")
def receive(ws):
@ -220,7 +220,7 @@ def receive(ws):
ws.send_close()
return frame.opcode, None
elif frame.opcode == websocket.ABNF.OPCODE_PING:
# logger.debug(u"Tautulli WebSocket :: Received ping, sending pong.")
# logger.debug("Tautulli WebSocket :: Received ping, sending pong.")
ws.pong("Hi!")
elif frame.opcode == websocket.ABNF.OPCODE_PONG:
receive_pong()
@ -236,7 +236,7 @@ def process(opcode, data):
logger.websocket_debug(data)
info = json.loads(data)
except Exception as e:
logger.warn(u"Tautulli WebSocket :: Error decoding message from websocket: %s" % e)
logger.warn("Tautulli WebSocket :: Error decoding message from websocket: %s" % e)
logger.websocket_error(data)
return False
@ -250,26 +250,26 @@ def process(opcode, data):
time_line = info.get('PlaySessionStateNotification', info.get('_children', {}))
if not time_line:
logger.debug(u"Tautulli WebSocket :: Session found but unable to get timeline data.")
logger.debug("Tautulli WebSocket :: Session found but unable to get timeline data.")
return False
try:
activity = activity_handler.ActivityHandler(timeline=time_line[0])
activity.process()
except Exception as e:
logger.error(u"Tautulli WebSocket :: Failed to process session data: %s." % e)
logger.error("Tautulli WebSocket :: Failed to process session data: %s." % e)
if type == 'timeline':
time_line = info.get('TimelineEntry', info.get('_children', {}))
if not time_line:
logger.debug(u"Tautulli WebSocket :: Timeline event found but unable to get timeline data.")
logger.debug("Tautulli WebSocket :: Timeline event found but unable to get timeline data.")
return False
try:
activity = activity_handler.TimelineHandler(timeline=time_line[0])
activity.process()
except Exception as e:
logger.error(u"Tautulli WebSocket :: Failed to process timeline data: %s." % e)
logger.error("Tautulli WebSocket :: Failed to process timeline data: %s." % e)
return True
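The websocket hunks above also touch Tautulli's keepalive logic: send_ping() transmits a ping frame and arms a timer, receive_pong() cancels it, and wait_pong() counts consecutive misses against WEBSOCKET_CONNECTION_ATTEMPTS before the connection is recycled. A self-contained sketch of that watchdog pattern (names, timeout, and threshold are illustrative, not Tautulli's values):

from __future__ import print_function

import threading

PONG_TIMEOUT = 5      # seconds to wait for a pong before counting a miss
MAX_MISSED_PONGS = 3  # recycle the connection after this many consecutive misses


class PingWatchdog(object):
    def __init__(self, send_ping, reconnect):
        self._send_ping = send_ping    # callable that sends a ping frame
        self._reconnect = reconnect    # callable that closes and reopens the socket
        self._timer = None
        self._missed = 0

    def ping(self):
        # Send a ping and start a timer that fires if no pong arrives in time.
        self._send_ping()
        self._timer = threading.Timer(PONG_TIMEOUT, self._on_timeout)
        self._timer.start()

    def pong_received(self):
        # A pong arrived: cancel the pending timer and reset the miss counter.
        if self._timer is not None:
            self._timer.cancel()
        self._missed = 0

    def _on_timeout(self):
        self._missed += 1
        if self._missed >= MAX_MISSED_PONGS:
            self._missed = 0
            self._reconnect()


if __name__ == "__main__":
    watchdog = PingWatchdog(send_ping=lambda: print("ping"),
                            reconnect=lambda: print("reconnecting"))
    watchdog.ping()           # arms the timer
    watchdog.pong_received()  # cancels it before the timeout fires

Using a threading.Timer per ping keeps the receive loop free to block on the socket, which mirrors how the module-level pong_timer is used above.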

View file

@ -84,7 +84,7 @@ def plex_user_login(username=None, password=None, token=None, headers=None):
# Register the new user / update the access tokens.
monitor_db = MonitorDatabase()
try:
logger.debug(u"Tautulli WebAuth :: Registering token for user '%s' in the database."
logger.debug("Tautulli WebAuth :: Registering token for user '%s' in the database."
% user_details['username'])
result = monitor_db.action('UPDATE users SET server_token = ? WHERE user_id = ?',
[server_token, user_details['user_id']])
@ -95,23 +95,23 @@ def plex_user_login(username=None, password=None, token=None, headers=None):
# Successful login
return user_details, 'guest'
else:
logger.warn(u"Tautulli WebAuth :: Unable to register user '%s' in database."
logger.warn("Tautulli WebAuth :: Unable to register user '%s' in database."
% user_details['username'])
return None
except Exception as e:
logger.warn(u"Tautulli WebAuth :: Unable to register user '%s' in database: %s."
logger.warn("Tautulli WebAuth :: Unable to register user '%s' in database: %s."
% (user_details['username'], e))
return None
else:
logger.warn(u"Tautulli WebAuth :: Unable to retrieve Plex.tv server token for user '%s'."
logger.warn("Tautulli WebAuth :: Unable to retrieve Plex.tv server token for user '%s'."
% user_details['username'])
return None
elif username:
logger.warn(u"Tautulli WebAuth :: Unable to retrieve Plex.tv user token for user '%s'." % username)
logger.warn("Tautulli WebAuth :: Unable to retrieve Plex.tv user token for user '%s'." % username)
return None
elif token:
logger.warn(u"Tautulli WebAuth :: Unable to retrieve Plex.tv user token for Plex OAuth.")
logger.warn("Tautulli WebAuth :: Unable to retrieve Plex.tv user token for Plex OAuth.")
return None
@ -256,12 +256,12 @@ class AuthController(object):
if success:
use_oauth = 'Plex OAuth' if oauth else 'form'
logger.debug(u"Tautulli WebAuth :: %s user '%s' logged into Tautulli using %s login."
logger.debug("Tautulli WebAuth :: %s user '%s' logged into Tautulli using %s login."
% (user_group.capitalize(), username, use_oauth))
def on_logout(self, username, user_group):
"""Called on logout"""
logger.debug(u"Tautulli WebAuth :: %s user '%s' logged out of Tautulli." % (user_group.capitalize(), username))
logger.debug("Tautulli WebAuth :: %s user '%s' logged out of Tautulli." % (user_group.capitalize(), username))
def get_loginform(self, redirect_uri=''):
from plexpy.webserve import serve_template
@ -342,18 +342,18 @@ class AuthController(object):
elif admin_login == '1' and username:
self.on_login(username=username)
logger.debug(u"Tautulli WebAuth :: Invalid admin login attempt from '%s'." % username)
logger.debug("Tautulli WebAuth :: Invalid admin login attempt from '%s'." % username)
cherrypy.response.status = 401
return error_message
elif username:
self.on_login(username=username)
logger.debug(u"Tautulli WebAuth :: Invalid user login attempt from '%s'." % username)
logger.debug("Tautulli WebAuth :: Invalid user login attempt from '%s'." % username)
cherrypy.response.status = 401
return error_message
elif token:
self.on_login(username='Plex OAuth', oauth=True)
logger.debug(u"Tautulli WebAuth :: Invalid Plex OAuth login attempt.")
logger.debug("Tautulli WebAuth :: Invalid Plex OAuth login attempt.")
cherrypy.response.status = 401
return error_message

View file

@ -226,7 +226,7 @@ class WebInterface(object):
if result:
return serve_template(templatename="current_activity.html", data=result)
else:
logger.warn(u"Unable to retrieve data for get_current_activity.")
logger.warn("Unable to retrieve data for get_current_activity.")
return serve_template(templatename="current_activity.html", data=None)
@cherrypy.expose
@ -324,7 +324,7 @@ class WebInterface(object):
if result:
return serve_template(templatename="recently_added.html", data=result['recently_added'])
else:
logger.warn(u"Unable to retrieve data for get_recently_added.")
logger.warn("Unable to retrieve data for get_recently_added.")
return serve_template(templatename="recently_added.html", data=None)
@cherrypy.expose
@ -459,14 +459,14 @@ class WebInterface(object):
if result:
return result
else:
logger.warn(u"Unable to retrieve data for get_library_sections.")
logger.warn("Unable to retrieve data for get_library_sections.")
@cherrypy.expose
@cherrypy.tools.json_out()
@requireAuth(member_of("admin"))
def refresh_libraries_list(self, **kwargs):
""" Manually refresh the libraries list. """
logger.info(u"Manual libraries list refresh requested.")
logger.info("Manual libraries list refresh requested.")
result = libraries.refresh_libraries()
if result:
@ -490,10 +490,10 @@ class WebInterface(object):
library_data = libraries.Libraries()
library_details = library_data.get_details(section_id=section_id)
except:
logger.warn(u"Unable to retrieve library details for section_id %s " % section_id)
logger.warn("Unable to retrieve library details for section_id %s " % section_id)
return serve_template(templatename="library.html", title="Library", data=None, config=config)
else:
logger.debug(u"Library page requested but no section_id received.")
logger.debug("Library page requested but no section_id received.")
return serve_template(templatename="library.html", title="Library", data=None, config=config)
return serve_template(templatename="library.html", title="Library", data=library_details, config=config)
@ -562,7 +562,7 @@ class WebInterface(object):
if result:
return serve_template(templatename="user_watch_time_stats.html", data=result, title="Watch Stats")
else:
logger.warn(u"Unable to retrieve data for library_watch_time_stats.")
logger.warn("Unable to retrieve data for library_watch_time_stats.")
return serve_template(templatename="user_watch_time_stats.html", data=None, title="Watch Stats")
@cherrypy.expose
@ -580,7 +580,7 @@ class WebInterface(object):
if result:
return serve_template(templatename="library_user_stats.html", data=result, title="Player Stats")
else:
logger.warn(u"Unable to retrieve data for library_user_stats.")
logger.warn("Unable to retrieve data for library_user_stats.")
return serve_template(templatename="library_user_stats.html", data=None, title="Player Stats")
@cherrypy.expose
@ -598,7 +598,7 @@ class WebInterface(object):
if result:
return serve_template(templatename="user_recently_watched.html", data=result, title="Recently Watched")
else:
logger.warn(u"Unable to retrieve data for library_recently_watched.")
logger.warn("Unable to retrieve data for library_recently_watched.")
return serve_template(templatename="user_recently_watched.html", data=None, title="Recently Watched")
@cherrypy.expose
@ -616,7 +616,7 @@ class WebInterface(object):
if result:
return serve_template(templatename="library_recently_added.html", data=result['recently_added'], title="Recently Added")
else:
logger.warn(u"Unable to retrieve data for library_recently_added.")
logger.warn("Unable to retrieve data for library_recently_added.")
return serve_template(templatename="library_recently_added.html", data=None, title="Recently Added")
@cherrypy.expose
@ -781,9 +781,9 @@ class WebInterface(object):
if library_details:
return library_details
else:
logger.warn(u"Unable to retrieve data for get_library.")
logger.warn("Unable to retrieve data for get_library.")
else:
logger.warn(u"Library details requested but no section_id received.")
logger.warn("Library details requested but no section_id received.")
@cherrypy.expose
@cherrypy.tools.json_out()
@ -828,9 +828,9 @@ class WebInterface(object):
if result:
return result
else:
logger.warn(u"Unable to retrieve data for get_library_watch_time_stats.")
logger.warn("Unable to retrieve data for get_library_watch_time_stats.")
else:
logger.warn(u"Library watch time stats requested but no section_id received.")
logger.warn("Library watch time stats requested but no section_id received.")
@cherrypy.expose
@cherrypy.tools.json_out()
@ -871,9 +871,9 @@ class WebInterface(object):
if result:
return result
else:
logger.warn(u"Unable to retrieve data for get_library_user_stats.")
logger.warn("Unable to retrieve data for get_library_user_stats.")
else:
logger.warn(u"Library user stats requested but no section_id received.")
logger.warn("Library user stats requested but no section_id received.")
@cherrypy.expose
@cherrypy.tools.json_out()
@ -1094,7 +1094,7 @@ class WebInterface(object):
@requireAuth(member_of("admin"))
def refresh_users_list(self, **kwargs):
""" Manually refresh the users list. """
logger.info(u"Manual users list refresh requested.")
logger.info("Manual users list refresh requested.")
result = users.refresh_users()
if result:
@ -1113,10 +1113,10 @@ class WebInterface(object):
user_data = users.Users()
user_details = user_data.get_details(user_id=user_id)
except:
logger.warn(u"Unable to retrieve user details for user_id %s " % user_id)
logger.warn("Unable to retrieve user details for user_id %s " % user_id)
return serve_template(templatename="user.html", title="User", data=None)
else:
logger.debug(u"User page requested but no user_id received.")
logger.debug("User page requested but no user_id received.")
return serve_template(templatename="user.html", title="User", data=None)
return serve_template(templatename="user.html", title="User", data=user_details)
@ -1190,7 +1190,7 @@ class WebInterface(object):
if result:
return serve_template(templatename="user_watch_time_stats.html", data=result, title="Watch Stats")
else:
logger.warn(u"Unable to retrieve data for user_watch_time_stats.")
logger.warn("Unable to retrieve data for user_watch_time_stats.")
return serve_template(templatename="user_watch_time_stats.html", data=None, title="Watch Stats")
@cherrypy.expose
@ -1208,7 +1208,7 @@ class WebInterface(object):
if result:
return serve_template(templatename="user_player_stats.html", data=result, title="Player Stats")
else:
logger.warn(u"Unable to retrieve data for user_player_stats.")
logger.warn("Unable to retrieve data for user_player_stats.")
return serve_template(templatename="user_player_stats.html", data=None, title="Player Stats")
@cherrypy.expose
@ -1226,7 +1226,7 @@ class WebInterface(object):
if result:
return serve_template(templatename="user_recently_watched.html", data=result, title="Recently Watched")
else:
logger.warn(u"Unable to retrieve data for get_user_recently_watched.")
logger.warn("Unable to retrieve data for get_user_recently_watched.")
return serve_template(templatename="user_recently_watched.html", data=None, title="Recently Watched")
@cherrypy.expose
@ -1392,9 +1392,9 @@ class WebInterface(object):
if user_details:
return user_details
else:
logger.warn(u"Unable to retrieve data for get_user.")
logger.warn("Unable to retrieve data for get_user.")
else:
logger.warn(u"User details requested but no user_id received.")
logger.warn("User details requested but no user_id received.")
@cherrypy.expose
@cherrypy.tools.json_out()
@ -1439,9 +1439,9 @@ class WebInterface(object):
if result:
return result
else:
logger.warn(u"Unable to retrieve data for get_user_watch_time_stats.")
logger.warn("Unable to retrieve data for get_user_watch_time_stats.")
else:
logger.warn(u"User watch time stats requested but no user_id received.")
logger.warn("User watch time stats requested but no user_id received.")
@cherrypy.expose
@cherrypy.tools.json_out()
@ -1482,9 +1482,9 @@ class WebInterface(object):
if result:
return result
else:
logger.warn(u"Unable to retrieve data for get_user_player_stats.")
logger.warn("Unable to retrieve data for get_user_player_stats.")
else:
logger.warn(u"User watch time stats requested but no user_id received.")
logger.warn("User watch time stats requested but no user_id received.")
@cherrypy.expose
@cherrypy.tools.json_out()
@ -1890,7 +1890,7 @@ class WebInterface(object):
if result:
return result
else:
logger.warn(u"Unable to retrieve data for get_plays_by_date.")
logger.warn("Unable to retrieve data for get_plays_by_date.")
@cherrypy.expose
@cherrypy.tools.json_out()
@ -1929,7 +1929,7 @@ class WebInterface(object):
if result:
return result
else:
logger.warn(u"Unable to retrieve data for get_plays_by_dayofweek.")
logger.warn("Unable to retrieve data for get_plays_by_dayofweek.")
@cherrypy.expose
@cherrypy.tools.json_out()
@ -1968,7 +1968,7 @@ class WebInterface(object):
if result:
return result
else:
logger.warn(u"Unable to retrieve data for get_plays_by_hourofday.")
logger.warn("Unable to retrieve data for get_plays_by_hourofday.")
@cherrypy.expose
@cherrypy.tools.json_out()
@ -2007,7 +2007,7 @@ class WebInterface(object):
if result:
return result
else:
logger.warn(u"Unable to retrieve data for get_plays_per_month.")
logger.warn("Unable to retrieve data for get_plays_per_month.")
@cherrypy.expose
@cherrypy.tools.json_out()
@ -2046,7 +2046,7 @@ class WebInterface(object):
if result:
return result
else:
logger.warn(u"Unable to retrieve data for get_plays_by_top_10_platforms.")
logger.warn("Unable to retrieve data for get_plays_by_top_10_platforms.")
@cherrypy.expose
@cherrypy.tools.json_out()
@ -2085,7 +2085,7 @@ class WebInterface(object):
if result:
return result
else:
logger.warn(u"Unable to retrieve data for get_plays_by_top_10_users.")
logger.warn("Unable to retrieve data for get_plays_by_top_10_users.")
@cherrypy.expose
@cherrypy.tools.json_out()
@ -2124,7 +2124,7 @@ class WebInterface(object):
if result:
return result
else:
logger.warn(u"Unable to retrieve data for get_plays_by_stream_type.")
logger.warn("Unable to retrieve data for get_plays_by_stream_type.")
@cherrypy.expose
@cherrypy.tools.json_out()
@ -2163,7 +2163,7 @@ class WebInterface(object):
if result:
return result
else:
logger.warn(u"Unable to retrieve data for get_plays_by_source_resolution.")
logger.warn("Unable to retrieve data for get_plays_by_source_resolution.")
@cherrypy.expose
@cherrypy.tools.json_out()
@ -2202,7 +2202,7 @@ class WebInterface(object):
if result:
return result
else:
logger.warn(u"Unable to retrieve data for get_plays_by_stream_resolution.")
logger.warn("Unable to retrieve data for get_plays_by_stream_resolution.")
@cherrypy.expose
@cherrypy.tools.json_out()
@ -2241,7 +2241,7 @@ class WebInterface(object):
if result:
return result
else:
logger.warn(u"Unable to retrieve data for get_stream_type_by_top_10_users.")
logger.warn("Unable to retrieve data for get_stream_type_by_top_10_users.")
@cherrypy.expose
@cherrypy.tools.json_out()
@ -2280,7 +2280,7 @@ class WebInterface(object):
if result:
return result
else:
logger.warn(u"Unable to retrieve data for get_stream_type_by_top_10_platforms.")
logger.warn("Unable to retrieve data for get_stream_type_by_top_10_platforms.")
@cherrypy.expose
@requireAuth()
@ -2312,7 +2312,7 @@ class WebInterface(object):
if result:
output = {"data": result}
else:
logger.warn(u"Unable to retrieve data for get_sync.")
logger.warn("Unable to retrieve data for get_sync.")
output = {"data": []}
return output
@ -2435,7 +2435,7 @@ class WebInterface(object):
try:
log_lines = {'data': log_reader.get_log_tail(window=window, parsed=True, log_type=log_type)}
except:
logger.warn(u"Unable to retrieve Plex Logs.")
logger.warn("Unable to retrieve Plex Logs.")
return log_lines
@ -2659,7 +2659,7 @@ class WebInterface(object):
except Exception as e:
result = 'error'
msg = 'Failed to clear the %s file.' % filename
logger.exception(u'Failed to clear the %s file: %s.' % (filename, e))
logger.exception('Failed to clear the %s file: %s.' % (filename, e))
return {'result': result, 'message': msg}
@ -2669,15 +2669,15 @@ class WebInterface(object):
plexpy.VERBOSE = not plexpy.VERBOSE
logger.initLogger(console=not plexpy.QUIET,
log_dir=plexpy.CONFIG.LOG_DIR, verbose=plexpy.VERBOSE)
logger.info(u"Verbose toggled, set to %s", plexpy.VERBOSE)
logger.debug(u"If you read this message, debug logging is available")
logger.info("Verbose toggled, set to %s", plexpy.VERBOSE)
logger.debug("If you read this message, debug logging is available")
raise cherrypy.HTTPRedirect(plexpy.HTTP_ROOT + "logs")
@cherrypy.expose
@requireAuth()
def log_js_errors(self, page, message, file, line, **kwargs):
""" Logs javascript errors from the web interface. """
logger.error(u"WebUI :: /%s : %s. (%s:%s)" % (page.rpartition('/')[-1],
logger.error("WebUI :: /%s : %s. (%s:%s)" % (page.rpartition('/')[-1],
message,
file.rpartition('/')[-1].partition('?')[0],
line))
@ -3336,7 +3336,7 @@ class WebInterface(object):
notifier = notifiers.get_notifier_config(notifier_id=notifier_id)
if notifier:
logger.debug(u"Sending %s%s notification." % (test, notifier['agent_label']))
logger.debug("Sending %s%s notification." % (test, notifier['agent_label']))
notification_handler.add_notifier_each(notifier_id=notifier_id,
notify_action=notify_action,
subject=subject,
@ -3345,10 +3345,10 @@ class WebInterface(object):
**kwargs)
return {'result': 'success', 'message': 'Notification queued.'}
else:
logger.debug(u"Unable to send %snotification, invalid notifier_id %s." % (test, notifier_id))
logger.debug("Unable to send %snotification, invalid notifier_id %s." % (test, notifier_id))
return {'result': 'error', 'message': 'Invalid notifier id %s.' % notifier_id}
else:
logger.debug(u"Unable to send %snotification, no notifier_id received." % test)
logger.debug("Unable to send %snotification, no notifier_id received." % test)
return {'result': 'error', 'message': 'No notifier id received.'}
@cherrypy.expose
@ -3421,7 +3421,7 @@ class WebInterface(object):
if result:
osx_notify = notifiers.OSX()
osx_notify.notify(subject='Registered', body='Success :-)', subtitle=result)
# logger.info(u"Registered %s, to re-register a different app, delete this app first" % result)
# logger.info("Registered %s, to re-register a different app, delete this app first" % result)
else:
logger.warn(msg)
return msg
@ -3585,7 +3585,7 @@ class WebInterface(object):
elif app == 'plexivity':
return serve_template(templatename="app_import.html", title="Import Plexivity Database", app="Plexivity")
logger.warn(u"No app specified for import.")
logger.warn("No app specified for import.")
return
@cherrypy.expose
@ -3616,7 +3616,7 @@ class WebInterface(object):
if result:
return result['auth_token']
else:
logger.warn(u"Unable to retrieve Plex.tv token.")
logger.warn("Unable to retrieve Plex.tv token.")
return None
@cherrypy.expose
@ -3741,7 +3741,7 @@ class WebInterface(object):
if result:
return result
else:
logger.warn(u"Unable to retrieve data for get_server_pref.")
logger.warn("Unable to retrieve data for get_server_pref.")
@cherrypy.expose
@cherrypy.tools.json_out()
@ -3751,7 +3751,7 @@ class WebInterface(object):
while not apikey or apikey == plexpy.CONFIG.API_KEY or mobile_app.get_mobile_device_by_token(device_token=apikey):
apikey = plexpy.generate_uuid()
logger.info(u"New API key generated.")
logger.info("New API key generated.")
logger._BLACKLIST_WORDS.add(apikey)
if device == 'true':
@ -3862,7 +3862,7 @@ class WebInterface(object):
@requireAuth(member_of("admin"))
def checkout_git_branch(self, git_remote=None, git_branch=None, **kwargs):
if git_branch == plexpy.CONFIG.GIT_BRANCH:
logger.error(u"Already on the %s branch" % git_branch)
logger.error("Already on the %s branch" % git_branch)
raise cherrypy.HTTPRedirect(plexpy.HTTP_ROOT + "home")
# Set the new git remote and branch
@ -3942,7 +3942,7 @@ class WebInterface(object):
if result:
return serve_template(templatename="info_children_list.html", data=result, title="Children List")
else:
logger.warn(u"Unable to retrieve data for get_item_children.")
logger.warn("Unable to retrieve data for get_item_children.")
return serve_template(templatename="info_children_list.html", data=None, title="Children List")
@cherrypy.expose
@ -4098,10 +4098,10 @@ class WebInterface(object):
return result[0]
else:
raise Exception(u'PMS image request failed')
raise Exception('PMS image request failed')
except Exception as e:
logger.warn(u'Failed to get image %s, falling back to %s.' % (img, fallback))
logger.warn('Failed to get image %s, falling back to %s.' % (img, fallback))
fbi = None
if fallback == 'poster':
fbi = common.DEFAULT_POSTER_THUMB
@ -4246,7 +4246,7 @@ class WebInterface(object):
except OSError as e:
result = 'error'
msg = 'Failed to delete %s.' % cache_dir
logger.exception(u'Failed to delete %s: %s.' % (cache_dir, e))
logger.exception('Failed to delete %s: %s.' % (cache_dir, e))
return {'result': result, 'message': msg}
try:
@ -4254,7 +4254,7 @@ class WebInterface(object):
except OSError as e:
result = 'error'
msg = 'Failed to make %s.' % cache_dir
logger.exception(u'Failed to create %s: %s.' % (cache_dir, e))
logger.exception('Failed to create %s: %s.' % (cache_dir, e))
return {'result': result, 'message': msg}
logger.info(msg)
@ -4369,7 +4369,7 @@ class WebInterface(object):
if result:
return result
else:
logger.warn(u"Unable to retrieve data for search_results.")
logger.warn("Unable to retrieve data for search_results.")
@cherrypy.expose
@requireAuth()
@ -4387,7 +4387,7 @@ class WebInterface(object):
if result:
return serve_template(templatename="info_search_results_list.html", data=result, title="Search Result List")
else:
logger.warn(u"Unable to retrieve data for get_search_results_children.")
logger.warn("Unable to retrieve data for get_search_results_children.")
return serve_template(templatename="info_search_results_list.html", data=None, title="Search Result List")
@ -4407,7 +4407,7 @@ class WebInterface(object):
if query:
return serve_template(templatename="update_metadata.html", query=query, update=update, title="Info")
else:
logger.warn(u"Unable to retrieve data for update_metadata.")
logger.warn("Unable to retrieve data for update_metadata.")
return serve_template(templatename="update_metadata.html", query=query, update=update, title="Info")
@cherrypy.expose
@ -4476,7 +4476,7 @@ class WebInterface(object):
if result:
return result
else:
logger.warn(u"Unable to retrieve data for get_new_rating_keys.")
logger.warn("Unable to retrieve data for get_new_rating_keys.")
@cherrypy.expose
@cherrypy.tools.json_out()
@ -4505,7 +4505,7 @@ class WebInterface(object):
if result:
return result
else:
logger.warn(u"Unable to retrieve data for get_old_rating_keys.")
logger.warn("Unable to retrieve data for get_old_rating_keys.")
@cherrypy.expose
@ -4519,7 +4519,7 @@ class WebInterface(object):
if result:
return result
else:
logger.warn(u"Unable to retrieve data for get_pms_sessions_json.")
logger.warn("Unable to retrieve data for get_pms_sessions_json.")
return False
@cherrypy.expose
@ -4678,7 +4678,7 @@ class WebInterface(object):
if metadata:
return metadata
else:
logger.warn(u"Unable to retrieve data for get_metadata_details.")
logger.warn("Unable to retrieve data for get_metadata_details.")
@cherrypy.expose
@cherrypy.tools.json_out()
@ -4733,7 +4733,7 @@ class WebInterface(object):
if result:
return result
else:
logger.warn(u"Unable to retrieve data for get_recently_added_details.")
logger.warn("Unable to retrieve data for get_recently_added_details.")
@cherrypy.expose
@cherrypy.tools.json_out()
@ -4747,7 +4747,7 @@ class WebInterface(object):
if result:
return result
else:
logger.warn(u"Unable to retrieve data for get_friends_list.")
logger.warn("Unable to retrieve data for get_friends_list.")
@cherrypy.expose
@cherrypy.tools.json_out()
@ -4761,7 +4761,7 @@ class WebInterface(object):
if result:
return result
else:
logger.warn(u"Unable to retrieve data for get_user_details.")
logger.warn("Unable to retrieve data for get_user_details.")
@cherrypy.expose
@cherrypy.tools.json_out()
@ -4775,7 +4775,7 @@ class WebInterface(object):
if result:
return result
else:
logger.warn(u"Unable to retrieve data for get_server_list.")
logger.warn("Unable to retrieve data for get_server_list.")
@cherrypy.expose
@cherrypy.tools.json_out()
@ -4788,7 +4788,7 @@ class WebInterface(object):
if result:
return result
else:
logger.warn(u"Unable to retrieve data for get_sync_lists.")
logger.warn("Unable to retrieve data for get_sync_lists.")
@cherrypy.expose
@cherrypy.tools.json_out()
@ -4800,7 +4800,7 @@ class WebInterface(object):
if result:
return result
else:
logger.warn(u"Unable to retrieve data for get_servers.")
logger.warn("Unable to retrieve data for get_servers.")
@cherrypy.expose
@cherrypy.tools.json_out()
@ -4833,7 +4833,7 @@ class WebInterface(object):
if result:
return result
else:
logger.warn(u"Unable to retrieve data for get_servers_info.")
logger.warn("Unable to retrieve data for get_servers_info.")
@cherrypy.expose
@cherrypy.tools.json_out()
@ -4863,7 +4863,7 @@ class WebInterface(object):
if result:
return result
else:
logger.warn(u"Unable to retrieve data for get_server_identity.")
logger.warn("Unable to retrieve data for get_server_identity.")
@cherrypy.expose
@cherrypy.tools.json_out()
@ -4888,7 +4888,7 @@ class WebInterface(object):
if result:
return result
else:
logger.warn(u"Unable to retrieve data for get_server_friendly_name.")
logger.warn("Unable to retrieve data for get_server_friendly_name.")
@cherrypy.expose
@cherrypy.tools.json_out()
@ -5168,9 +5168,9 @@ class WebInterface(object):
return result
else:
logger.warn(u"Unable to retrieve data for get_activity.")
logger.warn("Unable to retrieve data for get_activity.")
except Exception as e:
logger.exception(u"Unable to retrieve data for get_activity: %s" % e)
logger.exception("Unable to retrieve data for get_activity: %s" % e)
@cherrypy.expose
@cherrypy.tools.json_out()
@ -5208,7 +5208,7 @@ class WebInterface(object):
if result:
return result
else:
logger.warn(u"Unable to retrieve data for get_full_libraries_list.")
logger.warn("Unable to retrieve data for get_full_libraries_list.")
@cherrypy.expose
@cherrypy.tools.json_out()
@ -5256,7 +5256,7 @@ class WebInterface(object):
if result:
return result
else:
logger.warn(u"Unable to retrieve data for get_full_users_list.")
logger.warn("Unable to retrieve data for get_full_users_list.")
@cherrypy.expose
@cherrypy.tools.json_out()
@ -5310,7 +5310,7 @@ class WebInterface(object):
if result:
return result
else:
logger.warn(u"Unable to retrieve data for get_synced_items.")
logger.warn("Unable to retrieve data for get_synced_items.")
@cherrypy.expose
@cherrypy.tools.json_out()
@ -5323,7 +5323,7 @@ class WebInterface(object):
if result:
return result
else:
logger.warn(u"Unable to retrieve data for get_sync_transcode_queue.")
logger.warn("Unable to retrieve data for get_sync_transcode_queue.")
@cherrypy.expose
@cherrypy.tools.json_out()
@ -5418,7 +5418,7 @@ class WebInterface(object):
if result:
return result
else:
logger.warn(u"Unable to retrieve data for get_home_stats.")
logger.warn("Unable to retrieve data for get_home_stats.")
@cherrypy.expose
@requireAuth(member_of("admin"))
@ -5778,7 +5778,7 @@ class WebInterface(object):
newsletter = newsletters.get_newsletter_config(newsletter_id=newsletter_id)
if newsletter:
logger.debug(u"Sending %s%s newsletter." % (test, newsletter['agent_label']))
logger.debug("Sending %s%s newsletter." % (test, newsletter['agent_label']))
newsletter_handler.add_newsletter_each(newsletter_id=newsletter_id,
notify_action=notify_action,
subject=subject,
@ -5787,10 +5787,10 @@ class WebInterface(object):
**kwargs)
return {'result': 'success', 'message': 'Newsletter queued.'}
else:
logger.debug(u"Unable to send %snewsletter, invalid newsletter_id %s." % (test, newsletter_id))
logger.debug("Unable to send %snewsletter, invalid newsletter_id %s." % (test, newsletter_id))
return {'result': 'error', 'message': 'Invalid newsletter id %s.' % newsletter_id}
else:
logger.debug(u"Unable to send %snotification, no newsletter_id received." % test)
logger.debug("Unable to send %snotification, no newsletter_id received." % test)
return {'result': 'error', 'message': 'No newsletter id received.'}
@cherrypy.expose
@ -5873,10 +5873,10 @@ class WebInterface(object):
return newsletter_agent.generate_newsletter(preview=preview)
logger.error(u"Failed to retrieve newsletter: Invalid newsletter_id %s" % newsletter_id)
logger.error("Failed to retrieve newsletter: Invalid newsletter_id %s" % newsletter_id)
return "Failed to retrieve newsletter: invalid newsletter_id parameter"
logger.error(u"Failed to retrieve newsletter: Missing newsletter_id parameter.")
logger.error("Failed to retrieve newsletter: Missing newsletter_id parameter.")
return "Failed to retrieve newsletter: missing newsletter_id parameter"
@cherrypy.expose

View file

@ -26,7 +26,7 @@ from plexpy.webserve import WebInterface
def start():
logger.info(u"Tautulli WebStart :: Initializing Tautulli web server...")
logger.info("Tautulli WebStart :: Initializing Tautulli web server...")
web_config = {
'http_port': plexpy.HTTP_PORT,
'http_host': plexpy.CONFIG.HTTP_HOST,
@ -45,12 +45,12 @@ def start():
def stop():
logger.info(u"Tautulli WebStart :: Stopping Tautulli web server...")
logger.info("Tautulli WebStart :: Stopping Tautulli web server...")
cherrypy.engine.exit()
def restart():
logger.info(u"Tautulli WebStart :: Restarting Tautulli web server...")
logger.info("Tautulli WebStart :: Restarting Tautulli web server...")
stop()
start()
@ -69,11 +69,11 @@ def initialize(options):
(not (https_cert and os.path.exists(https_cert)) or
not (https_key and os.path.exists(https_key))):
if not create_https_certificates(https_cert, https_key):
logger.warn(u"Tautulli WebStart :: Unable to create certificate and key. Disabling HTTPS")
logger.warn("Tautulli WebStart :: Unable to create certificate and key. Disabling HTTPS")
enable_https = False
if not (os.path.exists(https_cert) and os.path.exists(https_key)):
logger.warn(u"Tautulli WebStart :: Disabled HTTPS because of missing certificate and key.")
logger.warn("Tautulli WebStart :: Disabled HTTPS because of missing certificate and key.")
enable_https = False
options_dict = {
@ -107,7 +107,7 @@ def initialize(options):
if plexpy.CONFIG.HTTP_PLEX_ADMIN:
login_allowed.append("Plex admin")
logger.info(u"Tautulli WebStart :: Web server authentication is enabled: %s.", ' and '.join(login_allowed))
logger.info("Tautulli WebStart :: Web server authentication is enabled: %s.", ' and '.join(login_allowed))
if options['http_basic_auth']:
plexpy.AUTH_ENABLED = False
@ -245,7 +245,7 @@ def initialize(options):
cherrypy.tree.mount(BaseRedirect(), '/')
try:
logger.info(u"Tautulli WebStart :: Starting Tautulli web server on %s://%s:%d%s", protocol,
logger.info("Tautulli WebStart :: Starting Tautulli web server on %s://%s:%d%s", protocol,
options['http_host'], options['http_port'], options['http_root'])
cherrypy.process.servers.check_port(str(options['http_host']), options['http_port'])
if not plexpy.DEV:
@ -274,7 +274,7 @@ class BaseRedirect(object):
def proxy():
# logger.debug(u"REQUEST URI: %s, HEADER [X-Forwarded-Host]: %s, [X-Host]: %s, [Origin]: %s, [Host]: %s",
# logger.debug("REQUEST URI: %s, HEADER [X-Forwarded-Host]: %s, [X-Host]: %s, [Origin]: %s, [Host]: %s",
# cherrypy.request.wsgi_environ['REQUEST_URI'],
# cherrypy.request.headers.get('X-Forwarded-Host'),
# cherrypy.request.headers.get('X-Host'),
@ -290,7 +290,7 @@ def proxy():
local = 'Origin'
elif cherrypy.request.headers.get('Host'): # nginx
local = 'Host'
# logger.debug(u"cherrypy.tools.proxy.local set to [%s]", local)
# logger.debug("cherrypy.tools.proxy.local set to [%s]", local)
# Call original cherrypy proxy tool with the new local
cherrypy.lib.cptools.proxy(local=local)
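The proxy() hook above checks the forwarded-host headers that different reverse proxies send (the commented debug line lists X-Forwarded-Host, X-Host, Origin, and Host) and hands the first one present to CherryPy's stock proxy tool, so redirects and generated URLs use the outward-facing hostname. When the header is known in advance, the built-in tool can be enabled statically instead; a minimal sketch (illustrative configuration, not Tautulli's setup):

import cherrypy


class Root(object):
    @cherrypy.expose
    def index(self):
        # cherrypy.url() reflects the proxied host once tools.proxy rewrites the base URL.
        return "Base URL: %s" % cherrypy.url('/')


if __name__ == '__main__':
    cherrypy.config.update({
        'tools.proxy.on': True,                   # enable the built-in proxy tool
        'tools.proxy.local': 'X-Forwarded-Host',  # trust this header for the local host
    })
    cherrypy.quickstart(Root(), '/')

Tautulli resolves the header dynamically because it cannot know in advance which one the proxy in front of it will send.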