diff --git a/plexpy/__init__.py b/plexpy/__init__.py index 2a769374..eb1f73c2 100644 --- a/plexpy/__init__.py +++ b/plexpy/__init__.py @@ -242,6 +242,9 @@ def initialize(config_file): logger.info("Python {}".format( sys.version.replace('\n', '') )) + logger.info("SQLite {}".format( + sqlite3.sqlite_version + )) logger.info("Program Dir: {}".format( PROG_DIR @@ -618,1646 +621,1646 @@ def dbcheck(): # schema table :: This is a table which keeps track of the database version c_db.execute( - 'CREATE TABLE IF NOT EXISTS version_info (key TEXT UNIQUE, value TEXT)' + "CREATE TABLE IF NOT EXISTS version_info (key TEXT UNIQUE, value TEXT)" ) # sessions table :: This is a temp table that logs currently active sessions c_db.execute( - 'CREATE TABLE IF NOT EXISTS sessions (id INTEGER PRIMARY KEY AUTOINCREMENT, session_key INTEGER, session_id TEXT, ' - 'transcode_key TEXT, rating_key INTEGER, section_id INTEGER, media_type TEXT, started INTEGER, stopped INTEGER, ' - 'paused_counter INTEGER DEFAULT 0, state TEXT, user_id INTEGER, user TEXT, friendly_name TEXT, ' - 'ip_address TEXT, machine_id TEXT, bandwidth INTEGER, location TEXT, player TEXT, product TEXT, platform TEXT, ' - 'title TEXT, parent_title TEXT, grandparent_title TEXT, original_title TEXT, full_title TEXT, ' - 'media_index INTEGER, parent_media_index INTEGER, ' - 'thumb TEXT, parent_thumb TEXT, grandparent_thumb TEXT, year INTEGER, ' - 'parent_rating_key INTEGER, grandparent_rating_key INTEGER, ' - 'originally_available_at TEXT, added_at INTEGER, guid TEXT, ' - 'view_offset INTEGER DEFAULT 0, duration INTEGER, video_decision TEXT, audio_decision TEXT, ' - 'transcode_decision TEXT, container TEXT, bitrate INTEGER, width INTEGER, height INTEGER, ' - 'video_codec TEXT, video_bitrate INTEGER, video_resolution TEXT, video_width INTEGER, video_height INTEGER, ' - 'video_framerate TEXT, video_scan_type TEXT, video_full_resolution TEXT, ' - 'video_dynamic_range TEXT, aspect_ratio TEXT, ' - 'audio_codec TEXT, audio_bitrate INTEGER, audio_channels INTEGER, audio_language TEXT, audio_language_code TEXT, ' - 'subtitle_codec TEXT, subtitle_forced INTEGER, subtitle_language TEXT, ' - 'stream_bitrate INTEGER, stream_video_resolution TEXT, quality_profile TEXT, ' - 'stream_container_decision TEXT, stream_container TEXT, ' - 'stream_video_decision TEXT, stream_video_codec TEXT, stream_video_bitrate INTEGER, stream_video_width INTEGER, ' - 'stream_video_height INTEGER, stream_video_framerate TEXT, stream_video_scan_type TEXT, stream_video_full_resolution TEXT, ' - 'stream_video_dynamic_range TEXT, ' - 'stream_audio_decision TEXT, stream_audio_codec TEXT, stream_audio_bitrate INTEGER, stream_audio_channels INTEGER, ' - 'stream_audio_language TEXT, stream_audio_language_code TEXT, ' - 'subtitles INTEGER, stream_subtitle_decision TEXT, stream_subtitle_codec TEXT, ' - 'stream_subtitle_forced INTEGER, stream_subtitle_language TEXT, ' - 'transcode_protocol TEXT, transcode_container TEXT, ' - 'transcode_video_codec TEXT, transcode_audio_codec TEXT, transcode_audio_channels INTEGER,' - 'transcode_width INTEGER, transcode_height INTEGER, ' - 'transcode_hw_decoding INTEGER, transcode_hw_encoding INTEGER, ' - 'optimized_version INTEGER, optimized_version_profile TEXT, optimized_version_title TEXT, ' - 'synced_version INTEGER, synced_version_profile TEXT, ' - 'live INTEGER, live_uuid TEXT, channel_call_sign TEXT, channel_identifier TEXT, channel_thumb TEXT, ' - 'secure INTEGER, relayed INTEGER, ' - 'buffer_count INTEGER DEFAULT 0, buffer_last_triggered INTEGER, 
last_paused INTEGER, watched INTEGER DEFAULT 0, ' - 'intro INTEGER DEFAULT 0, credits INTEGER DEFAULT 0, commercial INTEGER DEFAULT 0, marker INTEGER DEFAULT 0, ' - 'initial_stream INTEGER DEFAULT 1, write_attempts INTEGER DEFAULT 0, raw_stream_info TEXT, ' - 'rating_key_websocket TEXT)' + "CREATE TABLE IF NOT EXISTS sessions (id INTEGER PRIMARY KEY AUTOINCREMENT, session_key INTEGER, session_id TEXT, " + "transcode_key TEXT, rating_key INTEGER, section_id INTEGER, media_type TEXT, started INTEGER, stopped INTEGER, " + "paused_counter INTEGER DEFAULT 0, state TEXT, user_id INTEGER, user TEXT, friendly_name TEXT, " + "ip_address TEXT, machine_id TEXT, bandwidth INTEGER, location TEXT, player TEXT, product TEXT, platform TEXT, " + "title TEXT, parent_title TEXT, grandparent_title TEXT, original_title TEXT, full_title TEXT, " + "media_index INTEGER, parent_media_index INTEGER, " + "thumb TEXT, parent_thumb TEXT, grandparent_thumb TEXT, year INTEGER, " + "parent_rating_key INTEGER, grandparent_rating_key INTEGER, " + "originally_available_at TEXT, added_at INTEGER, guid TEXT, " + "view_offset INTEGER DEFAULT 0, duration INTEGER, video_decision TEXT, audio_decision TEXT, " + "transcode_decision TEXT, container TEXT, bitrate INTEGER, width INTEGER, height INTEGER, " + "video_codec TEXT, video_bitrate INTEGER, video_resolution TEXT, video_width INTEGER, video_height INTEGER, " + "video_framerate TEXT, video_scan_type TEXT, video_full_resolution TEXT, " + "video_dynamic_range TEXT, aspect_ratio TEXT, " + "audio_codec TEXT, audio_bitrate INTEGER, audio_channels INTEGER, audio_language TEXT, audio_language_code TEXT, " + "subtitle_codec TEXT, subtitle_forced INTEGER, subtitle_language TEXT, " + "stream_bitrate INTEGER, stream_video_resolution TEXT, quality_profile TEXT, " + "stream_container_decision TEXT, stream_container TEXT, " + "stream_video_decision TEXT, stream_video_codec TEXT, stream_video_bitrate INTEGER, stream_video_width INTEGER, " + "stream_video_height INTEGER, stream_video_framerate TEXT, stream_video_scan_type TEXT, stream_video_full_resolution TEXT, " + "stream_video_dynamic_range TEXT, " + "stream_audio_decision TEXT, stream_audio_codec TEXT, stream_audio_bitrate INTEGER, stream_audio_channels INTEGER, " + "stream_audio_language TEXT, stream_audio_language_code TEXT, " + "subtitles INTEGER, stream_subtitle_decision TEXT, stream_subtitle_codec TEXT, " + "stream_subtitle_forced INTEGER, stream_subtitle_language TEXT, " + "transcode_protocol TEXT, transcode_container TEXT, " + "transcode_video_codec TEXT, transcode_audio_codec TEXT, transcode_audio_channels INTEGER," + "transcode_width INTEGER, transcode_height INTEGER, " + "transcode_hw_decoding INTEGER, transcode_hw_encoding INTEGER, " + "optimized_version INTEGER, optimized_version_profile TEXT, optimized_version_title TEXT, " + "synced_version INTEGER, synced_version_profile TEXT, " + "live INTEGER, live_uuid TEXT, channel_call_sign TEXT, channel_identifier TEXT, channel_thumb TEXT, " + "secure INTEGER, relayed INTEGER, " + "buffer_count INTEGER DEFAULT 0, buffer_last_triggered INTEGER, last_paused INTEGER, watched INTEGER DEFAULT 0, " + "intro INTEGER DEFAULT 0, credits INTEGER DEFAULT 0, commercial INTEGER DEFAULT 0, marker INTEGER DEFAULT 0, " + "initial_stream INTEGER DEFAULT 1, write_attempts INTEGER DEFAULT 0, raw_stream_info TEXT, " + "rating_key_websocket TEXT)" ) # sessions_continued table :: This is a temp table that keeps track of continued streaming sessions c_db.execute( - 'CREATE TABLE IF NOT EXISTS 
sessions_continued (id INTEGER PRIMARY KEY AUTOINCREMENT, ' - 'user_id INTEGER, machine_id TEXT, media_type TEXT, stopped INTEGER)' + "CREATE TABLE IF NOT EXISTS sessions_continued (id INTEGER PRIMARY KEY AUTOINCREMENT, " + "user_id INTEGER, machine_id TEXT, media_type TEXT, stopped INTEGER)" ) # session_history table :: This is a history table which logs essential stream details c_db.execute( - 'CREATE TABLE IF NOT EXISTS session_history (id INTEGER PRIMARY KEY AUTOINCREMENT, reference_id INTEGER, ' - 'started INTEGER, stopped INTEGER, rating_key INTEGER, user_id INTEGER, user TEXT, ' - 'ip_address TEXT, paused_counter INTEGER DEFAULT 0, player TEXT, product TEXT, product_version TEXT, ' - 'platform TEXT, platform_version TEXT, profile TEXT, machine_id TEXT, ' - 'bandwidth INTEGER, location TEXT, quality_profile TEXT, secure INTEGER, relayed INTEGER, ' - 'parent_rating_key INTEGER, grandparent_rating_key INTEGER, media_type TEXT, section_id INTEGER, ' - 'view_offset INTEGER DEFAULT 0)' + "CREATE TABLE IF NOT EXISTS session_history (id INTEGER PRIMARY KEY AUTOINCREMENT, reference_id INTEGER, " + "started INTEGER, stopped INTEGER, rating_key INTEGER, user_id INTEGER, user TEXT, " + "ip_address TEXT, paused_counter INTEGER DEFAULT 0, player TEXT, product TEXT, product_version TEXT, " + "platform TEXT, platform_version TEXT, profile TEXT, machine_id TEXT, " + "bandwidth INTEGER, location TEXT, quality_profile TEXT, secure INTEGER, relayed INTEGER, " + "parent_rating_key INTEGER, grandparent_rating_key INTEGER, media_type TEXT, section_id INTEGER, " + "view_offset INTEGER DEFAULT 0)" ) - # session_history_media_info table :: This is a table which logs each session's media info + # session_history_media_info table :: This is a table which logs each session's media info c_db.execute( - 'CREATE TABLE IF NOT EXISTS session_history_media_info (id INTEGER PRIMARY KEY, rating_key INTEGER, ' - 'video_decision TEXT, audio_decision TEXT, transcode_decision TEXT, duration INTEGER DEFAULT 0, ' - 'container TEXT, bitrate INTEGER, width INTEGER, height INTEGER, video_bitrate INTEGER, video_bit_depth INTEGER, ' - 'video_codec TEXT, video_codec_level TEXT, video_width INTEGER, video_height INTEGER, video_resolution TEXT, ' - 'video_framerate TEXT, video_scan_type TEXT, video_full_resolution TEXT, video_dynamic_range TEXT, aspect_ratio TEXT, ' - 'audio_bitrate INTEGER, audio_codec TEXT, audio_channels INTEGER, audio_language TEXT, audio_language_code TEXT, ' - 'subtitles INTEGER, subtitle_codec TEXT, subtitle_forced, subtitle_language TEXT,' - 'transcode_protocol TEXT, transcode_container TEXT, transcode_video_codec TEXT, transcode_audio_codec TEXT, ' - 'transcode_audio_channels INTEGER, transcode_width INTEGER, transcode_height INTEGER, ' - 'transcode_hw_requested INTEGER, transcode_hw_full_pipeline INTEGER, ' - 'transcode_hw_decode TEXT, transcode_hw_decode_title TEXT, transcode_hw_decoding INTEGER, ' - 'transcode_hw_encode TEXT, transcode_hw_encode_title TEXT, transcode_hw_encoding INTEGER, ' - 'stream_container TEXT, stream_container_decision TEXT, stream_bitrate INTEGER, ' - 'stream_video_decision TEXT, stream_video_bitrate INTEGER, stream_video_codec TEXT, stream_video_codec_level TEXT, ' - 'stream_video_bit_depth INTEGER, stream_video_height INTEGER, stream_video_width INTEGER, stream_video_resolution TEXT, ' - 'stream_video_framerate TEXT, stream_video_scan_type TEXT, stream_video_full_resolution TEXT, stream_video_dynamic_range TEXT, ' - 'stream_audio_decision TEXT, stream_audio_codec TEXT, 
stream_audio_bitrate INTEGER, stream_audio_channels INTEGER, ' - 'stream_audio_language TEXT, stream_audio_language_code TEXT, ' - 'stream_subtitle_decision TEXT, stream_subtitle_codec TEXT, ' - 'stream_subtitle_container TEXT, stream_subtitle_forced INTEGER, stream_subtitle_language TEXT, ' - 'synced_version INTEGER, synced_version_profile TEXT, ' - 'optimized_version INTEGER, optimized_version_profile TEXT, optimized_version_title TEXT)' + "CREATE TABLE IF NOT EXISTS session_history_media_info (id INTEGER PRIMARY KEY, rating_key INTEGER, " + "video_decision TEXT, audio_decision TEXT, transcode_decision TEXT, duration INTEGER DEFAULT 0, " + "container TEXT, bitrate INTEGER, width INTEGER, height INTEGER, video_bitrate INTEGER, video_bit_depth INTEGER, " + "video_codec TEXT, video_codec_level TEXT, video_width INTEGER, video_height INTEGER, video_resolution TEXT, " + "video_framerate TEXT, video_scan_type TEXT, video_full_resolution TEXT, video_dynamic_range TEXT, aspect_ratio TEXT, " + "audio_bitrate INTEGER, audio_codec TEXT, audio_channels INTEGER, audio_language TEXT, audio_language_code TEXT, " + "subtitles INTEGER, subtitle_codec TEXT, subtitle_forced, subtitle_language TEXT," + "transcode_protocol TEXT, transcode_container TEXT, transcode_video_codec TEXT, transcode_audio_codec TEXT, " + "transcode_audio_channels INTEGER, transcode_width INTEGER, transcode_height INTEGER, " + "transcode_hw_requested INTEGER, transcode_hw_full_pipeline INTEGER, " + "transcode_hw_decode TEXT, transcode_hw_decode_title TEXT, transcode_hw_decoding INTEGER, " + "transcode_hw_encode TEXT, transcode_hw_encode_title TEXT, transcode_hw_encoding INTEGER, " + "stream_container TEXT, stream_container_decision TEXT, stream_bitrate INTEGER, " + "stream_video_decision TEXT, stream_video_bitrate INTEGER, stream_video_codec TEXT, stream_video_codec_level TEXT, " + "stream_video_bit_depth INTEGER, stream_video_height INTEGER, stream_video_width INTEGER, stream_video_resolution TEXT, " + "stream_video_framerate TEXT, stream_video_scan_type TEXT, stream_video_full_resolution TEXT, stream_video_dynamic_range TEXT, " + "stream_audio_decision TEXT, stream_audio_codec TEXT, stream_audio_bitrate INTEGER, stream_audio_channels INTEGER, " + "stream_audio_language TEXT, stream_audio_language_code TEXT, " + "stream_subtitle_decision TEXT, stream_subtitle_codec TEXT, " + "stream_subtitle_container TEXT, stream_subtitle_forced INTEGER, stream_subtitle_language TEXT, " + "synced_version INTEGER, synced_version_profile TEXT, " + "optimized_version INTEGER, optimized_version_profile TEXT, optimized_version_title TEXT)" ) - # session_history_metadata table :: This is a table which logs each session's media metadata + # session_history_metadata table :: This is a table which logs each session's media metadata c_db.execute( - 'CREATE TABLE IF NOT EXISTS session_history_metadata (id INTEGER PRIMARY KEY, ' - 'rating_key INTEGER, parent_rating_key INTEGER, grandparent_rating_key INTEGER, ' - 'title TEXT, parent_title TEXT, grandparent_title TEXT, original_title TEXT, full_title TEXT, ' - 'media_index INTEGER, parent_media_index INTEGER, ' - 'thumb TEXT, parent_thumb TEXT, grandparent_thumb TEXT, ' - 'art TEXT, media_type TEXT, year INTEGER, originally_available_at TEXT, added_at INTEGER, updated_at INTEGER, ' - 'last_viewed_at INTEGER, content_rating TEXT, summary TEXT, tagline TEXT, rating TEXT, ' - 'duration INTEGER DEFAULT 0, guid TEXT, directors TEXT, writers TEXT, actors TEXT, genres TEXT, studio TEXT, ' - 'labels TEXT, live INTEGER 
DEFAULT 0, channel_call_sign TEXT, channel_identifier TEXT, channel_thumb TEXT, ' - 'marker_credits_first INTEGER DEFAULT NULL, marker_credits_final INTEGER DEFAULT NULL)' + "CREATE TABLE IF NOT EXISTS session_history_metadata (id INTEGER PRIMARY KEY, " + "rating_key INTEGER, parent_rating_key INTEGER, grandparent_rating_key INTEGER, " + "title TEXT, parent_title TEXT, grandparent_title TEXT, original_title TEXT, full_title TEXT, " + "media_index INTEGER, parent_media_index INTEGER, " + "thumb TEXT, parent_thumb TEXT, grandparent_thumb TEXT, " + "art TEXT, media_type TEXT, year INTEGER, originally_available_at TEXT, added_at INTEGER, updated_at INTEGER, " + "last_viewed_at INTEGER, content_rating TEXT, summary TEXT, tagline TEXT, rating TEXT, " + "duration INTEGER DEFAULT 0, guid TEXT, directors TEXT, writers TEXT, actors TEXT, genres TEXT, studio TEXT, " + "labels TEXT, live INTEGER DEFAULT 0, channel_call_sign TEXT, channel_identifier TEXT, channel_thumb TEXT, " + "marker_credits_first INTEGER DEFAULT NULL, marker_credits_final INTEGER DEFAULT NULL)" ) # users table :: This table keeps record of the friends list c_db.execute( - 'CREATE TABLE IF NOT EXISTS users (id INTEGER PRIMARY KEY AUTOINCREMENT, ' - 'user_id INTEGER DEFAULT NULL UNIQUE, username TEXT NOT NULL, friendly_name TEXT, ' - 'thumb TEXT, custom_avatar_url TEXT, title TEXT, email TEXT, ' - 'is_active INTEGER DEFAULT 1, is_admin INTEGER DEFAULT 0, is_home_user INTEGER DEFAULT NULL, ' - 'is_allow_sync INTEGER DEFAULT NULL, is_restricted INTEGER DEFAULT NULL, ' - 'do_notify INTEGER DEFAULT 1, keep_history INTEGER DEFAULT 1, deleted_user INTEGER DEFAULT 0, ' - 'allow_guest INTEGER DEFAULT 0, user_token TEXT, server_token TEXT, shared_libraries TEXT, ' - 'filter_all TEXT, filter_movies TEXT, filter_tv TEXT, filter_music TEXT, filter_photos TEXT)' + "CREATE TABLE IF NOT EXISTS users (id INTEGER PRIMARY KEY AUTOINCREMENT, " + "user_id INTEGER DEFAULT NULL UNIQUE, username TEXT NOT NULL, friendly_name TEXT, " + "thumb TEXT, custom_avatar_url TEXT, title TEXT, email TEXT, " + "is_active INTEGER DEFAULT 1, is_admin INTEGER DEFAULT 0, is_home_user INTEGER DEFAULT NULL, " + "is_allow_sync INTEGER DEFAULT NULL, is_restricted INTEGER DEFAULT NULL, " + "do_notify INTEGER DEFAULT 1, keep_history INTEGER DEFAULT 1, deleted_user INTEGER DEFAULT 0, " + "allow_guest INTEGER DEFAULT 0, user_token TEXT, server_token TEXT, shared_libraries TEXT, " + "filter_all TEXT, filter_movies TEXT, filter_tv TEXT, filter_music TEXT, filter_photos TEXT)" ) # library_sections table :: This table keeps record of the servers library sections c_db.execute( - 'CREATE TABLE IF NOT EXISTS library_sections (id INTEGER PRIMARY KEY AUTOINCREMENT, ' - 'server_id TEXT, section_id INTEGER, section_name TEXT, section_type TEXT, agent TEXT, ' - 'thumb TEXT, custom_thumb_url TEXT, art TEXT, custom_art_url TEXT, ' - 'count INTEGER, parent_count INTEGER, child_count INTEGER, is_active INTEGER DEFAULT 1, ' - 'do_notify INTEGER DEFAULT 1, do_notify_created INTEGER DEFAULT 1, keep_history INTEGER DEFAULT 1, ' - 'deleted_section INTEGER DEFAULT 0, UNIQUE(server_id, section_id))' + "CREATE TABLE IF NOT EXISTS library_sections (id INTEGER PRIMARY KEY AUTOINCREMENT, " + "server_id TEXT, section_id INTEGER, section_name TEXT, section_type TEXT, agent TEXT, " + "thumb TEXT, custom_thumb_url TEXT, art TEXT, custom_art_url TEXT, " + "count INTEGER, parent_count INTEGER, child_count INTEGER, is_active INTEGER DEFAULT 1, " + "do_notify INTEGER DEFAULT 1, do_notify_created INTEGER DEFAULT 1, 
keep_history INTEGER DEFAULT 1, " + "deleted_section INTEGER DEFAULT 0, UNIQUE(server_id, section_id))" ) # user_login table :: This table keeps record of the Tautulli guest logins c_db.execute( - 'CREATE TABLE IF NOT EXISTS user_login (id INTEGER PRIMARY KEY AUTOINCREMENT, ' - 'timestamp INTEGER, user_id INTEGER, user TEXT, user_group TEXT, ' - 'ip_address TEXT, host TEXT, user_agent TEXT, success INTEGER DEFAULT 1,' - 'expiry TEXT, jwt_token TEXT)' + "CREATE TABLE IF NOT EXISTS user_login (id INTEGER PRIMARY KEY AUTOINCREMENT, " + "timestamp INTEGER, user_id INTEGER, user TEXT, user_group TEXT, " + "ip_address TEXT, host TEXT, user_agent TEXT, success INTEGER DEFAULT 1," + "expiry TEXT, jwt_token TEXT)" ) # notifiers table :: This table keeps record of the notification agent settings c_db.execute( - 'CREATE TABLE IF NOT EXISTS notifiers (id INTEGER PRIMARY KEY AUTOINCREMENT, ' - 'agent_id INTEGER, agent_name TEXT, agent_label TEXT, friendly_name TEXT, notifier_config TEXT, ' - 'on_play INTEGER DEFAULT 0, on_stop INTEGER DEFAULT 0, on_pause INTEGER DEFAULT 0, ' - 'on_resume INTEGER DEFAULT 0, on_change INTEGER DEFAULT 0, on_buffer INTEGER DEFAULT 0, ' - 'on_error INTEGER DEFAULT 0, ' - 'on_intro INTEGER DEFAULT 0, on_credits INTEGER DEFAULT 0, on_commercial INTEGER DEFAULT 0, ' - 'on_watched INTEGER DEFAULT 0, on_created INTEGER DEFAULT 0, ' - 'on_extdown INTEGER DEFAULT 0, on_intdown INTEGER DEFAULT 0, ' - 'on_extup INTEGER DEFAULT 0, on_intup INTEGER DEFAULT 0, on_pmsupdate INTEGER DEFAULT 0, ' - 'on_concurrent INTEGER DEFAULT 0, on_newdevice INTEGER DEFAULT 0, on_plexpyupdate INTEGER DEFAULT 0, ' - 'on_plexpydbcorrupt INTEGER DEFAULT 0, ' - 'on_play_subject TEXT, on_stop_subject TEXT, on_pause_subject TEXT, ' - 'on_resume_subject TEXT, on_change_subject TEXT, on_buffer_subject TEXT, on_error_subject TEXT, ' - 'on_intro_subject TEXT, on_credits_subject TEXT, on_commercial_subject TEXT,' - 'on_watched_subject TEXT, on_created_subject TEXT, on_extdown_subject TEXT, on_intdown_subject TEXT, ' - 'on_extup_subject TEXT, on_intup_subject TEXT, on_pmsupdate_subject TEXT, ' - 'on_concurrent_subject TEXT, on_newdevice_subject TEXT, on_plexpyupdate_subject TEXT, ' - 'on_plexpydbcorrupt_subject TEXT, ' - 'on_play_body TEXT, on_stop_body TEXT, on_pause_body TEXT, ' - 'on_resume_body TEXT, on_change_body TEXT, on_buffer_body TEXT, on_error_body TEXT, ' - 'on_intro_body TEXT, on_credits_body TEXT, on_commercial_body TEXT, ' - 'on_watched_body TEXT, on_created_body TEXT, on_extdown_body TEXT, on_intdown_body TEXT, ' - 'on_extup_body TEXT, on_intup_body TEXT, on_pmsupdate_body TEXT, ' - 'on_concurrent_body TEXT, on_newdevice_body TEXT, on_plexpyupdate_body TEXT, ' - 'on_plexpydbcorrupt_body TEXT, ' - 'custom_conditions TEXT, custom_conditions_logic TEXT)' + "CREATE TABLE IF NOT EXISTS notifiers (id INTEGER PRIMARY KEY AUTOINCREMENT, " + "agent_id INTEGER, agent_name TEXT, agent_label TEXT, friendly_name TEXT, notifier_config TEXT, " + "on_play INTEGER DEFAULT 0, on_stop INTEGER DEFAULT 0, on_pause INTEGER DEFAULT 0, " + "on_resume INTEGER DEFAULT 0, on_change INTEGER DEFAULT 0, on_buffer INTEGER DEFAULT 0, " + "on_error INTEGER DEFAULT 0, " + "on_intro INTEGER DEFAULT 0, on_credits INTEGER DEFAULT 0, on_commercial INTEGER DEFAULT 0, " + "on_watched INTEGER DEFAULT 0, on_created INTEGER DEFAULT 0, " + "on_extdown INTEGER DEFAULT 0, on_intdown INTEGER DEFAULT 0, " + "on_extup INTEGER DEFAULT 0, on_intup INTEGER DEFAULT 0, on_pmsupdate INTEGER DEFAULT 0, " + "on_concurrent INTEGER DEFAULT 0, on_newdevice 
INTEGER DEFAULT 0, on_plexpyupdate INTEGER DEFAULT 0, " + "on_plexpydbcorrupt INTEGER DEFAULT 0, " + "on_play_subject TEXT, on_stop_subject TEXT, on_pause_subject TEXT, " + "on_resume_subject TEXT, on_change_subject TEXT, on_buffer_subject TEXT, on_error_subject TEXT, " + "on_intro_subject TEXT, on_credits_subject TEXT, on_commercial_subject TEXT," + "on_watched_subject TEXT, on_created_subject TEXT, on_extdown_subject TEXT, on_intdown_subject TEXT, " + "on_extup_subject TEXT, on_intup_subject TEXT, on_pmsupdate_subject TEXT, " + "on_concurrent_subject TEXT, on_newdevice_subject TEXT, on_plexpyupdate_subject TEXT, " + "on_plexpydbcorrupt_subject TEXT, " + "on_play_body TEXT, on_stop_body TEXT, on_pause_body TEXT, " + "on_resume_body TEXT, on_change_body TEXT, on_buffer_body TEXT, on_error_body TEXT, " + "on_intro_body TEXT, on_credits_body TEXT, on_commercial_body TEXT, " + "on_watched_body TEXT, on_created_body TEXT, on_extdown_body TEXT, on_intdown_body TEXT, " + "on_extup_body TEXT, on_intup_body TEXT, on_pmsupdate_body TEXT, " + "on_concurrent_body TEXT, on_newdevice_body TEXT, on_plexpyupdate_body TEXT, " + "on_plexpydbcorrupt_body TEXT, " + "custom_conditions TEXT, custom_conditions_logic TEXT)" ) # notify_log table :: This is a table which logs notifications sent c_db.execute( - 'CREATE TABLE IF NOT EXISTS notify_log (id INTEGER PRIMARY KEY AUTOINCREMENT, timestamp INTEGER, ' - 'session_key INTEGER, rating_key INTEGER, parent_rating_key INTEGER, grandparent_rating_key INTEGER, ' - 'user_id INTEGER, user TEXT, notifier_id INTEGER, agent_id INTEGER, agent_name TEXT, notify_action TEXT, ' - 'subject_text TEXT, body_text TEXT, script_args TEXT, success INTEGER DEFAULT 0, tag TEXT)' + "CREATE TABLE IF NOT EXISTS notify_log (id INTEGER PRIMARY KEY AUTOINCREMENT, timestamp INTEGER, " + "session_key INTEGER, rating_key INTEGER, parent_rating_key INTEGER, grandparent_rating_key INTEGER, " + "user_id INTEGER, user TEXT, notifier_id INTEGER, agent_id INTEGER, agent_name TEXT, notify_action TEXT, " + "subject_text TEXT, body_text TEXT, script_args TEXT, success INTEGER DEFAULT 0, tag TEXT)" ) # newsletters table :: This table keeps record of the newsletter settings c_db.execute( - 'CREATE TABLE IF NOT EXISTS newsletters (id INTEGER PRIMARY KEY AUTOINCREMENT, ' - 'agent_id INTEGER, agent_name TEXT, agent_label TEXT, id_name TEXT NOT NULL, ' - 'friendly_name TEXT, newsletter_config TEXT, email_config TEXT, ' - 'subject TEXT, body TEXT, message TEXT, ' - 'cron TEXT NOT NULL DEFAULT \'0 0 * * 0\', active INTEGER DEFAULT 0)' + "CREATE TABLE IF NOT EXISTS newsletters (id INTEGER PRIMARY KEY AUTOINCREMENT, " + "agent_id INTEGER, agent_name TEXT, agent_label TEXT, id_name TEXT NOT NULL, " + "friendly_name TEXT, newsletter_config TEXT, email_config TEXT, " + "subject TEXT, body TEXT, message TEXT, " + "cron TEXT NOT NULL DEFAULT '0 0 * * 0', active INTEGER DEFAULT 0)" ) # newsletter_log table :: This is a table which logs newsletters sent c_db.execute( - 'CREATE TABLE IF NOT EXISTS newsletter_log (id INTEGER PRIMARY KEY AUTOINCREMENT, timestamp INTEGER, ' - 'newsletter_id INTEGER, agent_id INTEGER, agent_name TEXT, notify_action TEXT, ' - 'subject_text TEXT, body_text TEXT, message_text TEXT, start_date TEXT, end_date TEXT, ' - 'start_time INTEGER, end_time INTEGER, uuid TEXT UNIQUE, filename TEXT, email_msg_id TEXT, ' - 'success INTEGER DEFAULT 0)' + "CREATE TABLE IF NOT EXISTS newsletter_log (id INTEGER PRIMARY KEY AUTOINCREMENT, timestamp INTEGER, " + "newsletter_id INTEGER, agent_id INTEGER, 
agent_name TEXT, notify_action TEXT, " + "subject_text TEXT, body_text TEXT, message_text TEXT, start_date TEXT, end_date TEXT, " + "start_time INTEGER, end_time INTEGER, uuid TEXT UNIQUE, filename TEXT, email_msg_id TEXT, " + "success INTEGER DEFAULT 0)" ) # recently_added table :: This table keeps record of recently added items c_db.execute( - 'CREATE TABLE IF NOT EXISTS recently_added (id INTEGER PRIMARY KEY AUTOINCREMENT, ' - 'added_at INTEGER, pms_identifier TEXT, section_id INTEGER, ' - 'rating_key INTEGER, parent_rating_key INTEGER, grandparent_rating_key INTEGER, media_type TEXT, ' - 'media_info TEXT)' + "CREATE TABLE IF NOT EXISTS recently_added (id INTEGER PRIMARY KEY AUTOINCREMENT, " + "added_at INTEGER, pms_identifier TEXT, section_id INTEGER, " + "rating_key INTEGER, parent_rating_key INTEGER, grandparent_rating_key INTEGER, media_type TEXT, " + "media_info TEXT)" ) # mobile_devices table :: This table keeps record of devices linked with the mobile app c_db.execute( - 'CREATE TABLE IF NOT EXISTS mobile_devices (id INTEGER PRIMARY KEY AUTOINCREMENT, ' - 'device_id TEXT NOT NULL UNIQUE, device_token TEXT, device_name TEXT, ' - 'platform TEXT, version TEXT, friendly_name TEXT, ' - 'onesignal_id TEXT, last_seen INTEGER, official INTEGER DEFAULT 0)' + "CREATE TABLE IF NOT EXISTS mobile_devices (id INTEGER PRIMARY KEY AUTOINCREMENT, " + "device_id TEXT NOT NULL UNIQUE, device_token TEXT, device_name TEXT, " + "platform TEXT, version TEXT, friendly_name TEXT, " + "onesignal_id TEXT, last_seen INTEGER, official INTEGER DEFAULT 0)" ) # tvmaze_lookup table :: This table keeps record of the TVmaze lookups c_db.execute( - 'CREATE TABLE IF NOT EXISTS tvmaze_lookup (id INTEGER PRIMARY KEY AUTOINCREMENT, ' - 'rating_key INTEGER, thetvdb_id INTEGER, imdb_id TEXT, ' - 'tvmaze_id INTEGER, tvmaze_url TEXT, tvmaze_json TEXT)' + "CREATE TABLE IF NOT EXISTS tvmaze_lookup (id INTEGER PRIMARY KEY AUTOINCREMENT, " + "rating_key INTEGER, thetvdb_id INTEGER, imdb_id TEXT, " + "tvmaze_id INTEGER, tvmaze_url TEXT, tvmaze_json TEXT)" ) # themoviedb_lookup table :: This table keeps record of the TheMovieDB lookups c_db.execute( - 'CREATE TABLE IF NOT EXISTS themoviedb_lookup (id INTEGER PRIMARY KEY AUTOINCREMENT, ' - 'rating_key INTEGER, thetvdb_id INTEGER, imdb_id TEXT, ' - 'themoviedb_id INTEGER, themoviedb_url TEXT, themoviedb_json TEXT)' + "CREATE TABLE IF NOT EXISTS themoviedb_lookup (id INTEGER PRIMARY KEY AUTOINCREMENT, " + "rating_key INTEGER, thetvdb_id INTEGER, imdb_id TEXT, " + "themoviedb_id INTEGER, themoviedb_url TEXT, themoviedb_json TEXT)" ) # musicbrainz_lookup table :: This table keeps record of the MusicBrainz lookups c_db.execute( - 'CREATE TABLE IF NOT EXISTS musicbrainz_lookup (id INTEGER PRIMARY KEY AUTOINCREMENT, ' - 'rating_key INTEGER, musicbrainz_id INTEGER, musicbrainz_url TEXT, musicbrainz_type TEXT, ' - 'musicbrainz_json TEXT)' + "CREATE TABLE IF NOT EXISTS musicbrainz_lookup (id INTEGER PRIMARY KEY AUTOINCREMENT, " + "rating_key INTEGER, musicbrainz_id INTEGER, musicbrainz_url TEXT, musicbrainz_type TEXT, " + "musicbrainz_json TEXT)" ) # image_hash_lookup table :: This table keeps record of the image hash lookups c_db.execute( - 'CREATE TABLE IF NOT EXISTS image_hash_lookup (id INTEGER PRIMARY KEY AUTOINCREMENT, ' - 'img_hash TEXT UNIQUE, img TEXT, rating_key INTEGER, width INTEGER, height INTEGER, ' - 'opacity INTEGER, background TEXT, blur INTEGER, fallback TEXT)' + "CREATE TABLE IF NOT EXISTS image_hash_lookup (id INTEGER PRIMARY KEY AUTOINCREMENT, " + "img_hash TEXT 
UNIQUE, img TEXT, rating_key INTEGER, width INTEGER, height INTEGER, " + "opacity INTEGER, background TEXT, blur INTEGER, fallback TEXT)" ) # imgur_lookup table :: This table keeps record of the Imgur uploads c_db.execute( - 'CREATE TABLE IF NOT EXISTS imgur_lookup (id INTEGER PRIMARY KEY AUTOINCREMENT, ' - 'img_hash TEXT, imgur_title TEXT, imgur_url TEXT, delete_hash TEXT)' + "CREATE TABLE IF NOT EXISTS imgur_lookup (id INTEGER PRIMARY KEY AUTOINCREMENT, " + "img_hash TEXT, imgur_title TEXT, imgur_url TEXT, delete_hash TEXT)" ) # cloudinary_lookup table :: This table keeps record of the Cloudinary uploads c_db.execute( - 'CREATE TABLE IF NOT EXISTS cloudinary_lookup (id INTEGER PRIMARY KEY AUTOINCREMENT, ' - 'img_hash TEXT, cloudinary_title TEXT, cloudinary_url TEXT)' + "CREATE TABLE IF NOT EXISTS cloudinary_lookup (id INTEGER PRIMARY KEY AUTOINCREMENT, " + "img_hash TEXT, cloudinary_title TEXT, cloudinary_url TEXT)" ) # exports table :: This table keeps record of the exported files c_db.execute( - 'CREATE TABLE IF NOT EXISTS exports (id INTEGER PRIMARY KEY AUTOINCREMENT, ' - 'timestamp INTEGER, section_id INTEGER, user_id INTEGER, rating_key INTEGER, media_type TEXT, ' - 'title TEXT, file_format TEXT, ' - 'metadata_level INTEGER, media_info_level INTEGER, ' - 'thumb_level INTEGER DEFAULT 0, art_level INTEGER DEFAULT 0, ' - 'custom_fields TEXT, individual_files INTEGER DEFAULT 0, ' - 'file_size INTEGER DEFAULT 0, complete INTEGER DEFAULT 0, ' - 'exported_items INTEGER DEFAULT 0, total_items INTEGER DEFAULT 0)' + "CREATE TABLE IF NOT EXISTS exports (id INTEGER PRIMARY KEY AUTOINCREMENT, " + "timestamp INTEGER, section_id INTEGER, user_id INTEGER, rating_key INTEGER, media_type TEXT, " + "title TEXT, file_format TEXT, " + "metadata_level INTEGER, media_info_level INTEGER, " + "thumb_level INTEGER DEFAULT 0, art_level INTEGER DEFAULT 0, " + "custom_fields TEXT, individual_files INTEGER DEFAULT 0, " + "file_size INTEGER DEFAULT 0, complete INTEGER DEFAULT 0, " + "exported_items INTEGER DEFAULT 0, total_items INTEGER DEFAULT 0)" ) # Upgrade sessions table from earlier versions try: - c_db.execute('SELECT started FROM sessions') + c_db.execute("SELECT started FROM sessions") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table sessions.") c_db.execute( - 'ALTER TABLE sessions ADD COLUMN started INTEGER' + "ALTER TABLE sessions ADD COLUMN started INTEGER" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN paused_counter INTEGER DEFAULT 0' + "ALTER TABLE sessions ADD COLUMN paused_counter INTEGER DEFAULT 0" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN state TEXT' + "ALTER TABLE sessions ADD COLUMN state TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN user TEXT' + "ALTER TABLE sessions ADD COLUMN user TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN machine_id TEXT' + "ALTER TABLE sessions ADD COLUMN machine_id TEXT" ) # Upgrade sessions table from earlier versions try: - c_db.execute('SELECT title FROM sessions') + c_db.execute("SELECT title FROM sessions") except sqlite3.OperationalError: logger.debug("Altering database. 
Updating database table sessions.") c_db.execute( - 'ALTER TABLE sessions ADD COLUMN title TEXT' + "ALTER TABLE sessions ADD COLUMN title TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN parent_title TEXT' + "ALTER TABLE sessions ADD COLUMN parent_title TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN grandparent_title TEXT' + "ALTER TABLE sessions ADD COLUMN grandparent_title TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN friendly_name TEXT' + "ALTER TABLE sessions ADD COLUMN friendly_name TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN player TEXT' + "ALTER TABLE sessions ADD COLUMN player TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN user_id INTEGER' + "ALTER TABLE sessions ADD COLUMN user_id INTEGER" ) # Upgrade sessions table from earlier versions try: - c_db.execute('SELECT ip_address FROM sessions') + c_db.execute("SELECT ip_address FROM sessions") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table sessions.") c_db.execute( - 'ALTER TABLE sessions ADD COLUMN ip_address TEXT' + "ALTER TABLE sessions ADD COLUMN ip_address TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN platform TEXT' + "ALTER TABLE sessions ADD COLUMN platform TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN parent_rating_key INTEGER' + "ALTER TABLE sessions ADD COLUMN parent_rating_key INTEGER" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN grandparent_rating_key INTEGER' + "ALTER TABLE sessions ADD COLUMN grandparent_rating_key INTEGER" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN view_offset INTEGER DEFAULT 0' + "ALTER TABLE sessions ADD COLUMN view_offset INTEGER DEFAULT 0" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN duration INTEGER' + "ALTER TABLE sessions ADD COLUMN duration INTEGER" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN video_decision TEXT' + "ALTER TABLE sessions ADD COLUMN video_decision TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN audio_decision TEXT' + "ALTER TABLE sessions ADD COLUMN audio_decision TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN width INTEGER' + "ALTER TABLE sessions ADD COLUMN width INTEGER" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN height INTEGER' + "ALTER TABLE sessions ADD COLUMN height INTEGER" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN container TEXT' + "ALTER TABLE sessions ADD COLUMN container TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN video_codec TEXT' + "ALTER TABLE sessions ADD COLUMN video_codec TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN audio_codec TEXT' + "ALTER TABLE sessions ADD COLUMN audio_codec TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN bitrate INTEGER' + "ALTER TABLE sessions ADD COLUMN bitrate INTEGER" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN video_resolution TEXT' + "ALTER TABLE sessions ADD COLUMN video_resolution TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN video_framerate TEXT' + "ALTER TABLE sessions ADD COLUMN video_framerate TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN aspect_ratio TEXT' + "ALTER TABLE sessions ADD COLUMN aspect_ratio TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN audio_channels INTEGER' + "ALTER TABLE sessions ADD COLUMN audio_channels INTEGER" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN transcode_protocol TEXT' + "ALTER TABLE sessions ADD COLUMN transcode_protocol TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN transcode_container TEXT' + "ALTER TABLE sessions ADD COLUMN 
transcode_container TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN transcode_video_codec TEXT' + "ALTER TABLE sessions ADD COLUMN transcode_video_codec TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN transcode_audio_codec TEXT' + "ALTER TABLE sessions ADD COLUMN transcode_audio_codec TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN transcode_audio_channels INTEGER' + "ALTER TABLE sessions ADD COLUMN transcode_audio_channels INTEGER" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN transcode_width INTEGER' + "ALTER TABLE sessions ADD COLUMN transcode_width INTEGER" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN transcode_height INTEGER' + "ALTER TABLE sessions ADD COLUMN transcode_height INTEGER" ) # Upgrade sessions table from earlier versions try: - c_db.execute('SELECT buffer_count FROM sessions') + c_db.execute("SELECT buffer_count FROM sessions") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table sessions.") c_db.execute( - 'ALTER TABLE sessions ADD COLUMN buffer_count INTEGER DEFAULT 0' + "ALTER TABLE sessions ADD COLUMN buffer_count INTEGER DEFAULT 0" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN buffer_last_triggered INTEGER' + "ALTER TABLE sessions ADD COLUMN buffer_last_triggered INTEGER" ) # Upgrade sessions table from earlier versions try: - c_db.execute('SELECT last_paused FROM sessions') + c_db.execute("SELECT last_paused FROM sessions") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table sessions.") c_db.execute( - 'ALTER TABLE sessions ADD COLUMN last_paused INTEGER' + "ALTER TABLE sessions ADD COLUMN last_paused INTEGER" ) # Upgrade sessions table from earlier versions try: - c_db.execute('SELECT section_id FROM sessions') + c_db.execute("SELECT section_id FROM sessions") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table sessions.") c_db.execute( - 'ALTER TABLE sessions ADD COLUMN section_id INTEGER' + "ALTER TABLE sessions ADD COLUMN section_id INTEGER" ) # Upgrade sessions table from earlier versions try: - c_db.execute('SELECT stopped FROM sessions') + c_db.execute("SELECT stopped FROM sessions") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table sessions.") c_db.execute( - 'ALTER TABLE sessions ADD COLUMN stopped INTEGER' + "ALTER TABLE sessions ADD COLUMN stopped INTEGER" ) # Upgrade sessions table from earlier versions try: - c_db.execute('SELECT transcode_key FROM sessions') + c_db.execute("SELECT transcode_key FROM sessions") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table sessions.") c_db.execute( - 'ALTER TABLE sessions ADD COLUMN transcode_key TEXT' + "ALTER TABLE sessions ADD COLUMN transcode_key TEXT" ) # Upgrade sessions table from earlier versions try: - c_db.execute('SELECT write_attempts FROM sessions') + c_db.execute("SELECT write_attempts FROM sessions") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table sessions.") c_db.execute( - 'ALTER TABLE sessions ADD COLUMN write_attempts INTEGER DEFAULT 0' + "ALTER TABLE sessions ADD COLUMN write_attempts INTEGER DEFAULT 0" ) # Upgrade sessions table from earlier versions try: - c_db.execute('SELECT transcode_decision FROM sessions') + c_db.execute("SELECT transcode_decision FROM sessions") except sqlite3.OperationalError: logger.debug("Altering database. 
Updating database table sessions.") c_db.execute( - 'ALTER TABLE sessions ADD COLUMN transcode_decision TEXT' + "ALTER TABLE sessions ADD COLUMN transcode_decision TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN full_title TEXT' + "ALTER TABLE sessions ADD COLUMN full_title TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN media_index INTEGER' + "ALTER TABLE sessions ADD COLUMN media_index INTEGER" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN parent_media_index INTEGER' + "ALTER TABLE sessions ADD COLUMN parent_media_index INTEGER" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN thumb TEXT' + "ALTER TABLE sessions ADD COLUMN thumb TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN parent_thumb TEXT' + "ALTER TABLE sessions ADD COLUMN parent_thumb TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN grandparent_thumb TEXT' + "ALTER TABLE sessions ADD COLUMN grandparent_thumb TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN year INTEGER' + "ALTER TABLE sessions ADD COLUMN year INTEGER" ) # Upgrade sessions table from earlier versions try: - c_db.execute('SELECT raw_stream_info FROM sessions') + c_db.execute("SELECT raw_stream_info FROM sessions") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table sessions.") c_db.execute( - 'ALTER TABLE sessions ADD COLUMN product TEXT' + "ALTER TABLE sessions ADD COLUMN product TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN optimized_version INTEGER' + "ALTER TABLE sessions ADD COLUMN optimized_version INTEGER" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN optimized_version_profile TEXT' + "ALTER TABLE sessions ADD COLUMN optimized_version_profile TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN synced_version INTEGER' + "ALTER TABLE sessions ADD COLUMN synced_version INTEGER" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN video_bitrate INTEGER' + "ALTER TABLE sessions ADD COLUMN video_bitrate INTEGER" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN video_width INTEGER' + "ALTER TABLE sessions ADD COLUMN video_width INTEGER" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN video_height INTEGER' + "ALTER TABLE sessions ADD COLUMN video_height INTEGER" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN audio_bitrate INTEGER' + "ALTER TABLE sessions ADD COLUMN audio_bitrate INTEGER" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN subtitle_codec TEXT' + "ALTER TABLE sessions ADD COLUMN subtitle_codec TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN stream_bitrate INTEGER' + "ALTER TABLE sessions ADD COLUMN stream_bitrate INTEGER" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN stream_video_resolution TEXT' + "ALTER TABLE sessions ADD COLUMN stream_video_resolution TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN quality_profile TEXT' + "ALTER TABLE sessions ADD COLUMN quality_profile TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN stream_container_decision TEXT' + "ALTER TABLE sessions ADD COLUMN stream_container_decision TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN stream_container TEXT' + "ALTER TABLE sessions ADD COLUMN stream_container TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN stream_video_decision TEXT' + "ALTER TABLE sessions ADD COLUMN stream_video_decision TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN stream_video_codec TEXT' + "ALTER TABLE sessions ADD COLUMN stream_video_codec TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN stream_video_bitrate INTEGER' + "ALTER 
TABLE sessions ADD COLUMN stream_video_bitrate INTEGER" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN stream_video_width INTEGER' + "ALTER TABLE sessions ADD COLUMN stream_video_width INTEGER" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN stream_video_height INTEGER' + "ALTER TABLE sessions ADD COLUMN stream_video_height INTEGER" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN stream_video_framerate TEXT' + "ALTER TABLE sessions ADD COLUMN stream_video_framerate TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN stream_audio_decision TEXT' + "ALTER TABLE sessions ADD COLUMN stream_audio_decision TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN stream_audio_codec TEXT' + "ALTER TABLE sessions ADD COLUMN stream_audio_codec TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN stream_audio_bitrate INTEGER' + "ALTER TABLE sessions ADD COLUMN stream_audio_bitrate INTEGER" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN stream_audio_channels INTEGER' + "ALTER TABLE sessions ADD COLUMN stream_audio_channels INTEGER" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN subtitles INTEGER' + "ALTER TABLE sessions ADD COLUMN subtitles INTEGER" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN stream_subtitle_decision TEXT' + "ALTER TABLE sessions ADD COLUMN stream_subtitle_decision TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN stream_subtitle_codec TEXT' + "ALTER TABLE sessions ADD COLUMN stream_subtitle_codec TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN raw_stream_info TEXT' + "ALTER TABLE sessions ADD COLUMN raw_stream_info TEXT" ) # Upgrade sessions table from earlier versions try: - c_db.execute('SELECT video_height FROM sessions') + c_db.execute("SELECT video_height FROM sessions") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table sessions.") c_db.execute( - 'ALTER TABLE sessions ADD COLUMN video_height INTEGER' + "ALTER TABLE sessions ADD COLUMN video_height INTEGER" ) # Upgrade sessions table from earlier versions try: - c_db.execute('SELECT subtitles FROM sessions') + c_db.execute("SELECT subtitles FROM sessions") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table sessions.") c_db.execute( - 'ALTER TABLE sessions ADD COLUMN subtitles INTEGER' + "ALTER TABLE sessions ADD COLUMN subtitles INTEGER" ) # Upgrade sessions table from earlier versions try: - c_db.execute('SELECT synced_version_profile FROM sessions') + c_db.execute("SELECT synced_version_profile FROM sessions") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table sessions.") c_db.execute( - 'ALTER TABLE sessions ADD COLUMN synced_version_profile TEXT' + "ALTER TABLE sessions ADD COLUMN synced_version_profile TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN optimized_version_title TEXT' + "ALTER TABLE sessions ADD COLUMN optimized_version_title TEXT" ) # Upgrade sessions table from earlier versions try: - c_db.execute('SELECT transcode_hw_decoding FROM sessions') + c_db.execute("SELECT transcode_hw_decoding FROM sessions") except sqlite3.OperationalError: logger.debug("Altering database. 
Updating database table sessions.") c_db.execute( - 'ALTER TABLE sessions ADD COLUMN transcode_hw_decoding INTEGER' + "ALTER TABLE sessions ADD COLUMN transcode_hw_decoding INTEGER" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN transcode_hw_encoding INTEGER' + "ALTER TABLE sessions ADD COLUMN transcode_hw_encoding INTEGER" ) # Upgrade sessions table from earlier versions try: - c_db.execute('SELECT watched FROM sessions') + c_db.execute("SELECT watched FROM sessions") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table sessions.") c_db.execute( - 'ALTER TABLE sessions ADD COLUMN watched INTEGER DEFAULT 0' + "ALTER TABLE sessions ADD COLUMN watched INTEGER DEFAULT 0" ) # Upgrade sessions table from earlier versions try: - c_db.execute('SELECT live FROM sessions') + c_db.execute("SELECT live FROM sessions") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table sessions.") c_db.execute( - 'ALTER TABLE sessions ADD COLUMN live INTEGER' + "ALTER TABLE sessions ADD COLUMN live INTEGER" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN live_uuid TEXT' + "ALTER TABLE sessions ADD COLUMN live_uuid TEXT" ) # Upgrade sessions table from earlier versions try: - c_db.execute('SELECT session_id FROM sessions') + c_db.execute("SELECT session_id FROM sessions") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table sessions.") c_db.execute( - 'ALTER TABLE sessions ADD COLUMN session_id TEXT' + "ALTER TABLE sessions ADD COLUMN session_id TEXT" ) # Upgrade sessions table from earlier versions try: - c_db.execute('SELECT original_title FROM sessions') + c_db.execute("SELECT original_title FROM sessions") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table sessions.") c_db.execute( - 'ALTER TABLE sessions ADD COLUMN original_title TEXT' + "ALTER TABLE sessions ADD COLUMN original_title TEXT" ) # Upgrade sessions table from earlier versions try: - c_db.execute('SELECT secure FROM sessions') + c_db.execute("SELECT secure FROM sessions") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table sessions.") c_db.execute( - 'ALTER TABLE sessions ADD COLUMN secure INTEGER' + "ALTER TABLE sessions ADD COLUMN secure INTEGER" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN relayed INTEGER' + "ALTER TABLE sessions ADD COLUMN relayed INTEGER" ) # Upgrade sessions table from earlier versions try: - c_db.execute('SELECT rating_key_websocket FROM sessions') + c_db.execute("SELECT rating_key_websocket FROM sessions") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table sessions.") c_db.execute( - 'ALTER TABLE sessions ADD COLUMN rating_key_websocket TEXT' + "ALTER TABLE sessions ADD COLUMN rating_key_websocket TEXT" ) # Upgrade sessions table from earlier versions try: - c_db.execute('SELECT video_scan_type FROM sessions') + c_db.execute("SELECT video_scan_type FROM sessions") except sqlite3.OperationalError: logger.debug("Altering database. 
Updating database table sessions.") c_db.execute( - 'ALTER TABLE sessions ADD COLUMN video_scan_type TEXT' + "ALTER TABLE sessions ADD COLUMN video_scan_type TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN video_full_resolution TEXT' + "ALTER TABLE sessions ADD COLUMN video_full_resolution TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN stream_video_scan_type TEXT' + "ALTER TABLE sessions ADD COLUMN stream_video_scan_type TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN stream_video_full_resolution TEXT' + "ALTER TABLE sessions ADD COLUMN stream_video_full_resolution TEXT" ) # Upgrade sessions table from earlier versions try: - c_db.execute('SELECT video_dynamic_range FROM sessions') + c_db.execute("SELECT video_dynamic_range FROM sessions") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table sessions.") c_db.execute( - 'ALTER TABLE sessions ADD COLUMN video_dynamic_range TEXT' + "ALTER TABLE sessions ADD COLUMN video_dynamic_range TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN stream_video_dynamic_range TEXT' + "ALTER TABLE sessions ADD COLUMN stream_video_dynamic_range TEXT" ) # Upgrade sessions table from earlier versions try: - c_db.execute('SELECT channel_identifier FROM sessions') + c_db.execute("SELECT channel_identifier FROM sessions") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table sessions.") c_db.execute( - 'ALTER TABLE sessions ADD COLUMN channel_call_sign TEXT' + "ALTER TABLE sessions ADD COLUMN channel_call_sign TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN channel_identifier TEXT' + "ALTER TABLE sessions ADD COLUMN channel_identifier TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN channel_thumb TEXT' + "ALTER TABLE sessions ADD COLUMN channel_thumb TEXT" ) # Upgrade sessions table from earlier versions try: - c_db.execute('SELECT originally_available_at FROM sessions') + c_db.execute("SELECT originally_available_at FROM sessions") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table sessions.") c_db.execute( - 'ALTER TABLE sessions ADD COLUMN originally_available_at TEXT' + "ALTER TABLE sessions ADD COLUMN originally_available_at TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN added_at INTEGER' + "ALTER TABLE sessions ADD COLUMN added_at INTEGER" ) # Upgrade sessions table from earlier versions try: - c_db.execute('SELECT guid FROM sessions') + c_db.execute("SELECT guid FROM sessions") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table sessions.") c_db.execute( - 'ALTER TABLE sessions ADD COLUMN guid TEXT' + "ALTER TABLE sessions ADD COLUMN guid TEXT" ) # Upgrade sessions table from earlier versions try: - c_db.execute('SELECT bandwidth FROM sessions') + c_db.execute("SELECT bandwidth FROM sessions") except sqlite3.OperationalError: logger.debug(u"Altering database. Updating database table sessions.") c_db.execute( - 'ALTER TABLE sessions ADD COLUMN bandwidth INTEGER' + "ALTER TABLE sessions ADD COLUMN bandwidth INTEGER" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN location TEXT' + "ALTER TABLE sessions ADD COLUMN location TEXT" ) # Upgrade sessions table from earlier versions try: - c_db.execute('SELECT initial_stream FROM sessions') + c_db.execute("SELECT initial_stream FROM sessions") except sqlite3.OperationalError: logger.debug(u"Altering database. 
Updating database table sessions.") c_db.execute( - 'ALTER TABLE sessions ADD COLUMN initial_stream INTEGER DEFAULT 1' + "ALTER TABLE sessions ADD COLUMN initial_stream INTEGER DEFAULT 1" ) # Upgrade sessions table from earlier versions try: - c_db.execute('SELECT audio_language FROM sessions') + c_db.execute("SELECT audio_language FROM sessions") except sqlite3.OperationalError: logger.debug(u"Altering database. Updating database table sessions.") c_db.execute( - 'ALTER TABLE sessions ADD COLUMN audio_language TEXT' + "ALTER TABLE sessions ADD COLUMN audio_language TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN audio_language_code TEXT' + "ALTER TABLE sessions ADD COLUMN audio_language_code TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN stream_audio_language TEXT' + "ALTER TABLE sessions ADD COLUMN stream_audio_language TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN stream_audio_language_code TEXT' + "ALTER TABLE sessions ADD COLUMN stream_audio_language_code TEXT" ) # Upgrade sessions table from earlier versions try: - c_db.execute('SELECT subtitle_language FROM sessions') + c_db.execute("SELECT subtitle_language FROM sessions") except sqlite3.OperationalError: logger.debug(u"Altering database. Updating database table sessions.") c_db.execute( - 'ALTER TABLE sessions ADD COLUMN subtitle_language TEXT' + "ALTER TABLE sessions ADD COLUMN subtitle_language TEXT" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN stream_subtitle_language TEXT' + "ALTER TABLE sessions ADD COLUMN stream_subtitle_language TEXT" ) # Upgrade sessions table from earlier versions try: - c_db.execute('SELECT subtitle_forced FROM sessions') + c_db.execute("SELECT subtitle_forced FROM sessions") except sqlite3.OperationalError: logger.debug(u"Altering database. Updating database table sessions.") c_db.execute( - 'ALTER TABLE sessions ADD COLUMN subtitle_forced INTEGER' + "ALTER TABLE sessions ADD COLUMN subtitle_forced INTEGER" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN stream_subtitle_forced INTEGER' + "ALTER TABLE sessions ADD COLUMN stream_subtitle_forced INTEGER" ) # Upgrade sessions table from earlier versions try: - c_db.execute('SELECT intro FROM sessions') + c_db.execute("SELECT intro FROM sessions") except sqlite3.OperationalError: logger.debug(u"Altering database. Updating database table sessions.") c_db.execute( - 'ALTER TABLE sessions ADD COLUMN intro INTEGER DEFAULT 0' + "ALTER TABLE sessions ADD COLUMN intro INTEGER DEFAULT 0" ) c_db.execute( - 'ALTER TABLE sessions ADD COLUMN credits INTEGER DEFAULT 0' + "ALTER TABLE sessions ADD COLUMN credits INTEGER DEFAULT 0" ) # Upgrade sessions table from earlier versions try: - c_db.execute('SELECT commercial FROM sessions') + c_db.execute("SELECT commercial FROM sessions") except sqlite3.OperationalError: logger.debug(u"Altering database. Updating database table sessions.") c_db.execute( - 'ALTER TABLE sessions ADD COLUMN commercial INTEGER DEFAULT 0' + "ALTER TABLE sessions ADD COLUMN commercial INTEGER DEFAULT 0" ) # Upgrade sessions table from earlier versions try: - c_db.execute('SELECT marker FROM sessions') + c_db.execute("SELECT marker FROM sessions") except sqlite3.OperationalError: logger.debug(u"Altering database. 
Updating database table sessions.") c_db.execute( - 'ALTER TABLE sessions ADD COLUMN marker INTEGER DEFAULT 0' + "ALTER TABLE sessions ADD COLUMN marker INTEGER DEFAULT 0" ) # Upgrade session_history table from earlier versions try: - c_db.execute('SELECT reference_id FROM session_history') + c_db.execute("SELECT reference_id FROM session_history") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table session_history.") c_db.execute( - 'ALTER TABLE session_history ADD COLUMN reference_id INTEGER DEFAULT 0' + "ALTER TABLE session_history ADD COLUMN reference_id INTEGER DEFAULT 0" ) # Set reference_id to the first row where (user_id = previous row, rating_key != previous row) and user_id = user_id c_db.execute( - 'UPDATE session_history ' \ - 'SET reference_id = (SELECT (CASE \ + "UPDATE session_history " \ + "SET reference_id = (SELECT (CASE \ WHEN (SELECT MIN(id) FROM session_history WHERE id > ( \ SELECT MAX(id) FROM session_history \ WHERE (user_id = t1.user_id AND rating_key <> t1.rating_key AND id < t1.id)) AND user_id = t1.user_id) IS NULL \ THEN (SELECT MIN(id) FROM session_history WHERE (user_id = t1.user_id)) \ ELSE (SELECT MIN(id) FROM session_history WHERE id > ( \ SELECT MAX(id) FROM session_history \ - WHERE (user_id = t1.user_id AND rating_key <> t1.rating_key AND id < t1.id)) AND user_id = t1.user_id) END) ' \ - 'FROM session_history AS t1 ' \ - 'WHERE t1.id = session_history.id) ' + WHERE (user_id = t1.user_id AND rating_key <> t1.rating_key AND id < t1.id)) AND user_id = t1.user_id) END) " \ + "FROM session_history AS t1 " \ + "WHERE t1.id = session_history.id) " ) # Upgrade session_history table from earlier versions try: - c_db.execute('SELECT bandwidth FROM session_history') + c_db.execute("SELECT bandwidth FROM session_history") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table session_history.") c_db.execute( - 'ALTER TABLE session_history ADD COLUMN platform_version TEXT' + "ALTER TABLE session_history ADD COLUMN platform_version TEXT" ) c_db.execute( - 'ALTER TABLE session_history ADD COLUMN product TEXT' + "ALTER TABLE session_history ADD COLUMN product TEXT" ) c_db.execute( - 'ALTER TABLE session_history ADD COLUMN product_version TEXT' + "ALTER TABLE session_history ADD COLUMN product_version TEXT" ) c_db.execute( - 'ALTER TABLE session_history ADD COLUMN profile TEXT' + "ALTER TABLE session_history ADD COLUMN profile TEXT" ) c_db.execute( - 'ALTER TABLE session_history ADD COLUMN bandwidth INTEGER' + "ALTER TABLE session_history ADD COLUMN bandwidth INTEGER" ) c_db.execute( - 'ALTER TABLE session_history ADD COLUMN location TEXT' + "ALTER TABLE session_history ADD COLUMN location TEXT" ) c_db.execute( - 'ALTER TABLE session_history ADD COLUMN quality_profile TEXT' + "ALTER TABLE session_history ADD COLUMN quality_profile TEXT" ) # Upgrade session_history table from earlier versions try: - c_db.execute('SELECT secure FROM session_history') + c_db.execute("SELECT secure FROM session_history") except sqlite3.OperationalError: logger.debug("Altering database. 
Updating database table session_history.") c_db.execute( - 'ALTER TABLE session_history ADD COLUMN secure INTEGER' + "ALTER TABLE session_history ADD COLUMN secure INTEGER" ) c_db.execute( - 'ALTER TABLE session_history ADD COLUMN relayed INTEGER' + "ALTER TABLE session_history ADD COLUMN relayed INTEGER" ) # Upgrade session_history table from earlier versions try: - result = c_db.execute('SELECT platform FROM session_history ' - 'WHERE platform = "windows"').fetchall() + result = c_db.execute("SELECT platform FROM session_history " + "WHERE platform = 'windows'").fetchall() if len(result) > 0: logger.debug("Altering database. Capitalizing Windows platform values in session_history table.") c_db.execute( - 'UPDATE session_history SET platform = "Windows" WHERE platform = "windows" ' + "UPDATE session_history SET platform = 'Windows' WHERE platform = 'windows' " ) except sqlite3.OperationalError: logger.warn("Unable to capitalize Windows platform values in session_history table.") # Upgrade session_history_metadata table from earlier versions try: - c_db.execute('SELECT full_title FROM session_history_metadata') + c_db.execute("SELECT full_title FROM session_history_metadata") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table session_history_metadata.") c_db.execute( - 'ALTER TABLE session_history_metadata ADD COLUMN full_title TEXT' + "ALTER TABLE session_history_metadata ADD COLUMN full_title TEXT" ) # Upgrade session_history_metadata table from earlier versions try: - c_db.execute('SELECT tagline FROM session_history_metadata') + c_db.execute("SELECT tagline FROM session_history_metadata") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table session_history_metadata.") c_db.execute( - 'ALTER TABLE session_history_metadata ADD COLUMN tagline TEXT' + "ALTER TABLE session_history_metadata ADD COLUMN tagline TEXT" ) # Upgrade session_history_metadata table from earlier versions try: - c_db.execute('SELECT labels FROM session_history_metadata') + c_db.execute("SELECT labels FROM session_history_metadata") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table session_history_metadata.") c_db.execute( - 'ALTER TABLE session_history_metadata ADD COLUMN labels TEXT' + "ALTER TABLE session_history_metadata ADD COLUMN labels TEXT" ) # Upgrade session_history_metadata table from earlier versions try: - c_db.execute('SELECT original_title FROM session_history_metadata') + c_db.execute("SELECT original_title FROM session_history_metadata") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table session_history_metadata.") c_db.execute( - 'ALTER TABLE session_history_metadata ADD COLUMN original_title TEXT' + "ALTER TABLE session_history_metadata ADD COLUMN original_title TEXT" ) # Upgrade session_history_metadata table from earlier versions try: - c_db.execute('SELECT live FROM session_history_metadata') + c_db.execute("SELECT live FROM session_history_metadata") except sqlite3.OperationalError: logger.debug("Altering database. 
Updating database table session_history_metadata.") c_db.execute( - 'ALTER TABLE session_history_metadata ADD COLUMN live INTEGER DEFAULT 0' + "ALTER TABLE session_history_metadata ADD COLUMN live INTEGER DEFAULT 0" ) c_db.execute( - 'ALTER TABLE session_history_metadata ADD COLUMN channel_call_sign TEXT' + "ALTER TABLE session_history_metadata ADD COLUMN channel_call_sign TEXT" ) c_db.execute( - 'ALTER TABLE session_history_metadata ADD COLUMN channel_identifier TEXT' + "ALTER TABLE session_history_metadata ADD COLUMN channel_identifier TEXT" ) c_db.execute( - 'ALTER TABLE session_history_metadata ADD COLUMN channel_thumb TEXT' + "ALTER TABLE session_history_metadata ADD COLUMN channel_thumb TEXT" ) # Upgrade session_history_metadata table from earlier versions try: - c_db.execute('SELECT marker_credits_first FROM session_history_metadata') + c_db.execute("SELECT marker_credits_first FROM session_history_metadata") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table session_history_metadata.") c_db.execute( - 'ALTER TABLE session_history_metadata ADD COLUMN marker_credits_first INTEGER DEFAULT NULL' + "ALTER TABLE session_history_metadata ADD COLUMN marker_credits_first INTEGER DEFAULT NULL" ) c_db.execute( - 'ALTER TABLE session_history_metadata ADD COLUMN marker_credits_final INTEGER DEFAULT NULL' + "ALTER TABLE session_history_metadata ADD COLUMN marker_credits_final INTEGER DEFAULT NULL" ) # Upgrade session_history_media_info table from earlier versions try: - c_db.execute('SELECT transcode_decision FROM session_history_media_info') + c_db.execute("SELECT transcode_decision FROM session_history_media_info") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table session_history_media_info.") c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN transcode_decision TEXT' + "ALTER TABLE session_history_media_info ADD COLUMN transcode_decision TEXT" ) c_db.execute( - 'UPDATE session_history_media_info SET transcode_decision = (CASE ' - 'WHEN video_decision = "transcode" OR audio_decision = "transcode" THEN "transcode" ' - 'WHEN video_decision = "copy" OR audio_decision = "copy" THEN "copy" ' - 'WHEN video_decision = "direct play" OR audio_decision = "direct play" THEN "direct play" END)' + "UPDATE session_history_media_info SET transcode_decision = (CASE " + "WHEN video_decision = 'transcode' OR audio_decision = 'transcode' THEN 'transcode' " + "WHEN video_decision = 'copy' OR audio_decision = 'copy' THEN 'copy' " + "WHEN video_decision = 'direct play' OR audio_decision = 'direct play' THEN 'direct play' END)" ) # Upgrade session_history_media_info table from earlier versions try: - c_db.execute('SELECT subtitles FROM session_history_media_info') + c_db.execute("SELECT subtitles FROM session_history_media_info") except sqlite3.OperationalError: logger.debug("Altering database. 
Updating database table session_history_media_info.") c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN video_bit_depth INTEGER' + "ALTER TABLE session_history_media_info ADD COLUMN video_bit_depth INTEGER" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN video_bitrate INTEGER' + "ALTER TABLE session_history_media_info ADD COLUMN video_bitrate INTEGER" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN video_codec_level TEXT' + "ALTER TABLE session_history_media_info ADD COLUMN video_codec_level TEXT" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN video_width INTEGER' + "ALTER TABLE session_history_media_info ADD COLUMN video_width INTEGER" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN video_height INTEGER' + "ALTER TABLE session_history_media_info ADD COLUMN video_height INTEGER" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN audio_bitrate INTEGER' + "ALTER TABLE session_history_media_info ADD COLUMN audio_bitrate INTEGER" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN transcode_hw_requested INTEGER' + "ALTER TABLE session_history_media_info ADD COLUMN transcode_hw_requested INTEGER" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN transcode_hw_full_pipeline INTEGER' + "ALTER TABLE session_history_media_info ADD COLUMN transcode_hw_full_pipeline INTEGER" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN transcode_hw_decode TEXT' + "ALTER TABLE session_history_media_info ADD COLUMN transcode_hw_decode TEXT" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN transcode_hw_encode TEXT' + "ALTER TABLE session_history_media_info ADD COLUMN transcode_hw_encode TEXT" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN transcode_hw_decode_title TEXT' + "ALTER TABLE session_history_media_info ADD COLUMN transcode_hw_decode_title TEXT" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN transcode_hw_encode_title TEXT' + "ALTER TABLE session_history_media_info ADD COLUMN transcode_hw_encode_title TEXT" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN stream_container TEXT' + "ALTER TABLE session_history_media_info ADD COLUMN stream_container TEXT" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN stream_container_decision TEXT' + "ALTER TABLE session_history_media_info ADD COLUMN stream_container_decision TEXT" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN stream_bitrate INTEGER' + "ALTER TABLE session_history_media_info ADD COLUMN stream_bitrate INTEGER" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN stream_video_decision TEXT' + "ALTER TABLE session_history_media_info ADD COLUMN stream_video_decision TEXT" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN stream_video_bitrate INTEGER' + "ALTER TABLE session_history_media_info ADD COLUMN stream_video_bitrate INTEGER" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN stream_video_codec TEXT' + "ALTER TABLE session_history_media_info ADD COLUMN stream_video_codec TEXT" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN stream_video_codec_level TEXT' + "ALTER TABLE session_history_media_info ADD COLUMN stream_video_codec_level TEXT" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN stream_video_bit_depth INTEGER' + "ALTER TABLE session_history_media_info ADD COLUMN 
stream_video_bit_depth INTEGER" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN stream_video_height INTEGER' + "ALTER TABLE session_history_media_info ADD COLUMN stream_video_height INTEGER" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN stream_video_width INTEGER' + "ALTER TABLE session_history_media_info ADD COLUMN stream_video_width INTEGER" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN stream_video_resolution TEXT' + "ALTER TABLE session_history_media_info ADD COLUMN stream_video_resolution TEXT" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN stream_video_framerate TEXT' + "ALTER TABLE session_history_media_info ADD COLUMN stream_video_framerate TEXT" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN stream_audio_decision TEXT' + "ALTER TABLE session_history_media_info ADD COLUMN stream_audio_decision TEXT" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN stream_audio_codec TEXT' + "ALTER TABLE session_history_media_info ADD COLUMN stream_audio_codec TEXT" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN stream_audio_bitrate INTEGER' + "ALTER TABLE session_history_media_info ADD COLUMN stream_audio_bitrate INTEGER" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN stream_audio_channels INTEGER' + "ALTER TABLE session_history_media_info ADD COLUMN stream_audio_channels INTEGER" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN stream_subtitle_decision TEXT' + "ALTER TABLE session_history_media_info ADD COLUMN stream_subtitle_decision TEXT" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN stream_subtitle_codec TEXT' + "ALTER TABLE session_history_media_info ADD COLUMN stream_subtitle_codec TEXT" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN stream_subtitle_container TEXT' + "ALTER TABLE session_history_media_info ADD COLUMN stream_subtitle_container TEXT" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN stream_subtitle_forced INTEGER' + "ALTER TABLE session_history_media_info ADD COLUMN stream_subtitle_forced INTEGER" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN subtitles INTEGER' + "ALTER TABLE session_history_media_info ADD COLUMN subtitles INTEGER" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN synced_version INTEGER' + "ALTER TABLE session_history_media_info ADD COLUMN synced_version INTEGER" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN optimized_version INTEGER' + "ALTER TABLE session_history_media_info ADD COLUMN optimized_version INTEGER" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN optimized_version_profile TEXT' + "ALTER TABLE session_history_media_info ADD COLUMN optimized_version_profile TEXT" ) c_db.execute( - 'UPDATE session_history_media_info SET video_resolution=REPLACE(video_resolution, "p", "")' + "UPDATE session_history_media_info SET video_resolution=REPLACE(video_resolution, 'p', '')" ) c_db.execute( - 'UPDATE session_history_media_info SET video_resolution=REPLACE(video_resolution, "SD", "sd")' + "UPDATE session_history_media_info SET video_resolution=REPLACE(video_resolution, 'SD', 'sd')" ) # Upgrade session_history_media_info table from earlier versions try: - c_db.execute('SELECT subtitle_codec FROM session_history_media_info') + c_db.execute("SELECT subtitle_codec FROM session_history_media_info") except 
sqlite3.OperationalError: logger.debug("Altering database. Updating database table session_history_media_info.") c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN subtitle_codec TEXT' + "ALTER TABLE session_history_media_info ADD COLUMN subtitle_codec TEXT" ) # Upgrade session_history_media_info table from earlier versions try: - c_db.execute('SELECT synced_version_profile FROM session_history_media_info') + c_db.execute("SELECT synced_version_profile FROM session_history_media_info") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table session_history_media_info.") c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN synced_version_profile TEXT' + "ALTER TABLE session_history_media_info ADD COLUMN synced_version_profile TEXT" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN optimized_version_title TEXT' + "ALTER TABLE session_history_media_info ADD COLUMN optimized_version_title TEXT" ) # Upgrade session_history_media_info table from earlier versions try: - c_db.execute('SELECT transcode_hw_decoding FROM session_history_media_info') + c_db.execute("SELECT transcode_hw_decoding FROM session_history_media_info") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table session_history_media_info.") c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN transcode_hw_decoding INTEGER' + "ALTER TABLE session_history_media_info ADD COLUMN transcode_hw_decoding INTEGER" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN transcode_hw_encoding INTEGER' + "ALTER TABLE session_history_media_info ADD COLUMN transcode_hw_encoding INTEGER" ) c_db.execute( - 'UPDATE session_history_media_info SET subtitle_codec = "" WHERE subtitle_codec IS NULL' + "UPDATE session_history_media_info SET subtitle_codec = '' WHERE subtitle_codec IS NULL" ) # Upgrade session_history_media_info table from earlier versions try: - result = c_db.execute('SELECT stream_container FROM session_history_media_info ' - 'WHERE stream_container IS NULL').fetchall() + result = c_db.execute("SELECT stream_container FROM session_history_media_info " + "WHERE stream_container IS NULL").fetchall() if len(result) > 0: logger.debug("Altering database. 
Removing NULL values from session_history_media_info table.") c_db.execute( - 'UPDATE session_history_media_info SET stream_container = "" WHERE stream_container IS NULL' + "UPDATE session_history_media_info SET stream_container = '' WHERE stream_container IS NULL" ) c_db.execute( - 'UPDATE session_history_media_info SET stream_video_codec = "" WHERE stream_video_codec IS NULL' + "UPDATE session_history_media_info SET stream_video_codec = '' WHERE stream_video_codec IS NULL" ) c_db.execute( - 'UPDATE session_history_media_info SET stream_audio_codec = "" WHERE stream_audio_codec IS NULL' + "UPDATE session_history_media_info SET stream_audio_codec = '' WHERE stream_audio_codec IS NULL" ) c_db.execute( - 'UPDATE session_history_media_info SET stream_subtitle_codec = "" WHERE stream_subtitle_codec IS NULL' + "UPDATE session_history_media_info SET stream_subtitle_codec = '' WHERE stream_subtitle_codec IS NULL" ) except sqlite3.OperationalError: logger.warn("Unable to remove NULL values from session_history_media_info table.") # Upgrade session_history_media_info table from earlier versions try: - c_db.execute('SELECT video_scan_type FROM session_history_media_info') + c_db.execute("SELECT video_scan_type FROM session_history_media_info") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table session_history_media_info.") c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN video_scan_type TEXT' + "ALTER TABLE session_history_media_info ADD COLUMN video_scan_type TEXT" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN video_full_resolution TEXT' + "ALTER TABLE session_history_media_info ADD COLUMN video_full_resolution TEXT" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN stream_video_scan_type TEXT' + "ALTER TABLE session_history_media_info ADD COLUMN stream_video_scan_type TEXT" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN stream_video_full_resolution TEXT' + "ALTER TABLE session_history_media_info ADD COLUMN stream_video_full_resolution TEXT" ) c_db.execute( - 'UPDATE session_history_media_info SET video_scan_type = "progressive" ' - 'WHERE video_resolution != ""' + "UPDATE session_history_media_info SET video_scan_type = 'progressive' " + "WHERE video_resolution != ''" ) c_db.execute( - 'UPDATE session_history_media_info SET stream_video_scan_type = "progressive" ' - 'WHERE stream_video_resolution != "" AND stream_video_resolution IS NOT NULL' + "UPDATE session_history_media_info SET stream_video_scan_type = 'progressive' " + "WHERE stream_video_resolution != '' AND stream_video_resolution IS NOT NULL" ) c_db.execute( - 'UPDATE session_history_media_info SET video_full_resolution = (CASE ' - 'WHEN video_resolution = "" OR video_resolution = "SD" OR video_resolution = "4k" THEN video_resolution ' - 'WHEN video_resolution = "sd" THEN "SD" ' - 'ELSE video_resolution || "p" END)' + "UPDATE session_history_media_info SET video_full_resolution = (CASE " + "WHEN video_resolution = '' OR video_resolution = 'SD' OR video_resolution = '4k' THEN video_resolution " + "WHEN video_resolution = 'sd' THEN 'SD' " + "ELSE video_resolution || 'p' END)" ) c_db.execute( - 'UPDATE session_history_media_info SET stream_video_full_resolution = ( ' - 'CASE WHEN stream_video_resolution = "" OR stream_video_resolution = "SD" OR stream_video_resolution = "4k" ' - 'THEN stream_video_resolution ' - 'WHEN stream_video_resolution = "sd" THEN "SD" ' - 'ELSE stream_video_resolution || "p" END)' + "UPDATE 
session_history_media_info SET stream_video_full_resolution = ( " + "CASE WHEN stream_video_resolution = '' OR stream_video_resolution = 'SD' OR stream_video_resolution = '4k' " + "THEN stream_video_resolution " + "WHEN stream_video_resolution = 'sd' THEN 'SD' " + "ELSE stream_video_resolution || 'p' END)" ) # Upgrade session_history_media_info table from earlier versions try: - c_db.execute('SELECT video_dynamic_range FROM session_history_media_info') + c_db.execute("SELECT video_dynamic_range FROM session_history_media_info") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table session_history_media_info.") c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN video_dynamic_range TEXT' + "ALTER TABLE session_history_media_info ADD COLUMN video_dynamic_range TEXT" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN stream_video_dynamic_range TEXT' + "ALTER TABLE session_history_media_info ADD COLUMN stream_video_dynamic_range TEXT" ) - result = c_db.execute('SELECT * FROM session_history_media_info ' - 'WHERE video_dynamic_range = "SDR" AND stream_video_dynamic_range = "HDR"').fetchone() + result = c_db.execute("SELECT * FROM session_history_media_info " + "WHERE video_dynamic_range = 'SDR' AND stream_video_dynamic_range = 'HDR'").fetchone() if result: c_db.execute( - 'UPDATE session_history_media_info SET stream_video_dynamic_range = "SDR" ' - 'WHERE video_dynamic_range = "SDR" AND stream_video_dynamic_range = "HDR"' + "UPDATE session_history_media_info SET stream_video_dynamic_range = 'SDR' " + "WHERE video_dynamic_range = 'SDR' AND stream_video_dynamic_range = 'HDR'" ) # Upgrade session_history_media_info table from earlier versions try: - c_db.execute('SELECT audio_language FROM session_history_media_info') + c_db.execute("SELECT audio_language FROM session_history_media_info") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table session_history_media_info.") c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN audio_language TEXT' + "ALTER TABLE session_history_media_info ADD COLUMN audio_language TEXT" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN audio_language_code TEXT' + "ALTER TABLE session_history_media_info ADD COLUMN audio_language_code TEXT" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN stream_audio_language TEXT' + "ALTER TABLE session_history_media_info ADD COLUMN stream_audio_language TEXT" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN stream_audio_language_code TEXT' + "ALTER TABLE session_history_media_info ADD COLUMN stream_audio_language_code TEXT" ) # Upgrade session_history_media_info table from earlier versions try: - c_db.execute('SELECT subtitle_language FROM session_history_media_info') + c_db.execute("SELECT subtitle_language FROM session_history_media_info") except sqlite3.OperationalError: logger.debug("Altering database. 
Updating database table session_history_media_info.") c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN subtitle_language TEXT' + "ALTER TABLE session_history_media_info ADD COLUMN subtitle_language TEXT" ) c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN stream_subtitle_language TEXT' + "ALTER TABLE session_history_media_info ADD COLUMN stream_subtitle_language TEXT" ) # Upgrade session_history_media_info table from earlier versions try: - c_db.execute('SELECT subtitle_forced FROM session_history_media_info') + c_db.execute("SELECT subtitle_forced FROM session_history_media_info") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table session_history_media_info.") c_db.execute( - 'ALTER TABLE session_history_media_info ADD COLUMN subtitle_forced INTEGER' + "ALTER TABLE session_history_media_info ADD COLUMN subtitle_forced INTEGER" ) # Upgrade session_history table from earlier versions try: - c_db.execute('SELECT section_id FROM session_history') + c_db.execute("SELECT section_id FROM session_history") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table session_history.") c_db.execute( - 'ALTER TABLE session_history ADD COLUMN section_id INTEGER' + "ALTER TABLE session_history ADD COLUMN section_id INTEGER" ) c_db.execute( - 'UPDATE session_history SET section_id = (' - 'SELECT section_id FROM session_history_metadata ' - 'WHERE session_history_metadata.id = session_history.id)' + "UPDATE session_history SET section_id = (" + "SELECT section_id FROM session_history_metadata " + "WHERE session_history_metadata.id = session_history.id)" ) c_db.execute( - 'CREATE TABLE IF NOT EXISTS session_history_metadata_temp (id INTEGER PRIMARY KEY, ' - 'rating_key INTEGER, parent_rating_key INTEGER, grandparent_rating_key INTEGER, ' - 'title TEXT, parent_title TEXT, grandparent_title TEXT, original_title TEXT, full_title TEXT, ' - 'media_index INTEGER, parent_media_index INTEGER, ' - 'thumb TEXT, parent_thumb TEXT, grandparent_thumb TEXT, ' - 'art TEXT, media_type TEXT, year INTEGER, originally_available_at TEXT, added_at INTEGER, updated_at INTEGER, ' - 'last_viewed_at INTEGER, content_rating TEXT, summary TEXT, tagline TEXT, rating TEXT, ' - 'duration INTEGER DEFAULT 0, guid TEXT, directors TEXT, writers TEXT, actors TEXT, genres TEXT, studio TEXT, ' - 'labels TEXT, live INTEGER DEFAULT 0, channel_call_sign TEXT, channel_identifier TEXT, channel_thumb TEXT)' + "CREATE TABLE IF NOT EXISTS session_history_metadata_temp (id INTEGER PRIMARY KEY, " + "rating_key INTEGER, parent_rating_key INTEGER, grandparent_rating_key INTEGER, " + "title TEXT, parent_title TEXT, grandparent_title TEXT, original_title TEXT, full_title TEXT, " + "media_index INTEGER, parent_media_index INTEGER, " + "thumb TEXT, parent_thumb TEXT, grandparent_thumb TEXT, " + "art TEXT, media_type TEXT, year INTEGER, originally_available_at TEXT, added_at INTEGER, updated_at INTEGER, " + "last_viewed_at INTEGER, content_rating TEXT, summary TEXT, tagline TEXT, rating TEXT, " + "duration INTEGER DEFAULT 0, guid TEXT, directors TEXT, writers TEXT, actors TEXT, genres TEXT, studio TEXT, " + "labels TEXT, live INTEGER DEFAULT 0, channel_call_sign TEXT, channel_identifier TEXT, channel_thumb TEXT)" ) c_db.execute( - 'INSERT INTO session_history_metadata_temp (id, rating_key, parent_rating_key, grandparent_rating_key, ' - 'title, parent_title, grandparent_title, original_title, full_title, ' - 'media_index, parent_media_index, ' - 'thumb, 
parent_thumb, grandparent_thumb, ' - 'art, media_type, year, originally_available_at, added_at, updated_at, ' - 'last_viewed_at, content_rating, summary, tagline, rating, ' - 'duration, guid, directors, writers, actors, genres, studio, ' - 'labels, live, channel_call_sign, channel_identifier, channel_thumb) ' - 'SELECT id, rating_key, parent_rating_key, grandparent_rating_key, ' - 'title, parent_title, grandparent_title, original_title, full_title, ' - 'media_index, parent_media_index, ' - 'thumb, parent_thumb, grandparent_thumb, ' - 'art, media_type, year, originally_available_at, added_at, updated_at, ' - 'last_viewed_at, content_rating, summary, tagline, rating, ' - 'duration, guid, directors, writers, actors, genres, studio, ' - 'labels, live, channel_call_sign, channel_identifier, channel_thumb ' - 'FROM session_history_metadata' + "INSERT INTO session_history_metadata_temp (id, rating_key, parent_rating_key, grandparent_rating_key, " + "title, parent_title, grandparent_title, original_title, full_title, " + "media_index, parent_media_index, " + "thumb, parent_thumb, grandparent_thumb, " + "art, media_type, year, originally_available_at, added_at, updated_at, " + "last_viewed_at, content_rating, summary, tagline, rating, " + "duration, guid, directors, writers, actors, genres, studio, " + "labels, live, channel_call_sign, channel_identifier, channel_thumb) " + "SELECT id, rating_key, parent_rating_key, grandparent_rating_key, " + "title, parent_title, grandparent_title, original_title, full_title, " + "media_index, parent_media_index, " + "thumb, parent_thumb, grandparent_thumb, " + "art, media_type, year, originally_available_at, added_at, updated_at, " + "last_viewed_at, content_rating, summary, tagline, rating, " + "duration, guid, directors, writers, actors, genres, studio, " + "labels, live, channel_call_sign, channel_identifier, channel_thumb " + "FROM session_history_metadata" ) c_db.execute( - 'DROP TABLE session_history_metadata' + "DROP TABLE session_history_metadata" ) c_db.execute( - 'ALTER TABLE session_history_metadata_temp RENAME TO session_history_metadata' + "ALTER TABLE session_history_metadata_temp RENAME TO session_history_metadata" ) # Upgrade users table from earlier versions try: - c_db.execute('SELECT do_notify FROM users') + c_db.execute("SELECT do_notify FROM users") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table users.") c_db.execute( - 'ALTER TABLE users ADD COLUMN do_notify INTEGER DEFAULT 1' + "ALTER TABLE users ADD COLUMN do_notify INTEGER DEFAULT 1" ) # Upgrade users table from earlier versions try: - c_db.execute('SELECT keep_history FROM users') + c_db.execute("SELECT keep_history FROM users") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table users.") c_db.execute( - 'ALTER TABLE users ADD COLUMN keep_history INTEGER DEFAULT 1' + "ALTER TABLE users ADD COLUMN keep_history INTEGER DEFAULT 1" ) # Upgrade users table from earlier versions try: - c_db.execute('SELECT custom_avatar_url FROM users') + c_db.execute("SELECT custom_avatar_url FROM users") except sqlite3.OperationalError: logger.debug("Altering database. 
Updating database table users.") c_db.execute( - 'ALTER TABLE users ADD COLUMN custom_avatar_url TEXT' + "ALTER TABLE users ADD COLUMN custom_avatar_url TEXT" ) # Upgrade users table from earlier versions try: - c_db.execute('SELECT deleted_user FROM users') + c_db.execute("SELECT deleted_user FROM users") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table users.") c_db.execute( - 'ALTER TABLE users ADD COLUMN deleted_user INTEGER DEFAULT 0' + "ALTER TABLE users ADD COLUMN deleted_user INTEGER DEFAULT 0" ) # Upgrade users table from earlier versions try: - c_db.execute('SELECT allow_guest FROM users') + c_db.execute("SELECT allow_guest FROM users") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table users.") c_db.execute( - 'ALTER TABLE users ADD COLUMN allow_guest INTEGER DEFAULT 0' + "ALTER TABLE users ADD COLUMN allow_guest INTEGER DEFAULT 0" ) c_db.execute( - 'ALTER TABLE users ADD COLUMN user_token TEXT' + "ALTER TABLE users ADD COLUMN user_token TEXT" ) c_db.execute( - 'ALTER TABLE users ADD COLUMN server_token TEXT' + "ALTER TABLE users ADD COLUMN server_token TEXT" ) # Upgrade users table from earlier versions try: - c_db.execute('SELECT shared_libraries FROM users') + c_db.execute("SELECT shared_libraries FROM users") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table users.") c_db.execute( - 'ALTER TABLE users ADD COLUMN shared_libraries TEXT' + "ALTER TABLE users ADD COLUMN shared_libraries TEXT" ) c_db.execute( - 'ALTER TABLE users ADD COLUMN filter_all TEXT' + "ALTER TABLE users ADD COLUMN filter_all TEXT" ) c_db.execute( - 'ALTER TABLE users ADD COLUMN filter_movies TEXT' + "ALTER TABLE users ADD COLUMN filter_movies TEXT" ) c_db.execute( - 'ALTER TABLE users ADD COLUMN filter_tv TEXT' + "ALTER TABLE users ADD COLUMN filter_tv TEXT" ) c_db.execute( - 'ALTER TABLE users ADD COLUMN filter_music TEXT' + "ALTER TABLE users ADD COLUMN filter_music TEXT" ) c_db.execute( - 'ALTER TABLE users ADD COLUMN filter_photos TEXT' + "ALTER TABLE users ADD COLUMN filter_photos TEXT" ) # Upgrade users table from earlier versions try: - c_db.execute('SELECT is_admin FROM users') + c_db.execute("SELECT is_admin FROM users") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table users.") c_db.execute( - 'ALTER TABLE users ADD COLUMN is_admin INTEGER DEFAULT 0' + "ALTER TABLE users ADD COLUMN is_admin INTEGER DEFAULT 0" ) # Upgrade users table from earlier versions try: - c_db.execute('SELECT is_active FROM users') + c_db.execute("SELECT is_active FROM users") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table users.") c_db.execute( - 'ALTER TABLE users ADD COLUMN is_active INTEGER DEFAULT 1' + "ALTER TABLE users ADD COLUMN is_active INTEGER DEFAULT 1" ) # Upgrade users table from earlier versions try: - c_db.execute('SELECT title FROM users') + c_db.execute("SELECT title FROM users") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table users.") c_db.execute( - 'ALTER TABLE users ADD COLUMN title TEXT' + "ALTER TABLE users ADD COLUMN title TEXT" ) try: - result = c_db.execute('SELECT * FROM users WHERE friendly_name = username').fetchall() + result = c_db.execute("SELECT * FROM users WHERE friendly_name = username").fetchall() if result: logger.debug("Altering database. 
Resetting user friendly names equal to username.") - c_db.execute('UPDATE users SET friendly_name = NULL WHERE friendly_name = username') + c_db.execute("UPDATE users SET friendly_name = NULL WHERE friendly_name = username") except sqlite3.OperationalError: pass # Upgrade notify_log table from earlier versions try: - c_db.execute('SELECT poster_url FROM notify_log') + c_db.execute("SELECT poster_url FROM notify_log") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table notify_log.") c_db.execute( - 'ALTER TABLE notify_log ADD COLUMN poster_url TEXT' + "ALTER TABLE notify_log ADD COLUMN poster_url TEXT" ) # Upgrade notify_log table from earlier versions (populate table with data from notify_log) try: - c_db.execute('SELECT timestamp FROM notify_log') + c_db.execute("SELECT timestamp FROM notify_log") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table notify_log.") c_db.execute( - 'CREATE TABLE IF NOT EXISTS notify_log_temp (id INTEGER PRIMARY KEY AUTOINCREMENT, timestamp INTEGER, ' - 'session_key INTEGER, rating_key INTEGER, parent_rating_key INTEGER, grandparent_rating_key INTEGER, ' - 'user_id INTEGER, user TEXT, agent_id INTEGER, agent_name TEXT, notify_action TEXT, ' - 'subject_text TEXT, body_text TEXT, script_args TEXT, poster_url TEXT)' + "CREATE TABLE IF NOT EXISTS notify_log_temp (id INTEGER PRIMARY KEY AUTOINCREMENT, timestamp INTEGER, " + "session_key INTEGER, rating_key INTEGER, parent_rating_key INTEGER, grandparent_rating_key INTEGER, " + "user_id INTEGER, user TEXT, agent_id INTEGER, agent_name TEXT, notify_action TEXT, " + "subject_text TEXT, body_text TEXT, script_args TEXT, poster_url TEXT)" ) c_db.execute( - 'INSERT INTO notify_log_temp (session_key, rating_key, user_id, user, agent_id, agent_name, ' - 'poster_url, timestamp, notify_action) ' - 'SELECT session_key, rating_key, user_id, user, agent_id, agent_name, poster_url, timestamp, ' - 'notify_action FROM notify_log_temp ' - 'UNION ALL SELECT session_key, rating_key, user_id, user, agent_id, agent_name, poster_url, ' - 'on_play, "play" FROM notify_log WHERE on_play ' - 'UNION ALL SELECT session_key, rating_key, user_id, user, agent_id, agent_name, poster_url, ' - 'on_stop, "stop" FROM notify_log WHERE on_stop ' - 'UNION ALL SELECT session_key, rating_key, user_id, user, agent_id, agent_name, poster_url, ' - 'on_watched, "watched" FROM notify_log WHERE on_watched ' - 'UNION ALL SELECT session_key, rating_key, user_id, user, agent_id, agent_name, poster_url, ' - 'on_pause, "pause" FROM notify_log WHERE on_pause ' - 'UNION ALL SELECT session_key, rating_key, user_id, user, agent_id, agent_name, poster_url, ' - 'on_resume, "resume" FROM notify_log WHERE on_resume ' - 'UNION ALL SELECT session_key, rating_key, user_id, user, agent_id, agent_name, poster_url, ' - 'on_buffer, "buffer" FROM notify_log WHERE on_buffer ' - 'UNION ALL SELECT session_key, rating_key, user_id, user, agent_id, agent_name, poster_url, ' - 'on_created, "created" FROM notify_log WHERE on_created ' - 'ORDER BY timestamp ') + "INSERT INTO notify_log_temp (session_key, rating_key, user_id, user, agent_id, agent_name, " + "poster_url, timestamp, notify_action) " + "SELECT session_key, rating_key, user_id, user, agent_id, agent_name, poster_url, timestamp, " + "notify_action FROM notify_log_temp " + "UNION ALL SELECT session_key, rating_key, user_id, user, agent_id, agent_name, poster_url, " + "on_play, 'play' FROM notify_log WHERE on_play " + "UNION ALL SELECT session_key, 
rating_key, user_id, user, agent_id, agent_name, poster_url, " + "on_stop, 'stop' FROM notify_log WHERE on_stop " + "UNION ALL SELECT session_key, rating_key, user_id, user, agent_id, agent_name, poster_url, " + "on_watched, 'watched' FROM notify_log WHERE on_watched " + "UNION ALL SELECT session_key, rating_key, user_id, user, agent_id, agent_name, poster_url, " + "on_pause, 'pause' FROM notify_log WHERE on_pause " + "UNION ALL SELECT session_key, rating_key, user_id, user, agent_id, agent_name, poster_url, " + "on_resume, 'resume' FROM notify_log WHERE on_resume " + "UNION ALL SELECT session_key, rating_key, user_id, user, agent_id, agent_name, poster_url, " + "on_buffer, 'buffer' FROM notify_log WHERE on_buffer " + "UNION ALL SELECT session_key, rating_key, user_id, user, agent_id, agent_name, poster_url, " + "on_created, 'created' FROM notify_log WHERE on_created " + "ORDER BY timestamp ") c_db.execute( - 'DROP TABLE notify_log' + "DROP TABLE notify_log" ) c_db.execute( - 'ALTER TABLE notify_log_temp RENAME TO notify_log' + "ALTER TABLE notify_log_temp RENAME TO notify_log" ) # Upgrade notify_log table from earlier versions try: - c_db.execute('SELECT notifier_id FROM notify_log') + c_db.execute("SELECT notifier_id FROM notify_log") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table notify_log.") c_db.execute( - 'ALTER TABLE notify_log ADD COLUMN notifier_id INTEGER' + "ALTER TABLE notify_log ADD COLUMN notifier_id INTEGER" ) # Upgrade notify_log table from earlier versions try: - c_db.execute('SELECT success FROM notify_log') + c_db.execute("SELECT success FROM notify_log") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table notify_log.") c_db.execute( - 'ALTER TABLE notify_log ADD COLUMN success INTEGER DEFAULT 0' + "ALTER TABLE notify_log ADD COLUMN success INTEGER DEFAULT 0" ) c_db.execute( - 'UPDATE notify_log SET success = 1' + "UPDATE notify_log SET success = 1" ) # Upgrade notify_log table from earlier versions try: - c_db.execute('SELECT tag FROM notify_log') + c_db.execute("SELECT tag FROM notify_log") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table notify_log.") c_db.execute( - 'ALTER TABLE notify_log ADD COLUMN tag TEXT' + "ALTER TABLE notify_log ADD COLUMN tag TEXT" ) # Upgrade newsletter_log table from earlier versions try: - c_db.execute('SELECT start_time FROM newsletter_log') + c_db.execute("SELECT start_time FROM newsletter_log") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table newsletter_log.") c_db.execute( - 'ALTER TABLE newsletter_log ADD COLUMN start_time INTEGER' + "ALTER TABLE newsletter_log ADD COLUMN start_time INTEGER" ) c_db.execute( - 'ALTER TABLE newsletter_log ADD COLUMN end_time INTEGER' + "ALTER TABLE newsletter_log ADD COLUMN end_time INTEGER" ) # Upgrade newsletter_log table from earlier versions try: - c_db.execute('SELECT filename FROM newsletter_log') + c_db.execute("SELECT filename FROM newsletter_log") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table newsletter_log.") c_db.execute( - 'ALTER TABLE newsletter_log ADD COLUMN filename TEXT' + "ALTER TABLE newsletter_log ADD COLUMN filename TEXT" ) # Upgrade newsletter_log table from earlier versions try: - c_db.execute('SELECT email_msg_id FROM newsletter_log') + c_db.execute("SELECT email_msg_id FROM newsletter_log") except sqlite3.OperationalError: logger.debug("Altering database. 
Updating database table newsletter_log.") c_db.execute( - 'ALTER TABLE newsletter_log ADD COLUMN email_msg_id TEXT' + "ALTER TABLE newsletter_log ADD COLUMN email_msg_id TEXT" ) # Upgrade newsletters table from earlier versions try: - c_db.execute('SELECT id_name FROM newsletters') + c_db.execute("SELECT id_name FROM newsletters") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table newsletters.") c_db.execute( - 'ALTER TABLE newsletters ADD COLUMN id_name TEXT NOT NULL' + "ALTER TABLE newsletters ADD COLUMN id_name TEXT NOT NULL" ) # Upgrade newsletters table from earlier versions try: - result = c_db.execute('SELECT SQL FROM sqlite_master WHERE type="table" AND name="newsletters"').fetchone() - if 'TEXT NOT NULL DEFAULT "0 0 * * 0"' in result[0]: + result = c_db.execute("SELECT SQL FROM sqlite_master WHERE type='table' AND name='newsletters'").fetchone() + if "TEXT NOT NULL DEFAULT \"0 0 * * 0\"" in result[0]: logger.debug("Altering database. Updating default cron value in newsletters table.") c_db.execute( - 'CREATE TABLE newsletters_temp (id INTEGER PRIMARY KEY AUTOINCREMENT, ' - 'agent_id INTEGER, agent_name TEXT, agent_label TEXT, id_name TEXT NOT NULL, ' - 'friendly_name TEXT, newsletter_config TEXT, email_config TEXT, ' - 'subject TEXT, body TEXT, message TEXT, ' - 'cron TEXT NOT NULL DEFAULT \'0 0 * * 0\', active INTEGER DEFAULT 0)' + "CREATE TABLE newsletters_temp (id INTEGER PRIMARY KEY AUTOINCREMENT, " + "agent_id INTEGER, agent_name TEXT, agent_label TEXT, id_name TEXT NOT NULL, " + "friendly_name TEXT, newsletter_config TEXT, email_config TEXT, " + "subject TEXT, body TEXT, message TEXT, " + "cron TEXT NOT NULL DEFAULT '0 0 * * 0', active INTEGER DEFAULT 0)" ) c_db.execute( - 'INSERT INTO newsletters_temp (id, agent_id, agent_name, agent_label, id_name, ' - 'friendly_name, newsletter_config, email_config, subject, body, message, cron, active) ' - 'SELECT id, agent_id, agent_name, agent_label, id_name, ' - 'friendly_name, newsletter_config, email_config, subject, body, message, cron, active ' - 'FROM newsletters' + "INSERT INTO newsletters_temp (id, agent_id, agent_name, agent_label, id_name, " + "friendly_name, newsletter_config, email_config, subject, body, message, cron, active) " + "SELECT id, agent_id, agent_name, agent_label, id_name, " + "friendly_name, newsletter_config, email_config, subject, body, message, cron, active " + "FROM newsletters" ) c_db.execute( - 'DROP TABLE newsletters' + "DROP TABLE newsletters" ) c_db.execute( - 'ALTER TABLE newsletters_temp RENAME TO newsletters' + "ALTER TABLE newsletters_temp RENAME TO newsletters" ) except sqlite3.OperationalError: logger.warn("Unable to update default cron value in newsletters table.") try: c_db.execute( - 'DROP TABLE newsletters_temp' + "DROP TABLE newsletters_temp" ) except: pass # Upgrade library_sections table from earlier versions (remove UNIQUE constraint on section_id) try: - result = c_db.execute('SELECT SQL FROM sqlite_master WHERE type="table" AND name="library_sections"').fetchone() - if 'section_id INTEGER UNIQUE' in result[0]: + result = c_db.execute("SELECT SQL FROM sqlite_master WHERE type='table' AND name='library_sections'").fetchone() + if "section_id INTEGER UNIQUE" in result[0]: logger.debug("Altering database. 
Removing unique constraint on section_id from library_sections table.") c_db.execute( - 'CREATE TABLE library_sections_temp (id INTEGER PRIMARY KEY AUTOINCREMENT, ' - 'server_id TEXT, section_id INTEGER, section_name TEXT, section_type TEXT, ' - 'thumb TEXT, custom_thumb_url TEXT, art TEXT, count INTEGER, parent_count INTEGER, child_count INTEGER, ' - 'do_notify INTEGER DEFAULT 1, do_notify_created INTEGER DEFAULT 1, keep_history INTEGER DEFAULT 1, ' - 'deleted_section INTEGER DEFAULT 0, UNIQUE(server_id, section_id))' + "CREATE TABLE library_sections_temp (id INTEGER PRIMARY KEY AUTOINCREMENT, " + "server_id TEXT, section_id INTEGER, section_name TEXT, section_type TEXT, " + "thumb TEXT, custom_thumb_url TEXT, art TEXT, count INTEGER, parent_count INTEGER, child_count INTEGER, " + "do_notify INTEGER DEFAULT 1, do_notify_created INTEGER DEFAULT 1, keep_history INTEGER DEFAULT 1, " + "deleted_section INTEGER DEFAULT 0, UNIQUE(server_id, section_id))" ) c_db.execute( - 'INSERT INTO library_sections_temp (id, server_id, section_id, section_name, section_type, ' - 'thumb, custom_thumb_url, art, count, parent_count, child_count, do_notify, do_notify_created, ' - 'keep_history, deleted_section) ' - 'SELECT id, server_id, section_id, section_name, section_type, ' - 'thumb, custom_thumb_url, art, count, parent_count, child_count, do_notify, do_notify_created, ' - 'keep_history, deleted_section ' - 'FROM library_sections' + "INSERT INTO library_sections_temp (id, server_id, section_id, section_name, section_type, " + "thumb, custom_thumb_url, art, count, parent_count, child_count, do_notify, do_notify_created, " + "keep_history, deleted_section) " + "SELECT id, server_id, section_id, section_name, section_type, " + "thumb, custom_thumb_url, art, count, parent_count, child_count, do_notify, do_notify_created, " + "keep_history, deleted_section " + "FROM library_sections" ) c_db.execute( - 'DROP TABLE library_sections' + "DROP TABLE library_sections" ) c_db.execute( - 'ALTER TABLE library_sections_temp RENAME TO library_sections' + "ALTER TABLE library_sections_temp RENAME TO library_sections" ) except sqlite3.OperationalError: logger.warn("Unable to remove section_id unique constraint from library_sections.") try: c_db.execute( - 'DROP TABLE library_sections_temp' + "DROP TABLE library_sections_temp" ) except: pass # Upgrade library_sections table from earlier versions (remove duplicated libraries) try: - result = c_db.execute('SELECT * FROM library_sections WHERE server_id = ""').fetchall() + result = c_db.execute("SELECT * FROM library_sections WHERE server_id = ''").fetchall() if len(result) > 0: logger.debug("Altering database. Removing duplicate libraries from library_sections table.") c_db.execute( - 'DELETE FROM library_sections WHERE server_id = ""' + "DELETE FROM library_sections WHERE server_id = ''" ) except sqlite3.OperationalError: logger.warn("Unable to remove duplicate libraries from library_sections table.") # Upgrade library_sections table from earlier versions try: - c_db.execute('SELECT agent FROM library_sections') + c_db.execute("SELECT agent FROM library_sections") except sqlite3.OperationalError: logger.debug("Altering database. 
Updating database table library_sections.") c_db.execute( - 'ALTER TABLE library_sections ADD COLUMN agent TEXT' + "ALTER TABLE library_sections ADD COLUMN agent TEXT" ) # Upgrade library_sections table from earlier versions try: - c_db.execute('SELECT custom_art_url FROM library_sections') + c_db.execute("SELECT custom_art_url FROM library_sections") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table library_sections.") c_db.execute( - 'ALTER TABLE library_sections ADD COLUMN custom_art_url TEXT' + "ALTER TABLE library_sections ADD COLUMN custom_art_url TEXT" ) # Upgrade library_sections table from earlier versions try: - c_db.execute('SELECT is_active FROM library_sections') + c_db.execute("SELECT is_active FROM library_sections") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table library_sections.") c_db.execute( - 'ALTER TABLE library_sections ADD COLUMN is_active INTEGER DEFAULT 1' + "ALTER TABLE library_sections ADD COLUMN is_active INTEGER DEFAULT 1" ) # Upgrade library_sections table from earlier versions try: - result = c_db.execute('SELECT thumb, art FROM library_sections WHERE section_id = ?', + result = c_db.execute("SELECT thumb, art FROM library_sections WHERE section_id = ?", [common.LIVE_TV_SECTION_ID]).fetchone() if result and (not result[0] or not result[1]): logger.debug("Altering database. Updating database table library_sections.") - c_db.execute('UPDATE library_sections SET thumb = ?, art =? WHERE section_id = ?', + c_db.execute("UPDATE library_sections SET thumb = ?, art =? WHERE section_id = ?", [common.DEFAULT_LIVE_TV_THUMB, common.DEFAULT_LIVE_TV_ART_FULL, common.LIVE_TV_SECTION_ID]) @@ -2266,49 +2269,49 @@ def dbcheck(): # Upgrade users table from earlier versions (remove UNIQUE constraint on username) try: - result = c_db.execute('SELECT SQL FROM sqlite_master WHERE type="table" AND name="users"').fetchone() - if 'username TEXT NOT NULL UNIQUE' in result[0]: + result = c_db.execute("SELECT SQL FROM sqlite_master WHERE type='table' AND name='users'").fetchone() + if "username TEXT NOT NULL UNIQUE" in result[0]: logger.debug("Altering database. 
Removing unique constraint on username from users table.") c_db.execute( - 'CREATE TABLE users_temp (id INTEGER PRIMARY KEY AUTOINCREMENT, ' - 'user_id INTEGER DEFAULT NULL UNIQUE, username TEXT NOT NULL, friendly_name TEXT, ' - 'thumb TEXT, custom_avatar_url TEXT, email TEXT, is_home_user INTEGER DEFAULT NULL, ' - 'is_allow_sync INTEGER DEFAULT NULL, is_restricted INTEGER DEFAULT NULL, do_notify INTEGER DEFAULT 1, ' - 'keep_history INTEGER DEFAULT 1, deleted_user INTEGER DEFAULT 0)' + "CREATE TABLE users_temp (id INTEGER PRIMARY KEY AUTOINCREMENT, " + "user_id INTEGER DEFAULT NULL UNIQUE, username TEXT NOT NULL, friendly_name TEXT, " + "thumb TEXT, custom_avatar_url TEXT, email TEXT, is_home_user INTEGER DEFAULT NULL, " + "is_allow_sync INTEGER DEFAULT NULL, is_restricted INTEGER DEFAULT NULL, do_notify INTEGER DEFAULT 1, " + "keep_history INTEGER DEFAULT 1, deleted_user INTEGER DEFAULT 0)" ) c_db.execute( - 'INSERT INTO users_temp (id, user_id, username, friendly_name, thumb, custom_avatar_url, ' - 'email, is_home_user, is_allow_sync, is_restricted, do_notify, keep_history, deleted_user) ' - 'SELECT id, user_id, username, friendly_name, thumb, custom_avatar_url, ' - 'email, is_home_user, is_allow_sync, is_restricted, do_notify, keep_history, deleted_user ' - 'FROM users' + "INSERT INTO users_temp (id, user_id, username, friendly_name, thumb, custom_avatar_url, " + "email, is_home_user, is_allow_sync, is_restricted, do_notify, keep_history, deleted_user) " + "SELECT id, user_id, username, friendly_name, thumb, custom_avatar_url, " + "email, is_home_user, is_allow_sync, is_restricted, do_notify, keep_history, deleted_user " + "FROM users" ) c_db.execute( - 'DROP TABLE users' + "DROP TABLE users" ) c_db.execute( - 'ALTER TABLE users_temp RENAME TO users' + "ALTER TABLE users_temp RENAME TO users" ) except sqlite3.OperationalError: logger.warn("Unable to remove username unique constraint from users.") try: c_db.execute( - 'DROP TABLE users_temp' + "DROP TABLE users_temp" ) except: pass # Upgrade mobile_devices table from earlier versions try: - result = c_db.execute('SELECT SQL FROM sqlite_master WHERE type="table" AND name="mobile_devices"').fetchone() - if 'device_token TEXT NOT NULL UNIQUE' in result[0]: + result = c_db.execute("SELECT SQL FROM sqlite_master WHERE type='table' AND name='mobile_devices'").fetchone() + if "device_token TEXT NOT NULL UNIQUE" in result[0]: logger.debug("Altering database. Dropping and recreating mobile_devices table.") c_db.execute( - 'DROP TABLE mobile_devices' + "DROP TABLE mobile_devices" ) c_db.execute( - 'CREATE TABLE IF NOT EXISTS mobile_devices (id INTEGER PRIMARY KEY AUTOINCREMENT, ' - 'device_id TEXT NOT NULL UNIQUE, device_token TEXT, device_name TEXT, friendly_name TEXT)' + "CREATE TABLE IF NOT EXISTS mobile_devices (id INTEGER PRIMARY KEY AUTOINCREMENT, " + "device_id TEXT NOT NULL UNIQUE, device_token TEXT, device_name TEXT, friendly_name TEXT)" ) except sqlite3.OperationalError: logger.warn("Failed to recreate mobile_devices table.") @@ -2316,408 +2319,408 @@ def dbcheck(): # Upgrade mobile_devices table from earlier versions try: - c_db.execute('SELECT last_seen FROM mobile_devices') + c_db.execute("SELECT last_seen FROM mobile_devices") except sqlite3.OperationalError: logger.debug("Altering database. 
Updating database table mobile_devices.") c_db.execute( - 'ALTER TABLE mobile_devices ADD COLUMN last_seen INTEGER' + "ALTER TABLE mobile_devices ADD COLUMN last_seen INTEGER" ) # Upgrade mobile_devices table from earlier versions try: - c_db.execute('SELECT official FROM mobile_devices') + c_db.execute("SELECT official FROM mobile_devices") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table mobile_devices.") c_db.execute( - 'ALTER TABLE mobile_devices ADD COLUMN official INTEGER DEFAULT 0' + "ALTER TABLE mobile_devices ADD COLUMN official INTEGER DEFAULT 0" ) # Update official mobile device flag - for device_id, in c_db.execute('SELECT device_id FROM mobile_devices').fetchall(): - c_db.execute('UPDATE mobile_devices SET official = ? WHERE device_id = ?', + for device_id, in c_db.execute("SELECT device_id FROM mobile_devices").fetchall(): + c_db.execute("UPDATE mobile_devices SET official = ? WHERE device_id = ?", [mobile_app.validate_onesignal_id(device_id), device_id]) # Upgrade mobile_devices table from earlier versions try: - c_db.execute('SELECT onesignal_id FROM mobile_devices') + c_db.execute("SELECT onesignal_id FROM mobile_devices") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table mobile_devices.") c_db.execute( - 'ALTER TABLE mobile_devices ADD COLUMN onesignal_id TEXT' + "ALTER TABLE mobile_devices ADD COLUMN onesignal_id TEXT" ) # Upgrade mobile_devices table from earlier versions try: - c_db.execute('SELECT platform FROM mobile_devices') + c_db.execute("SELECT platform FROM mobile_devices") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table mobile_devices.") c_db.execute( - 'ALTER TABLE mobile_devices ADD COLUMN platform TEXT' + "ALTER TABLE mobile_devices ADD COLUMN platform TEXT" ) c_db.execute( - 'ALTER TABLE mobile_devices ADD COLUMN version TEXT' + "ALTER TABLE mobile_devices ADD COLUMN version TEXT" ) # Update mobile device platforms for device_id, in c_db.execute( - 'SELECT device_id FROM mobile_devices WHERE official > 0').fetchall(): - c_db.execute('UPDATE mobile_devices SET platform = ? WHERE device_id = ?', - ['android', device_id]) + "SELECT device_id FROM mobile_devices WHERE official > 0").fetchall(): + c_db.execute("UPDATE mobile_devices SET platform = ? WHERE device_id = ?", + ["android", device_id]) # Upgrade notifiers table from earlier versions try: - c_db.execute('SELECT custom_conditions FROM notifiers') + c_db.execute("SELECT custom_conditions FROM notifiers") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table notifiers.") c_db.execute( - 'ALTER TABLE notifiers ADD COLUMN custom_conditions TEXT' + "ALTER TABLE notifiers ADD COLUMN custom_conditions TEXT" ) c_db.execute( - 'ALTER TABLE notifiers ADD COLUMN custom_conditions_logic TEXT' + "ALTER TABLE notifiers ADD COLUMN custom_conditions_logic TEXT" ) # Upgrade notifiers table from earlier versions try: - c_db.execute('SELECT on_change FROM notifiers') + c_db.execute("SELECT on_change FROM notifiers") except sqlite3.OperationalError: logger.debug("Altering database. 
Updating database table notifiers.") c_db.execute( - 'ALTER TABLE notifiers ADD COLUMN on_change INTEGER DEFAULT 0' + "ALTER TABLE notifiers ADD COLUMN on_change INTEGER DEFAULT 0" ) c_db.execute( - 'ALTER TABLE notifiers ADD COLUMN on_change_subject TEXT' + "ALTER TABLE notifiers ADD COLUMN on_change_subject TEXT" ) c_db.execute( - 'ALTER TABLE notifiers ADD COLUMN on_change_body TEXT' + "ALTER TABLE notifiers ADD COLUMN on_change_body TEXT" ) # Upgrade notifiers table from earlier versions try: - c_db.execute('SELECT on_plexpydbcorrupt FROM notifiers') + c_db.execute("SELECT on_plexpydbcorrupt FROM notifiers") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table notifiers.") c_db.execute( - 'ALTER TABLE notifiers ADD COLUMN on_plexpydbcorrupt INTEGER DEFAULT 0' + "ALTER TABLE notifiers ADD COLUMN on_plexpydbcorrupt INTEGER DEFAULT 0" ) c_db.execute( - 'ALTER TABLE notifiers ADD COLUMN on_plexpydbcorrupt_subject TEXT' + "ALTER TABLE notifiers ADD COLUMN on_plexpydbcorrupt_subject TEXT" ) c_db.execute( - 'ALTER TABLE notifiers ADD COLUMN on_plexpydbcorrupt_body TEXT' + "ALTER TABLE notifiers ADD COLUMN on_plexpydbcorrupt_body TEXT" ) # Upgrade notifiers table from earlier versions try: - c_db.execute('SELECT on_error FROM notifiers') + c_db.execute("SELECT on_error FROM notifiers") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table notifiers.") c_db.execute( - 'ALTER TABLE notifiers ADD COLUMN on_error INTEGER DEFAULT 0' + "ALTER TABLE notifiers ADD COLUMN on_error INTEGER DEFAULT 0" ) c_db.execute( - 'ALTER TABLE notifiers ADD COLUMN on_error_subject TEXT' + "ALTER TABLE notifiers ADD COLUMN on_error_subject TEXT" ) c_db.execute( - 'ALTER TABLE notifiers ADD COLUMN on_error_body TEXT' + "ALTER TABLE notifiers ADD COLUMN on_error_body TEXT" ) # Upgrade notifiers table from earlier versions try: - c_db.execute('SELECT on_intro FROM notifiers') + c_db.execute("SELECT on_intro FROM notifiers") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table notifiers.") c_db.execute( - 'ALTER TABLE notifiers ADD COLUMN on_intro INTEGER DEFAULT 0' + "ALTER TABLE notifiers ADD COLUMN on_intro INTEGER DEFAULT 0" ) c_db.execute( - 'ALTER TABLE notifiers ADD COLUMN on_intro_subject TEXT' + "ALTER TABLE notifiers ADD COLUMN on_intro_subject TEXT" ) c_db.execute( - 'ALTER TABLE notifiers ADD COLUMN on_intro_body TEXT' + "ALTER TABLE notifiers ADD COLUMN on_intro_body TEXT" ) c_db.execute( - 'ALTER TABLE notifiers ADD COLUMN on_credits INTEGER DEFAULT 0' + "ALTER TABLE notifiers ADD COLUMN on_credits INTEGER DEFAULT 0" ) c_db.execute( - 'ALTER TABLE notifiers ADD COLUMN on_credits_subject TEXT' + "ALTER TABLE notifiers ADD COLUMN on_credits_subject TEXT" ) c_db.execute( - 'ALTER TABLE notifiers ADD COLUMN on_credits_body TEXT' + "ALTER TABLE notifiers ADD COLUMN on_credits_body TEXT" ) # Upgrade notifiers table from earlier versions try: - c_db.execute('SELECT on_commercial FROM notifiers') + c_db.execute("SELECT on_commercial FROM notifiers") except sqlite3.OperationalError: logger.debug("Altering database. 
Updating database table notifiers.") c_db.execute( - 'ALTER TABLE notifiers ADD COLUMN on_commercial INTEGER DEFAULT 0' + "ALTER TABLE notifiers ADD COLUMN on_commercial INTEGER DEFAULT 0" ) c_db.execute( - 'ALTER TABLE notifiers ADD COLUMN on_commercial_subject TEXT' + "ALTER TABLE notifiers ADD COLUMN on_commercial_subject TEXT" ) c_db.execute( - 'ALTER TABLE notifiers ADD COLUMN on_commercial_body TEXT' + "ALTER TABLE notifiers ADD COLUMN on_commercial_body TEXT" ) # Upgrade tvmaze_lookup table from earlier versions try: - c_db.execute('SELECT rating_key FROM tvmaze_lookup') + c_db.execute("SELECT rating_key FROM tvmaze_lookup") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table tvmaze_lookup.") c_db.execute( - 'ALTER TABLE tvmaze_lookup ADD COLUMN rating_key INTEGER' + "ALTER TABLE tvmaze_lookup ADD COLUMN rating_key INTEGER" ) c_db.execute( - 'DROP INDEX IF EXISTS idx_tvmaze_lookup_thetvdb_id' + "DROP INDEX IF EXISTS idx_tvmaze_lookup_thetvdb_id" ) c_db.execute( - 'DROP INDEX IF EXISTS idx_tvmaze_lookup_imdb_id' + "DROP INDEX IF EXISTS idx_tvmaze_lookup_imdb_id" ) # Upgrade themoviedb_lookup table from earlier versions try: - c_db.execute('SELECT rating_key FROM themoviedb_lookup') + c_db.execute("SELECT rating_key FROM themoviedb_lookup") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table themoviedb_lookup.") c_db.execute( - 'ALTER TABLE themoviedb_lookup ADD COLUMN rating_key INTEGER' + "ALTER TABLE themoviedb_lookup ADD COLUMN rating_key INTEGER" ) c_db.execute( - 'DROP INDEX IF EXISTS idx_themoviedb_lookup_thetvdb_id' + "DROP INDEX IF EXISTS idx_themoviedb_lookup_thetvdb_id" ) c_db.execute( - 'DROP INDEX IF EXISTS idx_themoviedb_lookup_imdb_id' + "DROP INDEX IF EXISTS idx_themoviedb_lookup_imdb_id" ) # Upgrade user_login table from earlier versions try: - c_db.execute('SELECT success FROM user_login') + c_db.execute("SELECT success FROM user_login") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table user_login.") c_db.execute( - 'ALTER TABLE user_login ADD COLUMN success INTEGER DEFAULT 1' + "ALTER TABLE user_login ADD COLUMN success INTEGER DEFAULT 1" ) # Upgrade user_login table from earlier versions try: - c_db.execute('SELECT expiry FROM user_login') + c_db.execute("SELECT expiry FROM user_login") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table user_login.") c_db.execute( - 'ALTER TABLE user_login ADD COLUMN expiry TEXT' + "ALTER TABLE user_login ADD COLUMN expiry TEXT" ) c_db.execute( - 'ALTER TABLE user_login ADD COLUMN jwt_token TEXT' + "ALTER TABLE user_login ADD COLUMN jwt_token TEXT" ) # Rename notifiers in the database - result = c_db.execute('SELECT agent_label FROM notifiers ' - 'WHERE agent_label = "XBMC" ' - 'OR agent_label = "OSX Notify" ' - 'OR agent_name = "androidapp"').fetchone() + result = c_db.execute("SELECT agent_label FROM notifiers " + "WHERE agent_label = 'XBMC' " + "OR agent_label = 'OSX Notify' " + "OR agent_name = 'androidapp'").fetchone() if result: logger.debug("Altering database. 
Renaming notifiers.") c_db.execute( - 'UPDATE notifiers SET agent_label = "Kodi" WHERE agent_label = "XBMC"' + "UPDATE notifiers SET agent_label = 'Kodi' WHERE agent_label = 'XBMC'" ) c_db.execute( - 'UPDATE notifiers SET agent_label = "macOS Notification Center" WHERE agent_label = "OSX Notify"' + "UPDATE notifiers SET agent_label = 'macOS Notification Center' WHERE agent_label = 'OSX Notify'" ) c_db.execute( - 'UPDATE notifiers SET agent_name = "remoteapp", agent_label = "Tautulli Remote App" ' - 'WHERE agent_name = "androidapp"' + "UPDATE notifiers SET agent_name = 'remoteapp', agent_label = 'Tautulli Remote App' " + "WHERE agent_name = 'androidapp'" ) # Upgrade exports table from earlier versions try: - c_db.execute('SELECT thumb_level FROM exports') + c_db.execute("SELECT thumb_level FROM exports") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table exports.") c_db.execute( - 'ALTER TABLE exports ADD COLUMN thumb_level INTEGER DEFAULT 0' + "ALTER TABLE exports ADD COLUMN thumb_level INTEGER DEFAULT 0" ) c_db.execute( - 'UPDATE exports SET thumb_level = 9 WHERE include_thumb = 1' + "UPDATE exports SET thumb_level = 9 WHERE include_thumb = 1" ) c_db.execute( - 'ALTER TABLE exports ADD COLUMN art_level INTEGER DEFAULT 0' + "ALTER TABLE exports ADD COLUMN art_level INTEGER DEFAULT 0" ) c_db.execute( - 'UPDATE exports SET art_level = 9 WHERE include_art = 1' + "UPDATE exports SET art_level = 9 WHERE include_art = 1" ) # Upgrade exports table from earlier versions try: - c_db.execute('SELECT title FROM exports') + c_db.execute("SELECT title FROM exports") except sqlite3.OperationalError: logger.debug("Altering database. Updating database table exports.") c_db.execute( - 'ALTER TABLE exports ADD COLUMN title TEXT' + "ALTER TABLE exports ADD COLUMN title TEXT" ) c_db.execute( - 'ALTER TABLE exports ADD COLUMN individual_files INTEGER DEFAULT 0' + "ALTER TABLE exports ADD COLUMN individual_files INTEGER DEFAULT 0" ) # Upgrade exports table from earlier versions try: - c_db.execute('SELECT total_items FROM exports') + c_db.execute("SELECT total_items FROM exports") except sqlite3.OperationalError: logger.debug("Altering database. 
Updating database table exports.") c_db.execute( - 'ALTER TABLE exports ADD COLUMN exported_items INTEGER DEFAULT 0' + "ALTER TABLE exports ADD COLUMN exported_items INTEGER DEFAULT 0" ) c_db.execute( - 'ALTER TABLE exports ADD COLUMN total_items INTEGER DEFAULT 0' + "ALTER TABLE exports ADD COLUMN total_items INTEGER DEFAULT 0" ) # Fix unique constraints try: - c_db.execute('DELETE FROM tvmaze_lookup ' - 'WHERE id NOT IN (SELECT MIN(id) FROM tvmaze_lookup GROUP BY rating_key)') + c_db.execute("DELETE FROM tvmaze_lookup " + "WHERE id NOT IN (SELECT MIN(id) FROM tvmaze_lookup GROUP BY rating_key)") except sqlite3.OperationalError: pass try: - c_db.execute('DELETE FROM themoviedb_lookup ' - 'WHERE id NOT IN (SELECT MIN(id) FROM themoviedb_lookup GROUP BY rating_key)') + c_db.execute("DELETE FROM themoviedb_lookup " + "WHERE id NOT IN (SELECT MIN(id) FROM themoviedb_lookup GROUP BY rating_key)") except sqlite3.OperationalError: pass try: - c_db.execute('DELETE FROM musicbrainz_lookup ' - 'WHERE id NOT IN (SELECT MIN(id) FROM musicbrainz_lookup GROUP BY rating_key)') + c_db.execute("DELETE FROM musicbrainz_lookup " + "WHERE id NOT IN (SELECT MIN(id) FROM musicbrainz_lookup GROUP BY rating_key)") except sqlite3.OperationalError: pass try: - c_db.execute('DELETE FROM image_hash_lookup ' - 'WHERE id NOT IN (SELECT MIN(id) FROM image_hash_lookup GROUP BY img_hash)') + c_db.execute("DELETE FROM image_hash_lookup " + "WHERE id NOT IN (SELECT MIN(id) FROM image_hash_lookup GROUP BY img_hash)") except sqlite3.OperationalError: pass try: - c_db.execute('DELETE FROM cloudinary_lookup ' - 'WHERE id NOT IN (SELECT MIN(id) FROM cloudinary_lookup GROUP BY img_hash)') + c_db.execute("DELETE FROM cloudinary_lookup " + "WHERE id NOT IN (SELECT MIN(id) FROM cloudinary_lookup GROUP BY img_hash)") except sqlite3.OperationalError: pass try: - c_db.execute('DELETE FROM imgur_lookup ' - 'WHERE id NOT IN (SELECT MIN(id) FROM imgur_lookup GROUP BY img_hash)') + c_db.execute("DELETE FROM imgur_lookup " + "WHERE id NOT IN (SELECT MIN(id) FROM imgur_lookup GROUP BY img_hash)") except sqlite3.OperationalError: pass # Add "Local" user to database as default unauthenticated user. - result = c_db.execute('SELECT id FROM users WHERE username = "Local"') + result = c_db.execute("SELECT id FROM users WHERE username = 'Local'") if not result.fetchone(): logger.debug("User 'Local' does not exist. 
Adding user.") - c_db.execute('INSERT INTO users (user_id, username) VALUES (0, "Local")') + c_db.execute("INSERT INTO users (user_id, username) VALUES (0, 'Local')") # Create session_history table indices c_db.execute( - 'CREATE INDEX IF NOT EXISTS "idx_session_history_media_type" ' - 'ON "session_history" ("media_type")' + "CREATE INDEX IF NOT EXISTS idx_session_history_media_type " + "ON session_history (media_type)" ) c_db.execute( - 'CREATE INDEX IF NOT EXISTS "idx_session_history_media_type_stopped" ' - 'ON "session_history" ("media_type", "stopped" ASC)' + "CREATE INDEX IF NOT EXISTS idx_session_history_media_type_stopped " + "ON session_history (media_type, stopped ASC)" ) c_db.execute( - 'CREATE INDEX IF NOT EXISTS "idx_session_history_rating_key" ' - 'ON "session_history" ("rating_key")' + "CREATE INDEX IF NOT EXISTS idx_session_history_rating_key " + "ON session_history (rating_key)" ) c_db.execute( - 'CREATE INDEX IF NOT EXISTS "idx_session_history_parent_rating_key" ' - 'ON "session_history" ("parent_rating_key")' + "CREATE INDEX IF NOT EXISTS idx_session_history_parent_rating_key " + "ON session_history (parent_rating_key)" ) c_db.execute( - 'CREATE INDEX IF NOT EXISTS "idx_session_history_grandparent_rating_key" ' - 'ON "session_history" ("grandparent_rating_key")' + "CREATE INDEX IF NOT EXISTS idx_session_history_grandparent_rating_key " + "ON session_history (grandparent_rating_key)" ) c_db.execute( - 'CREATE INDEX IF NOT EXISTS "idx_session_history_user" ' - 'ON "session_history" ("user")' + "CREATE INDEX IF NOT EXISTS idx_session_history_user " + "ON session_history (user)" ) c_db.execute( - 'CREATE INDEX IF NOT EXISTS "idx_session_history_user_id" ' - 'ON "session_history" ("user_id")' + "CREATE INDEX IF NOT EXISTS idx_session_history_user_id " + "ON session_history (user_id)" ) c_db.execute( - 'CREATE INDEX IF NOT EXISTS "idx_session_history_user_id_stopped" ' - 'ON "session_history" ("user_id", "stopped" ASC)' + "CREATE INDEX IF NOT EXISTS idx_session_history_user_id_stopped " + "ON session_history (user_id, stopped ASC)" ) c_db.execute( - 'CREATE INDEX IF NOT EXISTS "idx_session_history_section_id" ' - 'ON "session_history" ("section_id")' + "CREATE INDEX IF NOT EXISTS idx_session_history_section_id " + "ON session_history (section_id)" ) c_db.execute( - 'CREATE INDEX IF NOT EXISTS "idx_session_history_section_id_stopped" ' - 'ON "session_history" ("section_id", "stopped" ASC)' + "CREATE INDEX IF NOT EXISTS idx_session_history_section_id_stopped " + "ON session_history (section_id, stopped ASC)" ) c_db.execute( - 'CREATE INDEX IF NOT EXISTS "idx_session_history_reference_id" ' - 'ON "session_history" ("reference_id" ASC)' + "CREATE INDEX IF NOT EXISTS idx_session_history_reference_id " + "ON session_history (reference_id ASC)" ) # Create session_history_metadata table indices c_db.execute( - 'CREATE INDEX IF NOT EXISTS "idx_session_history_metadata_rating_key" ' - 'ON "session_history_metadata" ("rating_key")' + "CREATE INDEX IF NOT EXISTS idx_session_history_metadata_rating_key " + "ON session_history_metadata (rating_key)" ) c_db.execute( - 'CREATE INDEX IF NOT EXISTS "idx_session_history_metadata_guid" ' - 'ON "session_history_metadata" ("guid")' + "CREATE INDEX IF NOT EXISTS idx_session_history_metadata_guid " + "ON session_history_metadata (guid)" ) c_db.execute( - 'CREATE INDEX IF NOT EXISTS "idx_session_history_metadata_live" ' - 'ON "session_history_metadata" ("live")' + "CREATE INDEX IF NOT EXISTS idx_session_history_metadata_live " + "ON 
session_history_metadata (live)" ) # Create session_history_media_info table indices c_db.execute( - 'CREATE INDEX IF NOT EXISTS "idx_session_history_media_info_transcode_decision" ' - 'ON "session_history_media_info" ("transcode_decision")' + "CREATE INDEX IF NOT EXISTS idx_session_history_media_info_transcode_decision " + "ON session_history_media_info (transcode_decision)" ) # Create lookup table indices c_db.execute( - 'CREATE UNIQUE INDEX IF NOT EXISTS "idx_tvmaze_lookup" ' - 'ON "tvmaze_lookup" ("rating_key")' + "CREATE UNIQUE INDEX IF NOT EXISTS idx_tvmaze_lookup " + "ON tvmaze_lookup (rating_key)" ) c_db.execute( - 'CREATE UNIQUE INDEX IF NOT EXISTS "idx_themoviedb_lookup" ' - 'ON "themoviedb_lookup" ("rating_key")' + "CREATE UNIQUE INDEX IF NOT EXISTS idx_themoviedb_lookup " + "ON themoviedb_lookup (rating_key)" ) c_db.execute( - 'CREATE UNIQUE INDEX IF NOT EXISTS "idx_musicbrainz_lookup" ' - 'ON "musicbrainz_lookup" ("rating_key")' + "CREATE UNIQUE INDEX IF NOT EXISTS idx_musicbrainz_lookup " + "ON musicbrainz_lookup (rating_key)" ) c_db.execute( - 'CREATE UNIQUE INDEX IF NOT EXISTS "idx_image_hash_lookup" ' - 'ON "image_hash_lookup" ("img_hash")' + "CREATE UNIQUE INDEX IF NOT EXISTS idx_image_hash_lookup " + "ON image_hash_lookup (img_hash)" ) c_db.execute( - 'CREATE UNIQUE INDEX IF NOT EXISTS "idx_cloudinary_lookup" ' - 'ON "cloudinary_lookup" ("img_hash")' + "CREATE UNIQUE INDEX IF NOT EXISTS idx_cloudinary_lookup " + "ON cloudinary_lookup (img_hash)" ) c_db.execute( - 'CREATE UNIQUE INDEX IF NOT EXISTS "idx_imgur_lookup" ' - 'ON "imgur_lookup" ("img_hash")' + "CREATE UNIQUE INDEX IF NOT EXISTS idx_imgur_lookup " + "ON imgur_lookup (img_hash)" ) c_db.execute( - 'CREATE UNIQUE INDEX IF NOT EXISTS "idx_sessions_continued" ' - 'ON "sessions_continued" ("user_id", "machine_id", "media_type")' + "CREATE UNIQUE INDEX IF NOT EXISTS idx_sessions_continued " + "ON sessions_continued (user_id, machine_id, media_type)" ) # Set database version - result = c_db.execute('SELECT value FROM version_info WHERE key = "version"').fetchone() + result = c_db.execute("SELECT value FROM version_info WHERE key = 'version'").fetchone() if not result: c_db.execute( - 'INSERT OR REPLACE INTO version_info (key, value) VALUES ("version", ?)', + "INSERT OR REPLACE INTO version_info (key, value) VALUES ('version', ?)", [common.RELEASE] ) elif helpers.version_to_tuple(result[0]) < helpers.version_to_tuple(common.RELEASE): c_db.execute( - 'UPDATE version_info SET value = ? WHERE key = "version"', + "UPDATE version_info SET value = ? WHERE key = 'version'", [common.RELEASE] ) @@ -2727,9 +2730,9 @@ def dbcheck(): # Migrate poster_urls to imgur_lookup table try: db = database.MonitorDatabase() - result = db.select('SELECT SQL FROM sqlite_master WHERE type="table" AND name="poster_urls"') + result = db.select("SELECT SQL FROM sqlite_master WHERE type='table' AND name='poster_urls'") if result: - result = db.select('SELECT * FROM poster_urls') + result = db.select("SELECT * FROM poster_urls") logger.debug("Altering database. Updating database table imgur_lookup.") data_factory = datafactory.DataFactory() diff --git a/plexpy/activity_pinger.py b/plexpy/activity_pinger.py index f1aac594..f349268c 100644 --- a/plexpy/activity_pinger.py +++ b/plexpy/activity_pinger.py @@ -99,22 +99,22 @@ def check_active_sessions(ws_request=False): # Using the set config parameter as the interval, probably not the most accurate but # it will have to do for now. If it's a websocket request don't use this method. 
paused_counter = int(stream['paused_counter']) + plexpy.CONFIG.MONITORING_INTERVAL - monitor_db.action('UPDATE sessions SET paused_counter = ? ' - 'WHERE session_key = ? AND rating_key = ?', + monitor_db.action("UPDATE sessions SET paused_counter = ? " + "WHERE session_key = ? AND rating_key = ?", [paused_counter, stream['session_key'], stream['rating_key']]) if session['state'] == 'buffering' and plexpy.CONFIG.BUFFER_THRESHOLD > 0: # The stream is buffering so we need to increment the buffer_count # We're going just increment on every monitor ping, # would be difficult to keep track otherwise - monitor_db.action('UPDATE sessions SET buffer_count = buffer_count + 1 ' - 'WHERE session_key = ? AND rating_key = ?', + monitor_db.action("UPDATE sessions SET buffer_count = buffer_count + 1 " + "WHERE session_key = ? AND rating_key = ?", [stream['session_key'], stream['rating_key']]) # Check the current buffer count and last buffer to determine if we should notify - buffer_values = monitor_db.select('SELECT buffer_count, buffer_last_triggered ' - 'FROM sessions ' - 'WHERE session_key = ? AND rating_key = ?', + buffer_values = monitor_db.select("SELECT buffer_count, buffer_last_triggered " + "FROM sessions " + "WHERE session_key = ? AND rating_key = ?", [stream['session_key'], stream['rating_key']]) if buffer_values[0]['buffer_count'] >= plexpy.CONFIG.BUFFER_THRESHOLD: @@ -125,9 +125,9 @@ def check_active_sessions(ws_request=False): logger.info("Tautulli Monitor :: User '%s' has triggered a buffer warning." % stream['user']) # Set the buffer trigger time - monitor_db.action('UPDATE sessions ' - 'SET buffer_last_triggered = strftime("%s","now") ' - 'WHERE session_key = ? AND rating_key = ?', + monitor_db.action("UPDATE sessions " + "SET buffer_last_triggered = strftime('%s', 'now') " + "WHERE session_key = ? AND rating_key = ?", [stream['session_key'], stream['rating_key']]) plexpy.NOTIFY_QUEUE.put({'stream_data': stream.copy(), 'notify_action': 'on_buffer'}) @@ -139,9 +139,9 @@ def check_active_sessions(ws_request=False): logger.info("Tautulli Monitor :: User '%s' has triggered multiple buffer warnings." % stream['user']) # Set the buffer trigger time - monitor_db.action('UPDATE sessions ' - 'SET buffer_last_triggered = strftime("%s","now") ' - 'WHERE session_key = ? AND rating_key = ?', + monitor_db.action("UPDATE sessions " + "SET buffer_last_triggered = strftime('%s', 'now') " + "WHERE session_key = ? AND rating_key = ?", [stream['session_key'], stream['rating_key']]) plexpy.NOTIFY_QUEUE.put({'stream_data': stream.copy(), 'notify_action': 'on_buffer'}) @@ -171,8 +171,8 @@ def check_active_sessions(ws_request=False): if not stream['stopped']: # Set the stream stop time stream['stopped'] = helpers.timestamp() - monitor_db.action('UPDATE sessions SET stopped = ?, state = ? ' - 'WHERE session_key = ? AND rating_key = ?', + monitor_db.action("UPDATE sessions SET stopped = ?, state = ? " + "WHERE session_key = ? 
AND rating_key = ?", [stream['stopped'], 'stopped', stream['session_key'], stream['rating_key']]) progress_percent = helpers.get_percent(stream['view_offset'], stream['duration']) diff --git a/plexpy/activity_processor.py b/plexpy/activity_processor.py index 71b6e3e0..588e91ce 100644 --- a/plexpy/activity_processor.py +++ b/plexpy/activity_processor.py @@ -331,10 +331,10 @@ class ActivityProcessor(object): if session['live']: # Check if we should group the session, select the last guid from the user - query = 'SELECT session_history.id, session_history_metadata.guid, session_history.reference_id ' \ - 'FROM session_history ' \ - 'JOIN session_history_metadata ON session_history.id == session_history_metadata.id ' \ - 'WHERE session_history.user_id = ? ORDER BY session_history.id DESC LIMIT 1 ' + query = "SELECT session_history.id, session_history_metadata.guid, session_history.reference_id " \ + "FROM session_history " \ + "JOIN session_history_metadata ON session_history.id == session_history_metadata.id " \ + "WHERE session_history.user_id = ? ORDER BY session_history.id DESC LIMIT 1 " args = [session['user_id']] @@ -351,8 +351,8 @@ class ActivityProcessor(object): else: # Check if we should group the session, select the last two rows from the user - query = 'SELECT id, rating_key, view_offset, reference_id FROM session_history ' \ - 'WHERE user_id = ? AND rating_key = ? ORDER BY id DESC LIMIT 2 ' + query = "SELECT id, rating_key, view_offset, reference_id FROM session_history " \ + "WHERE user_id = ? AND rating_key = ? ORDER BY id DESC LIMIT 2 " args = [session['user_id'], session['rating_key']] @@ -375,7 +375,7 @@ class ActivityProcessor(object): marker_first, marker_final ) - query = 'UPDATE session_history SET reference_id = ? WHERE id = ? ' + query = "UPDATE session_history SET reference_id = ? WHERE id = ? " # If previous session view offset less than watched percent, # and new session view offset is greater, @@ -547,12 +547,12 @@ class ActivityProcessor(object): return session['id'] def get_sessions(self, user_id=None, ip_address=None): - query = 'SELECT * FROM sessions' + query = "SELECT * FROM sessions" args = [] if str(user_id).isdigit(): - ip = ' GROUP BY ip_address' if ip_address else '' - query += ' WHERE user_id = ?' + ip + ip = " GROUP BY ip_address" if ip_address else "" + query += " WHERE user_id = ?" + ip args.append(user_id) sessions = self.db.select(query, args) @@ -560,8 +560,8 @@ class ActivityProcessor(object): def get_session_by_key(self, session_key=None): if str(session_key).isdigit(): - session = self.db.select_single('SELECT * FROM sessions ' - 'WHERE session_key = ? ', + session = self.db.select_single("SELECT * FROM sessions " + "WHERE session_key = ? ", args=[session_key]) if session: return session @@ -570,8 +570,8 @@ class ActivityProcessor(object): def get_session_by_id(self, session_id=None): if session_id: - session = self.db.select_single('SELECT * FROM sessions ' - 'WHERE session_id = ? ', + session = self.db.select_single("SELECT * FROM sessions " + "WHERE session_id = ? 
", args=[session_id]) if session: return session @@ -597,15 +597,15 @@ class ActivityProcessor(object): def delete_session(self, session_key=None, row_id=None): if str(session_key).isdigit(): - self.db.action('DELETE FROM sessions WHERE session_key = ?', [session_key]) + self.db.action("DELETE FROM sessions WHERE session_key = ?", [session_key]) elif str(row_id).isdigit(): - self.db.action('DELETE FROM sessions WHERE id = ?', [row_id]) + self.db.action("DELETE FROM sessions WHERE id = ?", [row_id]) def set_session_last_paused(self, session_key=None, timestamp=None): if str(session_key).isdigit(): - result = self.db.select('SELECT last_paused, paused_counter ' - 'FROM sessions ' - 'WHERE session_key = ?', args=[session_key]) + result = self.db.select("SELECT last_paused, paused_counter " + "FROM sessions " + "WHERE session_key = ?", args=[session_key]) paused_counter = None for session in result: @@ -626,15 +626,15 @@ class ActivityProcessor(object): def increment_session_buffer_count(self, session_key=None): if str(session_key).isdigit(): - self.db.action('UPDATE sessions SET buffer_count = buffer_count + 1 ' - 'WHERE session_key = ?', + self.db.action("UPDATE sessions SET buffer_count = buffer_count + 1 " + "WHERE session_key = ?", [session_key]) def get_session_buffer_count(self, session_key=None): if str(session_key).isdigit(): - buffer_count = self.db.select_single('SELECT buffer_count ' - 'FROM sessions ' - 'WHERE session_key = ?', + buffer_count = self.db.select_single("SELECT buffer_count " + "FROM sessions " + "WHERE session_key = ?", [session_key]) if buffer_count: return buffer_count['buffer_count'] @@ -643,15 +643,15 @@ class ActivityProcessor(object): def set_session_buffer_trigger_time(self, session_key=None): if str(session_key).isdigit(): - self.db.action('UPDATE sessions SET buffer_last_triggered = strftime("%s","now") ' - 'WHERE session_key = ?', + self.db.action("UPDATE sessions SET buffer_last_triggered = strftime('%s', 'now') " + "WHERE session_key = ?", [session_key]) def get_session_buffer_trigger_time(self, session_key=None): if str(session_key).isdigit(): - last_time = self.db.select_single('SELECT buffer_last_triggered ' - 'FROM sessions ' - 'WHERE session_key = ?', + last_time = self.db.select_single("SELECT buffer_last_triggered " + "FROM sessions " + "WHERE session_key = ?", [session_key]) if last_time: return last_time['buffer_last_triggered'] @@ -660,12 +660,12 @@ class ActivityProcessor(object): def set_temp_stopped(self): stopped_time = helpers.timestamp() - self.db.action('UPDATE sessions SET stopped = ?', [stopped_time]) + self.db.action("UPDATE sessions SET stopped = ?", [stopped_time]) def increment_write_attempts(self, session_key=None): if str(session_key).isdigit(): session = self.get_session_by_key(session_key=session_key) - self.db.action('UPDATE sessions SET write_attempts = ? WHERE session_key = ?', + self.db.action("UPDATE sessions SET write_attempts = ? WHERE session_key = ?", [session['write_attempts'] + 1, session_key]) def set_marker(self, session_key=None, marker_idx=None, marker_type=None): @@ -674,13 +674,13 @@ class ActivityProcessor(object): int(marker_type == 'commercial'), int(marker_type == 'credits') ] - self.db.action('UPDATE sessions SET intro = ?, commercial = ?, credits = ?, marker = ? ' - 'WHERE session_key = ?', + self.db.action("UPDATE sessions SET intro = ?, commercial = ?, credits = ?, marker = ? 
" + "WHERE session_key = ?", marker_args + [marker_idx, session_key]) def set_watched(self, session_key=None): - self.db.action('UPDATE sessions SET watched = ? ' - 'WHERE session_key = ?', + self.db.action("UPDATE sessions SET watched = ? " + "WHERE session_key = ?", [1, session_key]) def write_continued_session(self, user_id=None, machine_id=None, media_type=None, stopped=None): @@ -689,9 +689,9 @@ class ActivityProcessor(object): self.db.upsert(table_name='sessions_continued', key_dict=keys, value_dict=values) def is_initial_stream(self, user_id=None, machine_id=None, media_type=None, started=None): - last_session = self.db.select_single('SELECT stopped ' - 'FROM sessions_continued ' - 'WHERE user_id = ? AND machine_id = ? AND media_type = ? ' - 'ORDER BY stopped DESC', + last_session = self.db.select_single("SELECT stopped " + "FROM sessions_continued " + "WHERE user_id = ? AND machine_id = ? AND media_type = ? " + "ORDER BY stopped DESC", [user_id, machine_id, media_type]) return int(started - last_session.get('stopped', 0) >= plexpy.CONFIG.NOTIFY_CONTINUED_SESSION_THRESHOLD) diff --git a/plexpy/database.py b/plexpy/database.py index 72ce7782..859d9274 100644 --- a/plexpy/database.py +++ b/plexpy/database.py @@ -54,7 +54,7 @@ def validate_database(database=None): return 'Uncaught exception' try: - connection.execute('SELECT started from session_history') + connection.execute("SELECT started from session_history") connection.close() except (sqlite3.OperationalError, sqlite3.DatabaseError, ValueError) as e: logger.error("Tautulli Database :: Invalid database specified: %s", e) @@ -92,11 +92,11 @@ def import_tautulli_db(database=None, method=None, backup=False): set_is_importing(True) db = MonitorDatabase() - db.connection.execute('BEGIN IMMEDIATE') - db.connection.execute('ATTACH ? AS import_db', [database]) + db.connection.execute("BEGIN IMMEDIATE") + db.connection.execute("ATTACH ? 
AS import_db", [database]) try: - version_info = db.select_single('SELECT * FROM import_db.version_info WHERE key = "version"') + version_info = db.select_single("SELECT * FROM import_db.version_info WHERE key = 'version'") import_db_version = version_info['value'] except (sqlite3.OperationalError, KeyError): import_db_version = 'v2.6.10' @@ -105,7 +105,7 @@ def import_tautulli_db(database=None, method=None, backup=False): import_db_version = helpers.version_to_tuple(import_db_version) # Get the current number of used ids in the session_history table - session_history_seq = db.select_single('SELECT seq FROM sqlite_sequence WHERE name = "session_history"') + session_history_seq = db.select_single("SELECT seq FROM sqlite_sequence WHERE name = 'session_history'") session_history_rows = session_history_seq.get('seq', 0) session_history_tables = ('session_history', 'session_history_metadata', 'session_history_media_info') @@ -113,11 +113,11 @@ def import_tautulli_db(database=None, method=None, backup=False): if method == 'merge': logger.info("Tautulli Database :: Creating temporary database tables to re-index grouped session history.") for table_name in session_history_tables: - db.action('CREATE TABLE {table}_copy AS SELECT * FROM import_db.{table}'.format(table=table_name)) - db.action('UPDATE {table}_copy SET id = id + ?'.format(table=table_name), + db.action("CREATE TABLE {table}_copy AS SELECT * FROM import_db.{table}".format(table=table_name)) + db.action("UPDATE {table}_copy SET id = id + ?".format(table=table_name), [session_history_rows]) if table_name == 'session_history': - db.action('UPDATE {table}_copy SET reference_id = reference_id + ?'.format(table=table_name), + db.action("UPDATE {table}_copy SET reference_id = reference_id + ?".format(table=table_name), [session_history_rows]) # Migrate section_id from session_history_metadata to session_history @@ -128,28 +128,28 @@ def import_tautulli_db(database=None, method=None, backup=False): else: from_db_name = 'import_db' copy = '' - db.action('ALTER TABLE {from_db}.session_history{copy} ' - 'ADD COLUMN section_id INTEGER'.format(from_db=from_db_name, + db.action("ALTER TABLE {from_db}.session_history{copy} " + "ADD COLUMN section_id INTEGER".format(from_db=from_db_name, copy=copy)) - db.action('UPDATE {from_db}.session_history{copy} SET section_id = (' - 'SELECT section_id FROM {from_db}.session_history_metadata{copy} ' - 'WHERE {from_db}.session_history_metadata{copy}.id = ' - '{from_db}.session_history{copy}.id)'.format(from_db=from_db_name, + db.action("UPDATE {from_db}.session_history{copy} SET section_id = (" + "SELECT section_id FROM {from_db}.session_history_metadata{copy} " + "WHERE {from_db}.session_history_metadata{copy}.id = " + "{from_db}.session_history{copy}.id)".format(from_db=from_db_name, copy=copy)) # Keep track of all table columns so that duplicates can be removed after importing table_columns = {} - tables = db.select('SELECT name FROM import_db.sqlite_master ' - 'WHERE type = "table" AND name NOT LIKE "sqlite_%"' - 'ORDER BY name') + tables = db.select("SELECT name FROM import_db.sqlite_master " + "WHERE type = 'table' AND name NOT LIKE 'sqlite_%'" + "ORDER BY name") for table in tables: table_name = table['name'] if table_name == 'sessions' or table_name == 'version_info': # Skip temporary sessions table continue - current_table = db.select('PRAGMA main.table_info({table})'.format(table=table_name)) + current_table = db.select("PRAGMA main.table_info({table})".format(table=table_name)) if not current_table: # 
Skip table does not exits continue @@ -158,8 +158,8 @@ def import_tautulli_db(database=None, method=None, backup=False): if method == 'overwrite': # Clear the table and reset the autoincrement ids - db.action('DELETE FROM {table}'.format(table=table_name)) - db.action('DELETE FROM sqlite_sequence WHERE name = ?', [table_name]) + db.action("DELETE FROM {table}".format(table=table_name)) + db.action("DELETE FROM sqlite_sequence WHERE name = ?", [table_name]) if method == 'merge' and table_name in session_history_tables: from_db_name = 'main' @@ -170,7 +170,7 @@ def import_tautulli_db(database=None, method=None, backup=False): # Get the list of columns to import current_columns = [c['name'] for c in current_table] - import_table = db.select('PRAGMA {from_db}.table_info({from_table})'.format(from_db=from_db_name, + import_table = db.select("PRAGMA {from_db}.table_info({from_table})".format(from_db=from_db_name, from_table=from_table_name)) if method == 'merge' and table_name not in session_history_tables: @@ -182,29 +182,29 @@ def import_tautulli_db(database=None, method=None, backup=False): insert_columns = ', '.join(import_columns) # Insert the data with ignore instead of replace to be safe - db.action('INSERT OR IGNORE INTO {table} ({columns}) ' - 'SELECT {columns} FROM {from_db}.{from_table}'.format(table=table_name, + db.action("INSERT OR IGNORE INTO {table} ({columns}) " + "SELECT {columns} FROM {from_db}.{from_table}".format(table=table_name, columns=insert_columns, from_db=from_db_name, from_table=from_table_name)) - db.connection.execute('DETACH import_db') + db.connection.execute("DETACH import_db") if method == 'merge': for table_name, columns in sorted(table_columns.items()): duplicate_columns = ', '.join([c for c in columns if c not in ('id', 'reference_id')]) logger.info("Tautulli Database :: Removing duplicate rows from database table '%s'.", table_name) if table_name in session_history_tables[1:]: - db.action('DELETE FROM {table} WHERE id NOT IN ' - '(SELECT id FROM session_history)'.format(table=table_name)) + db.action("DELETE FROM {table} WHERE id NOT IN " + "(SELECT id FROM session_history)".format(table=table_name)) else: - db.action('DELETE FROM {table} WHERE id NOT IN ' - '(SELECT MIN(id) FROM {table} GROUP BY {columns})'.format(table=table_name, + db.action("DELETE FROM {table} WHERE id NOT IN " + "(SELECT MIN(id) FROM {table} GROUP BY {columns})".format(table=table_name, columns=duplicate_columns)) logger.info("Tautulli Database :: Deleting temporary database tables.") for table_name in session_history_tables: - db.action('DROP TABLE {table}_copy'.format(table=table_name)) + db.action("DROP TABLE {table}_copy".format(table=table_name)) vacuum() @@ -217,7 +217,7 @@ def import_tautulli_db(database=None, method=None, backup=False): def integrity_check(): monitor_db = MonitorDatabase() - result = monitor_db.select_single('PRAGMA integrity_check') + result = monitor_db.select_single("PRAGMA integrity_check") return result @@ -227,7 +227,7 @@ def clear_table(table=None): logger.debug("Tautulli Database :: Clearing database table '%s'." 
% table) try: - monitor_db.action('DELETE FROM %s' % table) + monitor_db.action("DELETE FROM %s" % table) vacuum() return True except Exception as e: @@ -286,7 +286,7 @@ def delete_user_history(user_id=None): monitor_db = MonitorDatabase() # Get all history associated with the user_id - result = monitor_db.select('SELECT id FROM session_history WHERE user_id = ?', + result = monitor_db.select("SELECT id FROM session_history WHERE user_id = ?", [user_id]) row_ids = [row['id'] for row in result] @@ -299,7 +299,7 @@ def delete_library_history(section_id=None): monitor_db = MonitorDatabase() # Get all history associated with the section_id - result = monitor_db.select('SELECT id FROM session_history WHERE section_id = ?', + result = monitor_db.select("SELECT id FROM session_history WHERE section_id = ?", [section_id]) row_ids = [row['id'] for row in result] @@ -312,7 +312,7 @@ def vacuum(): logger.info("Tautulli Database :: Vacuuming database.") try: - monitor_db.action('VACUUM') + monitor_db.action("VACUUM") except Exception as e: logger.error("Tautulli Database :: Failed to vacuum database: %s" % e) @@ -322,7 +322,7 @@ def optimize(): logger.info("Tautulli Database :: Optimizing database.") try: - monitor_db.action('PRAGMA optimize') + monitor_db.action("PRAGMA optimize") except Exception as e: logger.error("Tautulli Database :: Failed to optimize database: %s" % e) @@ -362,7 +362,7 @@ def make_backup(cleanup=False, scheduler=False): os.makedirs(backup_folder) db = MonitorDatabase() - db.connection.execute('BEGIN IMMEDIATE') + db.connection.execute("BEGIN IMMEDIATE") shutil.copyfile(db_filename(), backup_file_fp) db.connection.rollback() @@ -496,6 +496,6 @@ class MonitorDatabase(object): def last_insert_id(self): # Get the last insert row id - result = self.select_single(query='SELECT last_insert_rowid() AS last_id') + result = self.select_single(query="SELECT last_insert_rowid() AS last_id") if result: return result.get('last_id', None) \ No newline at end of file diff --git a/plexpy/datafactory.py b/plexpy/datafactory.py index e2ee0c2b..3db32a51 100644 --- a/plexpy/datafactory.py +++ b/plexpy/datafactory.py @@ -93,61 +93,61 @@ class DataFactory(object): group_by = ['session_history.reference_id'] if grouping else ['session_history.id'] columns = [ - 'session_history.reference_id', - 'session_history.id AS row_id', - 'MAX(started) AS date', - 'MIN(started) AS started', - 'MAX(stopped) AS stopped', - 'SUM(CASE WHEN stopped > 0 THEN (stopped - started) ELSE 0 END) - \ - SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) AS play_duration', - 'SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) AS paused_counter', - 'session_history.view_offset', - 'session_history.user_id', - 'session_history.user', - '(CASE WHEN users.friendly_name IS NULL OR TRIM(users.friendly_name) = "" \ - THEN users.username ELSE users.friendly_name END) AS friendly_name', - 'users.thumb AS user_thumb', - 'users.custom_avatar_url AS custom_thumb', - 'platform', - 'product', - 'player', - 'ip_address', - 'machine_id', - 'location', - 'secure', - 'relayed', - 'session_history.media_type', - '(CASE WHEN session_history_metadata.live = 1 THEN \'live\' ELSE session_history.media_type END) \ - AS media_type_live', - 'session_history_metadata.rating_key', - 'session_history_metadata.parent_rating_key', - 'session_history_metadata.grandparent_rating_key', - 'session_history_metadata.full_title', - 'session_history_metadata.title', - 'session_history_metadata.parent_title', - 
'session_history_metadata.grandparent_title', - 'session_history_metadata.original_title', - 'session_history_metadata.year', - 'session_history_metadata.media_index', - 'session_history_metadata.parent_media_index', - 'session_history_metadata.thumb', - 'session_history_metadata.parent_thumb', - 'session_history_metadata.grandparent_thumb', - 'session_history_metadata.live', - 'session_history_metadata.added_at', - 'session_history_metadata.originally_available_at', - 'session_history_metadata.guid', - 'MAX((CASE WHEN (view_offset IS NULL OR view_offset = "") THEN 0.1 ELSE view_offset * 1.0 END) / \ - (CASE WHEN (session_history_metadata.duration IS NULL OR session_history_metadata.duration = "") \ - THEN 1.0 ELSE session_history_metadata.duration * 1.0 END) * 100) AS percent_complete', - 'session_history_metadata.duration', - 'session_history_metadata.marker_credits_first', - 'session_history_metadata.marker_credits_final', - 'session_history_media_info.transcode_decision', - 'COUNT(*) AS group_count', - 'GROUP_CONCAT(session_history.id) AS group_ids', - 'NULL AS state', - 'NULL AS session_key' + "session_history.reference_id", + "session_history.id AS row_id", + "MAX(started) AS date", + "MIN(started) AS started", + "MAX(stopped) AS stopped", + "SUM(CASE WHEN stopped > 0 THEN (stopped - started) ELSE 0 END) - \ + SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) AS play_duration", + "SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) AS paused_counter", + "session_history.view_offset", + "session_history.user_id", + "session_history.user", + "(CASE WHEN users.friendly_name IS NULL OR TRIM(users.friendly_name) = '' \ + THEN users.username ELSE users.friendly_name END) AS friendly_name", + "users.thumb AS user_thumb", + "users.custom_avatar_url AS custom_thumb", + "platform", + "product", + "player", + "ip_address", + "machine_id", + "location", + "secure", + "relayed", + "session_history.media_type", + "(CASE WHEN session_history_metadata.live = 1 THEN 'live' ELSE session_history.media_type END) \ + AS media_type_live", + "session_history_metadata.rating_key", + "session_history_metadata.parent_rating_key", + "session_history_metadata.grandparent_rating_key", + "session_history_metadata.full_title", + "session_history_metadata.title", + "session_history_metadata.parent_title", + "session_history_metadata.grandparent_title", + "session_history_metadata.original_title", + "session_history_metadata.year", + "session_history_metadata.media_index", + "session_history_metadata.parent_media_index", + "session_history_metadata.thumb", + "session_history_metadata.parent_thumb", + "session_history_metadata.grandparent_thumb", + "session_history_metadata.live", + "session_history_metadata.added_at", + "session_history_metadata.originally_available_at", + "session_history_metadata.guid", + "MAX((CASE WHEN (view_offset IS NULL OR view_offset = '') THEN 0.1 ELSE view_offset * 1.0 END) / \ + (CASE WHEN (session_history_metadata.duration IS NULL OR session_history_metadata.duration = '') \ + THEN 1.0 ELSE session_history_metadata.duration * 1.0 END) * 100) AS percent_complete", + "session_history_metadata.duration", + "session_history_metadata.marker_credits_first", + "session_history_metadata.marker_credits_final", + "session_history_media_info.transcode_decision", + "COUNT(*) AS group_count", + "GROUP_CONCAT(session_history.id) AS group_ids", + "NULL AS state", + "NULL AS session_key" ] if include_activity: @@ -157,60 +157,60 @@ class DataFactory(object): 
group_by_union = ['session_key'] columns_union = [ - 'NULL AS reference_id', - 'NULL AS row_id', - 'started AS date', - 'started', - 'stopped', - 'SUM(CASE WHEN stopped > 0 THEN (stopped - started) ELSE (strftime("%s", "now") - started) END) - \ - SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) AS play_duration', - 'SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) AS paused_counter', - 'view_offset', - 'user_id', - 'user', - '(CASE WHEN friendly_name IS NULL OR TRIM(friendly_name) = "" \ - THEN user ELSE friendly_name END) AS friendly_name', - 'NULL AS user_thumb', - 'NULL AS custom_thumb', - 'platform', - 'product', - 'player', - 'ip_address', - 'machine_id', - 'location', - 'secure', - 'relayed', - 'media_type', - '(CASE WHEN live = 1 THEN \'live\' ELSE media_type END) AS media_type_live', - 'rating_key', - 'parent_rating_key', - 'grandparent_rating_key', - 'full_title', - 'title', - 'parent_title', - 'grandparent_title', - 'original_title', - 'year', - 'media_index', - 'parent_media_index', - 'thumb', - 'parent_thumb', - 'grandparent_thumb', - 'live', - 'added_at', - 'originally_available_at', - 'guid', - 'MAX((CASE WHEN (view_offset IS NULL OR view_offset = "") THEN 0.1 ELSE view_offset * 1.0 END) / \ - (CASE WHEN (duration IS NULL OR duration = "") \ - THEN 1.0 ELSE duration * 1.0 END) * 100) AS percent_complete', - 'duration', - 'NULL AS marker_credits_first', - 'NULL AS marker_credits_final', - 'transcode_decision', - 'NULL AS group_count', - 'NULL AS group_ids', - 'state', - 'session_key' + "NULL AS reference_id", + "NULL AS row_id", + "started AS date", + "started", + "stopped", + "SUM(CASE WHEN stopped > 0 THEN (stopped - started) ELSE (strftime('%s', 'now') - started) END) - \ + SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) AS play_duration", + "SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) AS paused_counter", + "view_offset", + "user_id", + "user", + "(CASE WHEN friendly_name IS NULL OR TRIM(friendly_name) = '' \ + THEN user ELSE friendly_name END) AS friendly_name", + "NULL AS user_thumb", + "NULL AS custom_thumb", + "platform", + "product", + "player", + "ip_address", + "machine_id", + "location", + "secure", + "relayed", + "media_type", + "(CASE WHEN live = 1 THEN 'live' ELSE media_type END) AS media_type_live", + "rating_key", + "parent_rating_key", + "grandparent_rating_key", + "full_title", + "title", + "parent_title", + "grandparent_title", + "original_title", + "year", + "media_index", + "parent_media_index", + "thumb", + "parent_thumb", + "grandparent_thumb", + "live", + "added_at", + "originally_available_at", + "guid", + "MAX((CASE WHEN (view_offset IS NULL OR view_offset = '') THEN 0.1 ELSE view_offset * 1.0 END) / \ + (CASE WHEN (duration IS NULL OR duration = '') \ + THEN 1.0 ELSE duration * 1.0 END) * 100) AS percent_complete", + "duration", + "NULL AS marker_credits_first", + "NULL AS marker_credits_final", + "transcode_decision", + "NULL AS group_count", + "NULL AS group_ids", + "state", + "session_key" ] else: @@ -392,20 +392,20 @@ class DataFactory(object): if stat == 'top_movies': top_movies = [] try: - query = 'SELECT sh.id, shm.full_title, shm.year, sh.rating_key, shm.thumb, sh.section_id, ' \ - 'shm.art, sh.media_type, shm.content_rating, shm.labels, sh.started, shm.live, shm.guid, ' \ - 'MAX(sh.started) AS last_watch, COUNT(sh.id) AS total_plays, SUM(sh.d) AS total_duration ' \ - 'FROM (SELECT *, SUM(CASE WHEN stopped > 0 THEN (stopped - started) - ' \ - ' (CASE WHEN 
paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) ' \ - ' AS d ' \ - ' FROM session_history ' \ - ' WHERE session_history.stopped >= %s ' \ - ' AND session_history.media_type = "movie" %s ' \ - ' GROUP BY %s) AS sh ' \ - 'JOIN session_history_metadata AS shm ON shm.id = sh.id ' \ - 'GROUP BY shm.full_title, shm.year ' \ - 'ORDER BY %s DESC, sh.started DESC ' \ - 'LIMIT %s OFFSET %s ' % (timestamp, where_id, group_by, sort_type, stats_count, stats_start) + query = "SELECT sh.id, shm.full_title, shm.year, sh.rating_key, shm.thumb, sh.section_id, " \ + "shm.art, sh.media_type, shm.content_rating, shm.labels, sh.started, shm.live, shm.guid, " \ + "MAX(sh.started) AS last_watch, COUNT(sh.id) AS total_plays, SUM(sh.d) AS total_duration " \ + "FROM (SELECT *, SUM(CASE WHEN stopped > 0 THEN (stopped - started) - " \ + " (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) " \ + " AS d " \ + " FROM session_history " \ + " WHERE session_history.stopped >= %s " \ + " AND session_history.media_type = 'movie' %s " \ + " GROUP BY %s) AS sh " \ + "JOIN session_history_metadata AS shm ON shm.id = sh.id " \ + "GROUP BY shm.full_title, shm.year " \ + "ORDER BY %s DESC, sh.started DESC " \ + "LIMIT %s OFFSET %s " % (timestamp, where_id, group_by, sort_type, stats_count, stats_start) result = monitor_db.select(query) except Exception as e: logger.warn("Tautulli DataFactory :: Unable to execute database query for get_home_stats: top_movies: %s." % e) @@ -444,21 +444,21 @@ class DataFactory(object): elif stat == 'popular_movies': popular_movies = [] try: - query = 'SELECT sh.id, shm.full_title, shm.year, sh.rating_key, shm.thumb, sh.section_id, ' \ - 'shm.art, sh.media_type, shm.content_rating, shm.labels, sh.started, shm.live, shm.guid, ' \ - 'COUNT(DISTINCT sh.user_id) AS users_watched, ' \ - 'MAX(sh.started) AS last_watch, COUNT(sh.id) as total_plays, SUM(sh.d) AS total_duration ' \ - 'FROM (SELECT *, SUM(CASE WHEN stopped > 0 THEN (stopped - started) - ' \ - ' (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) ' \ - ' AS d ' \ - ' FROM session_history ' \ - ' WHERE session_history.stopped >= %s ' \ - ' AND session_history.media_type = "movie" %s ' \ - ' GROUP BY %s) AS sh ' \ - 'JOIN session_history_metadata AS shm ON shm.id = sh.id ' \ - 'GROUP BY shm.full_title, shm.year ' \ - 'ORDER BY users_watched DESC, %s DESC, sh.started DESC ' \ - 'LIMIT %s OFFSET %s ' % (timestamp, where_id, group_by, sort_type, stats_count, stats_start) + query = "SELECT sh.id, shm.full_title, shm.year, sh.rating_key, shm.thumb, sh.section_id, " \ + "shm.art, sh.media_type, shm.content_rating, shm.labels, sh.started, shm.live, shm.guid, " \ + "COUNT(DISTINCT sh.user_id) AS users_watched, " \ + "MAX(sh.started) AS last_watch, COUNT(sh.id) as total_plays, SUM(sh.d) AS total_duration " \ + "FROM (SELECT *, SUM(CASE WHEN stopped > 0 THEN (stopped - started) - " \ + " (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) " \ + " AS d " \ + " FROM session_history " \ + " WHERE session_history.stopped >= %s " \ + " AND session_history.media_type = 'movie' %s " \ + " GROUP BY %s) AS sh " \ + "JOIN session_history_metadata AS shm ON shm.id = sh.id " \ + "GROUP BY shm.full_title, shm.year " \ + "ORDER BY users_watched DESC, %s DESC, sh.started DESC " \ + "LIMIT %s OFFSET %s " % (timestamp, where_id, group_by, sort_type, stats_count, stats_start) result = monitor_db.select(query) except Exception as e: logger.warn("Tautulli DataFactory :: Unable to 
execute database query for get_home_stats: popular_movies: %s." % e) @@ -495,22 +495,22 @@ class DataFactory(object): elif stat == 'top_tv': top_tv = [] try: - query = 'SELECT sh.id, shm.grandparent_title, sh.grandparent_rating_key, ' \ - 'shm.grandparent_thumb, sh.section_id, ' \ - 'shm.year, sh.rating_key, shm.art, sh.media_type, ' \ - 'shm.content_rating, shm.labels, sh.started, shm.live, shm.guid, ' \ - 'MAX(sh.started) AS last_watch, COUNT(sh.id) AS total_plays, SUM(sh.d) AS total_duration ' \ - 'FROM (SELECT *, SUM(CASE WHEN stopped > 0 THEN (stopped - started) - ' \ - ' (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) ' \ - ' AS d ' \ - ' FROM session_history ' \ - ' WHERE session_history.stopped >= %s ' \ - ' AND session_history.media_type = "episode" %s ' \ - ' GROUP BY %s) AS sh ' \ - 'JOIN session_history_metadata AS shm ON shm.id = sh.id ' \ - 'GROUP BY shm.grandparent_title ' \ - 'ORDER BY %s DESC, sh.started DESC ' \ - 'LIMIT %s OFFSET %s ' % (timestamp, where_id, group_by, sort_type, stats_count, stats_start) + query = "SELECT sh.id, shm.grandparent_title, sh.grandparent_rating_key, " \ + "shm.grandparent_thumb, sh.section_id, " \ + "shm.year, sh.rating_key, shm.art, sh.media_type, " \ + "shm.content_rating, shm.labels, sh.started, shm.live, shm.guid, " \ + "MAX(sh.started) AS last_watch, COUNT(sh.id) AS total_plays, SUM(sh.d) AS total_duration " \ + "FROM (SELECT *, SUM(CASE WHEN stopped > 0 THEN (stopped - started) - " \ + " (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) " \ + " AS d " \ + " FROM session_history " \ + " WHERE session_history.stopped >= %s " \ + " AND session_history.media_type = 'episode' %s " \ + " GROUP BY %s) AS sh " \ + "JOIN session_history_metadata AS shm ON shm.id = sh.id " \ + "GROUP BY shm.grandparent_title " \ + "ORDER BY %s DESC, sh.started DESC " \ + "LIMIT %s OFFSET %s " % (timestamp, where_id, group_by, sort_type, stats_count, stats_start) result = monitor_db.select(query) except Exception as e: logger.warn("Tautulli DataFactory :: Unable to execute database query for get_home_stats: top_tv: %s." 
% e) @@ -549,23 +549,23 @@ class DataFactory(object): elif stat == 'popular_tv': popular_tv = [] try: - query = 'SELECT sh.id, shm.grandparent_title, sh.grandparent_rating_key, ' \ - 'shm.grandparent_thumb, sh.section_id, ' \ - 'shm.year, sh.rating_key, shm.art, sh.media_type, ' \ - 'shm.content_rating, shm.labels, sh.started, shm.live, shm.guid, ' \ - 'COUNT(DISTINCT sh.user_id) AS users_watched, ' \ - 'MAX(sh.started) AS last_watch, COUNT(sh.id) as total_plays, SUM(sh.d) AS total_duration ' \ - 'FROM (SELECT *, SUM(CASE WHEN stopped > 0 THEN (stopped - started) - ' \ - ' (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) ' \ - ' AS d ' \ - ' FROM session_history ' \ - ' WHERE session_history.stopped >= %s ' \ - ' AND session_history.media_type = "episode" %s ' \ - ' GROUP BY %s) AS sh ' \ - 'JOIN session_history_metadata AS shm ON shm.id = sh.id ' \ - 'GROUP BY shm.grandparent_title ' \ - 'ORDER BY users_watched DESC, %s DESC, sh.started DESC ' \ - 'LIMIT %s OFFSET %s ' % (timestamp, where_id, group_by, sort_type, stats_count, stats_start) + query = "SELECT sh.id, shm.grandparent_title, sh.grandparent_rating_key, " \ + "shm.grandparent_thumb, sh.section_id, " \ + "shm.year, sh.rating_key, shm.art, sh.media_type, " \ + "shm.content_rating, shm.labels, sh.started, shm.live, shm.guid, " \ + "COUNT(DISTINCT sh.user_id) AS users_watched, " \ + "MAX(sh.started) AS last_watch, COUNT(sh.id) as total_plays, SUM(sh.d) AS total_duration " \ + "FROM (SELECT *, SUM(CASE WHEN stopped > 0 THEN (stopped - started) - " \ + " (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) " \ + " AS d " \ + " FROM session_history " \ + " WHERE session_history.stopped >= %s " \ + " AND session_history.media_type = 'episode' %s " \ + " GROUP BY %s) AS sh " \ + "JOIN session_history_metadata AS shm ON shm.id = sh.id " \ + "GROUP BY shm.grandparent_title " \ + "ORDER BY users_watched DESC, %s DESC, sh.started DESC " \ + "LIMIT %s OFFSET %s " % (timestamp, where_id, group_by, sort_type, stats_count, stats_start) result = monitor_db.select(query) except Exception as e: logger.warn("Tautulli DataFactory :: Unable to execute database query for get_home_stats: popular_tv: %s." 
% e) @@ -602,21 +602,21 @@ class DataFactory(object): elif stat == 'top_music': top_music = [] try: - query = 'SELECT sh.id, shm.grandparent_title, shm.original_title, shm.year, ' \ - 'sh.grandparent_rating_key, shm.grandparent_thumb, sh.section_id, ' \ - 'shm.art, sh.media_type, shm.content_rating, shm.labels, sh.started, shm.live, shm.guid, ' \ - 'MAX(sh.started) AS last_watch, COUNT(sh.id) AS total_plays, SUM(sh.d) AS total_duration ' \ - 'FROM (SELECT *, SUM(CASE WHEN stopped > 0 THEN (stopped - started) - ' \ - ' (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) ' \ - ' AS d ' \ - ' FROM session_history ' \ - ' WHERE session_history.stopped >= %s ' \ - ' AND session_history.media_type = "track" %s ' \ - ' GROUP BY %s) AS sh ' \ - 'JOIN session_history_metadata AS shm ON shm.id = sh.id ' \ - 'GROUP BY shm.original_title, shm.grandparent_title ' \ - 'ORDER BY %s DESC, sh.started DESC ' \ - 'LIMIT %s OFFSET %s ' % (timestamp, where_id, group_by, sort_type, stats_count, stats_start) + query = "SELECT sh.id, shm.grandparent_title, shm.original_title, shm.year, " \ + "sh.grandparent_rating_key, shm.grandparent_thumb, sh.section_id, " \ + "shm.art, sh.media_type, shm.content_rating, shm.labels, sh.started, shm.live, shm.guid, " \ + "MAX(sh.started) AS last_watch, COUNT(sh.id) AS total_plays, SUM(sh.d) AS total_duration " \ + "FROM (SELECT *, SUM(CASE WHEN stopped > 0 THEN (stopped - started) - " \ + " (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) " \ + " AS d " \ + " FROM session_history " \ + " WHERE session_history.stopped >= %s " \ + " AND session_history.media_type = 'track' %s " \ + " GROUP BY %s) AS sh " \ + "JOIN session_history_metadata AS shm ON shm.id = sh.id " \ + "GROUP BY shm.original_title, shm.grandparent_title " \ + "ORDER BY %s DESC, sh.started DESC " \ + "LIMIT %s OFFSET %s " % (timestamp, where_id, group_by, sort_type, stats_count, stats_start) result = monitor_db.select(query) except Exception as e: logger.warn("Tautulli DataFactory :: Unable to execute database query for get_home_stats: top_music: %s." 
% e) @@ -655,22 +655,22 @@ class DataFactory(object): elif stat == 'popular_music': popular_music = [] try: - query = 'SELECT sh.id, shm.grandparent_title, shm.original_title, shm.year, ' \ - 'sh.grandparent_rating_key, shm.grandparent_thumb, sh.section_id, ' \ - 'shm.art, sh.media_type, shm.content_rating, shm.labels, sh.started, shm.live, shm.guid, ' \ - 'COUNT(DISTINCT sh.user_id) AS users_watched, ' \ - 'MAX(sh.started) AS last_watch, COUNT(sh.id) as total_plays, SUM(sh.d) AS total_duration ' \ - 'FROM (SELECT *, SUM(CASE WHEN stopped > 0 THEN (stopped - started) - ' \ - ' (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) ' \ - ' AS d ' \ - ' FROM session_history ' \ - ' WHERE session_history.stopped >= %s ' \ - ' AND session_history.media_type = "track" %s ' \ - ' GROUP BY %s) AS sh ' \ - 'JOIN session_history_metadata AS shm ON shm.id = sh.id ' \ - 'GROUP BY shm.original_title, shm.grandparent_title ' \ - 'ORDER BY users_watched DESC, %s DESC, sh.started DESC ' \ - 'LIMIT %s OFFSET %s ' % (timestamp, where_id, group_by, sort_type, stats_count, stats_start) + query = "SELECT sh.id, shm.grandparent_title, shm.original_title, shm.year, " \ + "sh.grandparent_rating_key, shm.grandparent_thumb, sh.section_id, " \ + "shm.art, sh.media_type, shm.content_rating, shm.labels, sh.started, shm.live, shm.guid, " \ + "COUNT(DISTINCT sh.user_id) AS users_watched, " \ + "MAX(sh.started) AS last_watch, COUNT(sh.id) as total_plays, SUM(sh.d) AS total_duration " \ + "FROM (SELECT *, SUM(CASE WHEN stopped > 0 THEN (stopped - started) - " \ + " (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) " \ + " AS d " \ + " FROM session_history " \ + " WHERE session_history.stopped >= %s " \ + " AND session_history.media_type = 'track' %s " \ + " GROUP BY %s) AS sh " \ + "JOIN session_history_metadata AS shm ON shm.id = sh.id " \ + "GROUP BY shm.original_title, shm.grandparent_title " \ + "ORDER BY users_watched DESC, %s DESC, sh.started DESC " \ + "LIMIT %s OFFSET %s " % (timestamp, where_id, group_by, sort_type, stats_count, stats_start) result = monitor_db.select(query) except Exception as e: logger.warn("Tautulli DataFactory :: Unable to execute database query for get_home_stats: popular_music: %s." 
% e) @@ -707,28 +707,28 @@ class DataFactory(object): elif stat == 'top_libraries': top_libraries = [] try: - query = 'SELECT sh.id, shm.title, shm.grandparent_title, shm.full_title, shm.year, ' \ - 'shm.media_index, shm.parent_media_index, ' \ - 'sh.rating_key, shm.grandparent_rating_key, shm.thumb, shm.grandparent_thumb, ' \ - 'sh.user, sh.user_id, sh.player, sh.section_id, ' \ - 'shm.art, sh.media_type, shm.content_rating, shm.labels, shm.live, shm.guid, ' \ - 'ls.section_name, ls.section_type, ' \ - 'ls.thumb AS library_thumb, ls.custom_thumb_url AS custom_thumb, ' \ - 'ls.art AS library_art, ls.custom_art_url AS custom_art, ' \ - 'sh.started, ' \ - 'MAX(sh.started) AS last_watch, COUNT(sh.id) AS total_plays, SUM(sh.d) AS total_duration ' \ - 'FROM (SELECT *, SUM(CASE WHEN stopped > 0 THEN (stopped - started) - ' \ - ' (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) ' \ - ' AS d ' \ - ' FROM session_history ' \ - ' WHERE session_history.stopped >= %s %s ' \ - ' GROUP BY %s) AS sh ' \ - 'JOIN session_history_metadata AS shm ON shm.id = sh.id ' \ - 'LEFT OUTER JOIN (SELECT * FROM library_sections WHERE deleted_section = 0) ' \ - ' AS ls ON sh.section_id = ls.section_id ' \ - 'GROUP BY sh.section_id ' \ - 'ORDER BY %s DESC, sh.started DESC ' \ - 'LIMIT %s OFFSET %s ' % (timestamp, where_id, group_by, sort_type, stats_count, stats_start) + query = "SELECT sh.id, shm.title, shm.grandparent_title, shm.full_title, shm.year, " \ + "shm.media_index, shm.parent_media_index, " \ + "sh.rating_key, shm.grandparent_rating_key, shm.thumb, shm.grandparent_thumb, " \ + "sh.user, sh.user_id, sh.player, sh.section_id, " \ + "shm.art, sh.media_type, shm.content_rating, shm.labels, shm.live, shm.guid, " \ + "ls.section_name, ls.section_type, " \ + "ls.thumb AS library_thumb, ls.custom_thumb_url AS custom_thumb, " \ + "ls.art AS library_art, ls.custom_art_url AS custom_art, " \ + "sh.started, " \ + "MAX(sh.started) AS last_watch, COUNT(sh.id) AS total_plays, SUM(sh.d) AS total_duration " \ + "FROM (SELECT *, SUM(CASE WHEN stopped > 0 THEN (stopped - started) - " \ + " (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) " \ + " AS d " \ + " FROM session_history " \ + " WHERE session_history.stopped >= %s %s " \ + " GROUP BY %s) AS sh " \ + "JOIN session_history_metadata AS shm ON shm.id = sh.id " \ + "LEFT OUTER JOIN (SELECT * FROM library_sections WHERE deleted_section = 0) " \ + " AS ls ON sh.section_id = ls.section_id " \ + "GROUP BY sh.section_id " \ + "ORDER BY %s DESC, sh.started DESC " \ + "LIMIT %s OFFSET %s " % (timestamp, where_id, group_by, sort_type, stats_count, stats_start) result = monitor_db.select(query) except Exception as e: logger.warn("Tautulli DataFactory :: Unable to execute database query for get_home_stats: top_libraries: %s." 
% e) @@ -795,28 +795,28 @@ class DataFactory(object): elif stat == 'top_users': top_users = [] try: - query = 'SELECT sh.id, shm.title, shm.grandparent_title, shm.full_title, shm.year, ' \ - 'shm.media_index, shm.parent_media_index, ' \ - 'sh.rating_key, shm.grandparent_rating_key, shm.thumb, shm.grandparent_thumb, ' \ - 'sh.user, sh.user_id, sh.player, sh.section_id, ' \ - 'shm.art, sh.media_type, shm.content_rating, shm.labels, shm.live, shm.guid, ' \ - 'u.thumb AS user_thumb, u.custom_avatar_url AS custom_thumb, ' \ - 'sh.started, ' \ - '(CASE WHEN u.friendly_name IS NULL OR TRIM(u.friendly_name) = ""' \ - ' THEN u.username ELSE u.friendly_name END) ' \ - ' AS friendly_name, ' \ - 'MAX(sh.started) AS last_watch, COUNT(sh.id) AS total_plays, SUM(sh.d) AS total_duration ' \ - 'FROM (SELECT *, SUM(CASE WHEN stopped > 0 THEN (stopped - started) - ' \ - ' (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) ' \ - ' AS d ' \ - ' FROM session_history ' \ - ' WHERE session_history.stopped >= %s %s ' \ - ' GROUP BY %s) AS sh ' \ - 'JOIN session_history_metadata AS shm ON shm.id = sh.id ' \ - 'LEFT OUTER JOIN users AS u ON sh.user_id = u.user_id ' \ - 'GROUP BY sh.user_id ' \ - 'ORDER BY %s DESC, sh.started DESC ' \ - 'LIMIT %s OFFSET %s ' % (timestamp, where_id, group_by, sort_type, stats_count, stats_start) + query = "SELECT sh.id, shm.title, shm.grandparent_title, shm.full_title, shm.year, " \ + "shm.media_index, shm.parent_media_index, " \ + "sh.rating_key, shm.grandparent_rating_key, shm.thumb, shm.grandparent_thumb, " \ + "sh.user, sh.user_id, sh.player, sh.section_id, " \ + "shm.art, sh.media_type, shm.content_rating, shm.labels, shm.live, shm.guid, " \ + "u.thumb AS user_thumb, u.custom_avatar_url AS custom_thumb, " \ + "sh.started, " \ + "(CASE WHEN u.friendly_name IS NULL OR TRIM(u.friendly_name) = ''" \ + " THEN u.username ELSE u.friendly_name END) " \ + " AS friendly_name, " \ + "MAX(sh.started) AS last_watch, COUNT(sh.id) AS total_plays, SUM(sh.d) AS total_duration " \ + "FROM (SELECT *, SUM(CASE WHEN stopped > 0 THEN (stopped - started) - " \ + " (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) " \ + " AS d " \ + " FROM session_history " \ + " WHERE session_history.stopped >= %s %s " \ + " GROUP BY %s) AS sh " \ + "JOIN session_history_metadata AS shm ON shm.id = sh.id " \ + "LEFT OUTER JOIN users AS u ON sh.user_id = u.user_id " \ + "GROUP BY sh.user_id " \ + "ORDER BY %s DESC, sh.started DESC " \ + "LIMIT %s OFFSET %s " % (timestamp, where_id, group_by, sort_type, stats_count, stats_start) result = monitor_db.select(query) except Exception as e: logger.warn("Tautulli DataFactory :: Unable to execute database query for get_home_stats: top_users: %s." 
% e) @@ -873,17 +873,17 @@ class DataFactory(object): top_platform = [] try: - query = 'SELECT sh.platform, sh.started, ' \ - 'MAX(sh.started) AS last_watch, COUNT(sh.id) AS total_plays, SUM(sh.d) AS total_duration ' \ - 'FROM (SELECT *, SUM(CASE WHEN stopped > 0 THEN (stopped - started) - ' \ - ' (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) ' \ - ' AS d ' \ - ' FROM session_history ' \ - ' WHERE session_history.stopped >= %s %s ' \ - ' GROUP BY %s) AS sh ' \ - 'GROUP BY sh.platform ' \ - 'ORDER BY %s DESC, sh.started DESC ' \ - 'LIMIT %s OFFSET %s ' % (timestamp, where_id, group_by, sort_type, stats_count, stats_start) + query = "SELECT sh.platform, sh.started, " \ + "MAX(sh.started) AS last_watch, COUNT(sh.id) AS total_plays, SUM(sh.d) AS total_duration " \ + "FROM (SELECT *, SUM(CASE WHEN stopped > 0 THEN (stopped - started) - " \ + " (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) " \ + " AS d " \ + " FROM session_history " \ + " WHERE session_history.stopped >= %s %s " \ + " GROUP BY %s) AS sh " \ + "GROUP BY sh.platform " \ + "ORDER BY %s DESC, sh.started DESC " \ + "LIMIT %s OFFSET %s " % (timestamp, where_id, group_by, sort_type, stats_count, stats_start) result = monitor_db.select(query) except Exception as e: logger.warn("Tautulli DataFactory :: Unable to execute database query for get_home_stats: top_platforms: %s." % e) @@ -924,65 +924,65 @@ class DataFactory(object): if plexpy.CONFIG.WATCHED_MARKER == 1: watched_threshold = ( - '(CASE WHEN shm.marker_credits_final IS NULL ' - 'THEN sh._duration * (CASE WHEN sh.media_type = "movie" THEN %d ELSE %d END) / 100.0 ' - 'ELSE shm.marker_credits_final END) ' - 'AS watched_threshold' + "(CASE WHEN shm.marker_credits_final IS NULL " + "THEN sh._duration * (CASE WHEN sh.media_type = 'movie' THEN %d ELSE %d END) / 100.0 " + "ELSE shm.marker_credits_final END) " + "AS watched_threshold" ) % (movie_watched_percent, tv_watched_percent) - watched_where = '_view_offset >= watched_threshold' + watched_where = "_view_offset >= watched_threshold" elif plexpy.CONFIG.WATCHED_MARKER == 2: watched_threshold = ( - '(CASE WHEN shm.marker_credits_first IS NULL ' - 'THEN sh._duration * (CASE WHEN sh.media_type = "movie" THEN %d ELSE %d END) / 100.0 ' - 'ELSE shm.marker_credits_first END) ' - 'AS watched_threshold' + "(CASE WHEN shm.marker_credits_first IS NULL " + "THEN sh._duration * (CASE WHEN sh.media_type = 'movie' THEN %d ELSE %d END) / 100.0 " + "ELSE shm.marker_credits_first END) " + "AS watched_threshold" ) % (movie_watched_percent, tv_watched_percent) - watched_where = '_view_offset >= watched_threshold' + watched_where = "_view_offset >= watched_threshold" elif plexpy.CONFIG.WATCHED_MARKER == 3: watched_threshold = ( - 'MIN(' - '(CASE WHEN shm.marker_credits_first IS NULL ' - 'THEN sh._duration * (CASE WHEN sh.media_type = "movie" THEN %d ELSE %d END) / 100.0 ' - 'ELSE shm.marker_credits_first END), ' - 'sh._duration * (CASE WHEN sh.media_type = "movie" THEN %d ELSE %d END) / 100.0) ' - 'AS watched_threshold' + "MIN(" + "(CASE WHEN shm.marker_credits_first IS NULL " + "THEN sh._duration * (CASE WHEN sh.media_type = 'movie' THEN %d ELSE %d END) / 100.0 " + "ELSE shm.marker_credits_first END), " + "sh._duration * (CASE WHEN sh.media_type = 'movie' THEN %d ELSE %d END) / 100.0) " + "AS watched_threshold" ) % (movie_watched_percent, tv_watched_percent, movie_watched_percent, tv_watched_percent) - watched_where = '_view_offset >= watched_threshold' + watched_where = "_view_offset >= 
watched_threshold" else: - watched_threshold = 'NULL AS watched_threshold' + watched_threshold = "NULL AS watched_threshold" watched_where = ( - 'sh.media_type == "movie" AND percent_complete >= %d ' - 'OR sh.media_type == "episode" AND percent_complete >= %d' + "sh.media_type == 'movie' AND percent_complete >= %d " + "OR sh.media_type == 'episode' AND percent_complete >= %d" ) % (movie_watched_percent, tv_watched_percent) last_watched = [] try: - query = 'SELECT sh.id, shm.title, shm.grandparent_title, shm.full_title, shm.year, ' \ - 'shm.media_index, shm.parent_media_index, ' \ - 'sh.rating_key, shm.grandparent_rating_key, shm.thumb, shm.grandparent_thumb, ' \ - 'sh.user, sh.user_id, u.custom_avatar_url as user_thumb, sh.player, sh.section_id, ' \ - 'shm.art, sh.media_type, shm.content_rating, shm.labels, shm.live, shm.guid, ' \ - '(CASE WHEN u.friendly_name IS NULL OR TRIM(u.friendly_name) = ""' \ - ' THEN u.username ELSE u.friendly_name END) ' \ - ' AS friendly_name, ' \ - 'MAX(sh.started) AS last_watch, sh._view_offset, sh._duration, ' \ - '(sh._view_offset / sh._duration * 100) AS percent_complete, ' \ - '%s ' \ - 'FROM (SELECT *, MAX(session_history.id), ' \ - ' (CASE WHEN view_offset IS NULL THEN 0.1 ELSE view_offset * 1.0 END) AS _view_offset, ' \ - ' (CASE WHEN duration IS NULL THEN 1.0 ELSE duration * 1.0 END) AS _duration ' \ - ' FROM session_history ' \ - ' JOIN session_history_metadata ON session_history_metadata.id = session_history.id ' \ - ' WHERE session_history.stopped >= %s ' \ - ' AND (session_history.media_type = "movie" ' \ - ' OR session_history.media_type = "episode") %s ' \ - ' GROUP BY %s) AS sh ' \ - 'JOIN session_history_metadata AS shm ON shm.id = sh.id ' \ - 'LEFT OUTER JOIN users AS u ON sh.user_id = u.user_id ' \ - 'WHERE %s ' \ - 'GROUP BY sh.id ' \ - 'ORDER BY last_watch DESC ' \ - 'LIMIT %s OFFSET %s' % (watched_threshold, + query = "SELECT sh.id, shm.title, shm.grandparent_title, shm.full_title, shm.year, " \ + "shm.media_index, shm.parent_media_index, " \ + "sh.rating_key, shm.grandparent_rating_key, shm.thumb, shm.grandparent_thumb, " \ + "sh.user, sh.user_id, u.custom_avatar_url as user_thumb, sh.player, sh.section_id, " \ + "shm.art, sh.media_type, shm.content_rating, shm.labels, shm.live, shm.guid, " \ + "(CASE WHEN u.friendly_name IS NULL OR TRIM(u.friendly_name) = ''" \ + " THEN u.username ELSE u.friendly_name END) " \ + " AS friendly_name, " \ + "MAX(sh.started) AS last_watch, sh._view_offset, sh._duration, " \ + "(sh._view_offset / sh._duration * 100) AS percent_complete, " \ + "%s " \ + "FROM (SELECT *, MAX(session_history.id), " \ + " (CASE WHEN view_offset IS NULL THEN 0.1 ELSE view_offset * 1.0 END) AS _view_offset, " \ + " (CASE WHEN duration IS NULL THEN 1.0 ELSE duration * 1.0 END) AS _duration " \ + " FROM session_history " \ + " JOIN session_history_metadata ON session_history_metadata.id = session_history.id " \ + " WHERE session_history.stopped >= %s " \ + " AND (session_history.media_type = 'movie' " \ + " OR session_history.media_type = 'episode') %s " \ + " GROUP BY %s) AS sh " \ + "JOIN session_history_metadata AS shm ON shm.id = sh.id " \ + "LEFT OUTER JOIN users AS u ON sh.user_id = u.user_id " \ + "WHERE %s " \ + "GROUP BY sh.id " \ + "ORDER BY last_watch DESC " \ + "LIMIT %s OFFSET %s" % (watched_threshold, timestamp, where_id, group_by, watched_where, stats_count, stats_start) result = monitor_db.select(query) @@ -1068,10 +1068,10 @@ class DataFactory(object): most_concurrent = [] try: - base_query = 'SELECT sh.started, 
sh.stopped ' \ - 'FROM session_history AS sh ' \ - 'JOIN session_history_media_info AS shmi ON sh.id = shmi.id ' \ - 'WHERE sh.stopped >= %s ' % timestamp + base_query = "SELECT sh.started, sh.stopped " \ + "FROM session_history AS sh " \ + "JOIN session_history_media_info AS shmi ON sh.id = shmi.id " \ + "WHERE sh.stopped >= %s " % timestamp title = 'Concurrent Streams' query = base_query @@ -1081,21 +1081,21 @@ class DataFactory(object): title = 'Concurrent Transcodes' query = base_query \ - + 'AND shmi.transcode_decision = "transcode" ' + + "AND shmi.transcode_decision = 'transcode' " result = monitor_db.select(query) if result: most_concurrent.append(calc_most_concurrent(title, result)) title = 'Concurrent Direct Streams' query = base_query \ - + 'AND shmi.transcode_decision = "copy" ' + + "AND shmi.transcode_decision = 'copy' " result = monitor_db.select(query) if result: most_concurrent.append(calc_most_concurrent(title, result)) title = 'Concurrent Direct Plays' query = base_query \ - + 'AND shmi.transcode_decision = "direct play" ' + + "AND shmi.transcode_decision = 'direct play' " result = monitor_db.select(query) if result: most_concurrent.append(calc_most_concurrent(title, result)) @@ -1120,21 +1120,21 @@ class DataFactory(object): library_stats = [] try: - query = 'SELECT ls.id, ls.section_id, ls.section_name, ls.section_type, ls.thumb AS library_thumb, ' \ - 'ls.custom_thumb_url AS custom_thumb, ls.art AS library_art, ls.custom_art_url AS custom_art, ' \ - 'ls.count, ls.parent_count, ls.child_count, ' \ - 'sh.id, shm.title, shm.grandparent_title, shm.full_title, shm.year, ' \ - 'shm.media_index, shm.parent_media_index, ' \ - 'sh.rating_key, shm.grandparent_rating_key, shm.thumb, shm.grandparent_thumb, ' \ - 'sh.user, sh.user_id, sh.player, ' \ - 'shm.art, sh.media_type, shm.content_rating, shm.labels, shm.live, shm.guid, ' \ - 'MAX(sh.started) AS last_watch ' \ - 'FROM library_sections AS ls ' \ - 'LEFT OUTER JOIN session_history AS sh ON ls.section_id = sh.section_id ' \ - 'LEFT OUTER JOIN session_history_metadata AS shm ON sh.id = shm.id ' \ - 'WHERE ls.section_id IN (%s) AND ls.deleted_section = 0 ' \ - 'GROUP BY ls.id ' \ - 'ORDER BY ls.section_type, ls.count DESC, ls.parent_count DESC, ls.child_count DESC ' % ','.join(library_cards) + query = "SELECT ls.id, ls.section_id, ls.section_name, ls.section_type, ls.thumb AS library_thumb, " \ + "ls.custom_thumb_url AS custom_thumb, ls.art AS library_art, ls.custom_art_url AS custom_art, " \ + "ls.count, ls.parent_count, ls.child_count, " \ + "sh.id, shm.title, shm.grandparent_title, shm.full_title, shm.year, " \ + "shm.media_index, shm.parent_media_index, " \ + "sh.rating_key, shm.grandparent_rating_key, shm.thumb, shm.grandparent_thumb, " \ + "sh.user, sh.user_id, sh.player, " \ + "shm.art, sh.media_type, shm.content_rating, shm.labels, shm.live, shm.guid, " \ + "MAX(sh.started) AS last_watch " \ + "FROM library_sections AS ls " \ + "LEFT OUTER JOIN session_history AS sh ON ls.section_id = sh.section_id " \ + "LEFT OUTER JOIN session_history_metadata AS shm ON sh.id = shm.id " \ + "WHERE ls.section_id IN (%s) AND ls.deleted_section = 0 " \ + "GROUP BY ls.id " \ + "ORDER BY ls.section_type, ls.count DESC, ls.parent_count DESC, ls.child_count DESC " % ",".join(library_cards) result = monitor_db.select(query) except Exception as e: logger.warn("Tautulli DataFactory :: Unable to execute database query for get_library_stats: %s." 
% e) @@ -1228,15 +1228,15 @@ class DataFactory(object): try: if days > 0: if str(rating_key).isdigit(): - query = 'SELECT (SUM(stopped - started) - ' \ - 'SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END)) AS total_time, ' \ - 'COUNT(DISTINCT %s) AS total_plays, section_id ' \ - 'FROM session_history ' \ - 'JOIN session_history_metadata ON session_history_metadata.id = session_history.id ' \ - 'WHERE stopped >= ? ' \ - 'AND (session_history.grandparent_rating_key IN (%s) ' \ - 'OR session_history.parent_rating_key IN (%s) ' \ - 'OR session_history.rating_key IN (%s))' % ( + query = "SELECT (SUM(stopped - started) - " \ + "SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END)) AS total_time, " \ + "COUNT(DISTINCT %s) AS total_plays, section_id " \ + "FROM session_history " \ + "JOIN session_history_metadata ON session_history_metadata.id = session_history.id " \ + "WHERE stopped >= ? " \ + "AND (session_history.grandparent_rating_key IN (%s) " \ + "OR session_history.parent_rating_key IN (%s) " \ + "OR session_history.rating_key IN (%s))" % ( group_by, rating_keys_arg, rating_keys_arg, rating_keys_arg ) @@ -1245,14 +1245,14 @@ class DataFactory(object): result = [] else: if str(rating_key).isdigit(): - query = 'SELECT (SUM(stopped - started) - ' \ - 'SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END)) AS total_time, ' \ - 'COUNT(DISTINCT %s) AS total_plays, section_id ' \ - 'FROM session_history ' \ - 'JOIN session_history_metadata ON session_history_metadata.id = session_history.id ' \ - 'WHERE (session_history.grandparent_rating_key IN (%s) ' \ - 'OR session_history.parent_rating_key IN (%s) ' \ - 'OR session_history.rating_key IN (%s))' % ( + query = "SELECT (SUM(stopped - started) - " \ + "SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END)) AS total_time, " \ + "COUNT(DISTINCT %s) AS total_plays, section_id " \ + "FROM session_history " \ + "JOIN session_history_metadata ON session_history_metadata.id = session_history.id " \ + "WHERE (session_history.grandparent_rating_key IN (%s) " \ + "OR session_history.parent_rating_key IN (%s) " \ + "OR session_history.rating_key IN (%s))" % ( group_by, rating_keys_arg, rating_keys_arg, rating_keys_arg ) @@ -1308,20 +1308,20 @@ class DataFactory(object): try: if str(rating_key).isdigit(): - query = 'SELECT (CASE WHEN users.friendly_name IS NULL OR TRIM(users.friendly_name) = "" ' \ - 'THEN users.username ELSE users.friendly_name END) AS friendly_name, ' \ - 'users.user_id, users.username, users.thumb, users.custom_avatar_url AS custom_thumb, ' \ - 'COUNT(DISTINCT %s) AS total_plays, (SUM(stopped - started) - ' \ - 'SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END)) AS total_time, ' \ - 'section_id ' \ - 'FROM session_history ' \ - 'JOIN session_history_metadata ON session_history_metadata.id = session_history.id ' \ - 'JOIN users ON users.user_id = session_history.user_id ' \ - 'WHERE (session_history.grandparent_rating_key IN (%s) ' \ - 'OR session_history.parent_rating_key IN (%s) ' \ - 'OR session_history.rating_key IN (%s)) ' \ - 'GROUP BY users.user_id ' \ - 'ORDER BY total_plays DESC, total_time DESC' % ( + query = "SELECT (CASE WHEN users.friendly_name IS NULL OR TRIM(users.friendly_name) = '' " \ + "THEN users.username ELSE users.friendly_name END) AS friendly_name, " \ + "users.user_id, users.username, users.thumb, users.custom_avatar_url AS custom_thumb, " \ + "COUNT(DISTINCT %s) AS total_plays, (SUM(stopped - started) - " \ + "SUM(CASE WHEN 
paused_counter IS NULL THEN 0 ELSE paused_counter END)) AS total_time, " \ + "section_id " \ + "FROM session_history " \ + "JOIN session_history_metadata ON session_history_metadata.id = session_history.id " \ + "JOIN users ON users.user_id = session_history.user_id " \ + "WHERE (session_history.grandparent_rating_key IN (%s) " \ + "OR session_history.parent_rating_key IN (%s) " \ + "OR session_history.rating_key IN (%s)) " \ + "GROUP BY users.user_id " \ + "ORDER BY total_plays DESC, total_time DESC" % ( group_by, rating_keys_arg, rating_keys_arg, rating_keys_arg ) @@ -1362,53 +1362,53 @@ class DataFactory(object): user_cond = '' table = 'session_history' if row_id else 'sessions' if session.get_session_user_id(): - user_cond = 'AND %s.user_id = %s ' % (table, session.get_session_user_id()) + user_cond = "AND %s.user_id = %s " % (table, session.get_session_user_id()) if row_id: - query = 'SELECT bitrate, video_full_resolution, ' \ - 'optimized_version, optimized_version_profile, optimized_version_title, ' \ - 'synced_version, synced_version_profile, ' \ - 'container, video_codec, video_bitrate, video_width, video_height, video_framerate, ' \ - 'video_dynamic_range, aspect_ratio, ' \ - 'audio_codec, audio_bitrate, audio_channels, audio_language, audio_language_code, ' \ - 'subtitle_codec, subtitle_forced, subtitle_language, ' \ - 'stream_bitrate, stream_video_full_resolution, quality_profile, stream_container_decision, stream_container, ' \ - 'stream_video_decision, stream_video_codec, stream_video_bitrate, stream_video_width, stream_video_height, ' \ - 'stream_video_framerate, stream_video_dynamic_range, ' \ - 'stream_audio_decision, stream_audio_codec, stream_audio_bitrate, stream_audio_channels, ' \ - 'stream_audio_language, stream_audio_language_code, ' \ - 'subtitles, stream_subtitle_decision, stream_subtitle_codec, stream_subtitle_forced, stream_subtitle_language, ' \ - 'transcode_hw_decoding, transcode_hw_encoding, ' \ - 'video_decision, audio_decision, transcode_decision, width, height, container, ' \ - 'transcode_container, transcode_video_codec, transcode_audio_codec, transcode_audio_channels, ' \ - 'transcode_width, transcode_height, ' \ - 'session_history_metadata.media_type, title, grandparent_title, original_title ' \ - 'FROM session_history_media_info ' \ - 'JOIN session_history ON session_history_media_info.id = session_history.id ' \ - 'JOIN session_history_metadata ON session_history_media_info.id = session_history_metadata.id ' \ - 'WHERE session_history_media_info.id = ? 
%s' % user_cond + query = "SELECT bitrate, video_full_resolution, " \ + "optimized_version, optimized_version_profile, optimized_version_title, " \ + "synced_version, synced_version_profile, " \ + "container, video_codec, video_bitrate, video_width, video_height, video_framerate, " \ + "video_dynamic_range, aspect_ratio, " \ + "audio_codec, audio_bitrate, audio_channels, audio_language, audio_language_code, " \ + "subtitle_codec, subtitle_forced, subtitle_language, " \ + "stream_bitrate, stream_video_full_resolution, quality_profile, stream_container_decision, stream_container, " \ + "stream_video_decision, stream_video_codec, stream_video_bitrate, stream_video_width, stream_video_height, " \ + "stream_video_framerate, stream_video_dynamic_range, " \ + "stream_audio_decision, stream_audio_codec, stream_audio_bitrate, stream_audio_channels, " \ + "stream_audio_language, stream_audio_language_code, " \ + "subtitles, stream_subtitle_decision, stream_subtitle_codec, stream_subtitle_forced, stream_subtitle_language, " \ + "transcode_hw_decoding, transcode_hw_encoding, " \ + "video_decision, audio_decision, transcode_decision, width, height, container, " \ + "transcode_container, transcode_video_codec, transcode_audio_codec, transcode_audio_channels, " \ + "transcode_width, transcode_height, " \ + "session_history_metadata.media_type, title, grandparent_title, original_title " \ + "FROM session_history_media_info " \ + "JOIN session_history ON session_history_media_info.id = session_history.id " \ + "JOIN session_history_metadata ON session_history_media_info.id = session_history_metadata.id " \ + "WHERE session_history_media_info.id = ? %s" % user_cond result = monitor_db.select(query, args=[row_id]) elif session_key: - query = 'SELECT bitrate, video_full_resolution, ' \ - 'optimized_version, optimized_version_profile, optimized_version_title, ' \ - 'synced_version, synced_version_profile, ' \ - 'container, video_codec, video_bitrate, video_width, video_height, video_framerate, ' \ - 'video_dynamic_range, aspect_ratio, ' \ - 'audio_codec, audio_bitrate, audio_channels, audio_language, audio_language_code, ' \ - 'subtitle_codec, subtitle_forced, subtitle_language, ' \ - 'stream_bitrate, stream_video_full_resolution, quality_profile, stream_container_decision, stream_container, ' \ - 'stream_video_decision, stream_video_codec, stream_video_bitrate, stream_video_width, stream_video_height, ' \ - 'stream_video_framerate, stream_video_dynamic_range, ' \ - 'stream_audio_decision, stream_audio_codec, stream_audio_bitrate, stream_audio_channels, ' \ - 'stream_audio_language, stream_audio_language_code, ' \ - 'subtitles, stream_subtitle_decision, stream_subtitle_codec, stream_subtitle_forced, stream_subtitle_language, ' \ - 'transcode_hw_decoding, transcode_hw_encoding, ' \ - 'video_decision, audio_decision, transcode_decision, width, height, container, ' \ - 'transcode_container, transcode_video_codec, transcode_audio_codec, transcode_audio_channels, ' \ - 'transcode_width, transcode_height, ' \ - 'media_type, title, grandparent_title, original_title ' \ - 'FROM sessions ' \ - 'WHERE session_key = ? 
%s' % user_cond + query = "SELECT bitrate, video_full_resolution, " \ + "optimized_version, optimized_version_profile, optimized_version_title, " \ + "synced_version, synced_version_profile, " \ + "container, video_codec, video_bitrate, video_width, video_height, video_framerate, " \ + "video_dynamic_range, aspect_ratio, " \ + "audio_codec, audio_bitrate, audio_channels, audio_language, audio_language_code, " \ + "subtitle_codec, subtitle_forced, subtitle_language, " \ + "stream_bitrate, stream_video_full_resolution, quality_profile, stream_container_decision, stream_container, " \ + "stream_video_decision, stream_video_codec, stream_video_bitrate, stream_video_width, stream_video_height, " \ + "stream_video_framerate, stream_video_dynamic_range, " \ + "stream_audio_decision, stream_audio_codec, stream_audio_bitrate, stream_audio_channels, " \ + "stream_audio_language, stream_audio_language_code, " \ + "subtitles, stream_subtitle_decision, stream_subtitle_codec, stream_subtitle_forced, stream_subtitle_language, " \ + "transcode_hw_decoding, transcode_hw_encoding, " \ + "video_decision, audio_decision, transcode_decision, width, height, container, " \ + "transcode_container, transcode_video_codec, transcode_audio_codec, transcode_audio_channels, " \ + "transcode_width, transcode_height, " \ + "media_type, title, grandparent_title, original_title " \ + "FROM sessions " \ + "WHERE session_key = ? %s" % user_cond result = monitor_db.select(query, args=[session_key]) else: return None @@ -1499,43 +1499,43 @@ class DataFactory(object): if rating_key or guid: if guid: - where = 'session_history_metadata.guid LIKE ?' + where = "session_history_metadata.guid LIKE ?" args = [guid.split('?')[0] + '%'] # SQLite LIKE wildcard else: - where = 'session_history_metadata.rating_key = ?' + where = "session_history_metadata.rating_key = ?" 
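A note on the quoting convention applied throughout these hunks: SQLite uses single quotes to delimit string literals and double quotes to delimit identifiers (table and column names). Comparisons such as media_type = "track" only matched because of SQLite's legacy fallback that reinterprets an unresolvable double-quoted identifier as a string literal, a misfeature that newer SQLite builds can disable, so moving the SQL literals to single quotes (and the enclosing Python strings to double quotes) is the portable form. The same rule covers the date/time modifiers further down, e.g. date(started, 'unixepoch', 'localtime'). A minimal sketch of the difference using the standard-library sqlite3 module and a throwaway in-memory table (the table and data below are illustrative only, not Tautulli's real schema):

import sqlite3

con = sqlite3.connect(":memory:")
con.execute("CREATE TABLE session_history (media_type TEXT)")
con.execute("INSERT INTO session_history VALUES ('track')")

# Single-quoted SQL literal: the standard, portable form.
print(con.execute("SELECT COUNT(*) FROM session_history "
                  "WHERE media_type = 'track'").fetchone())        # (1,)

# Double-quoted "track" is parsed as an identifier; it only matches here
# because default SQLite builds fall back to treating an unknown identifier
# as a string literal. Builds with that fallback disabled raise an
# sqlite3.OperationalError ("no such column: track") instead.
print(con.execute('SELECT COUNT(*) FROM session_history '
                  'WHERE media_type = "track"').fetchone())        # (1,) on default builds

# Bound parameters avoid quoting questions entirely for values.
print(con.execute("SELECT COUNT(*) FROM session_history "
                  "WHERE media_type = ?", ("track",)).fetchone())  # (1,)

Parameter binding only works for values, not identifiers, which is why the queries in this diff still interpolate table names, GROUP BY columns, and caller-built WHERE fragments with % formatting while keeping ? placeholders for values such as rating_key and session_key.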
args = [rating_key] - query = 'SELECT session_history.section_id, session_history_metadata.id, ' \ - 'session_history_metadata.rating_key, session_history_metadata.parent_rating_key, ' \ - 'session_history_metadata.grandparent_rating_key, session_history_metadata.title, ' \ - 'session_history_metadata.parent_title, session_history_metadata.grandparent_title, ' \ - 'session_history_metadata.original_title, session_history_metadata.full_title, ' \ - 'library_sections.section_name, ' \ - 'session_history_metadata.media_index, session_history_metadata.parent_media_index, ' \ - 'session_history_metadata.thumb, ' \ - 'session_history_metadata.parent_thumb, session_history_metadata.grandparent_thumb, ' \ - 'session_history_metadata.art, session_history_metadata.media_type, session_history_metadata.year, ' \ - 'session_history_metadata.originally_available_at, session_history_metadata.added_at, ' \ - 'session_history_metadata.updated_at, session_history_metadata.last_viewed_at, ' \ - 'session_history_metadata.content_rating, session_history_metadata.summary, ' \ - 'session_history_metadata.tagline, session_history_metadata.rating, session_history_metadata.duration, ' \ - 'session_history_metadata.guid, session_history_metadata.directors, session_history_metadata.writers, ' \ - 'session_history_metadata.actors, session_history_metadata.genres, session_history_metadata.studio, ' \ - 'session_history_metadata.labels, ' \ - 'session_history_media_info.container, session_history_media_info.bitrate, ' \ - 'session_history_media_info.video_codec, session_history_media_info.video_resolution, ' \ - 'session_history_media_info.video_full_resolution, ' \ - 'session_history_media_info.video_framerate, session_history_media_info.audio_codec, ' \ - 'session_history_media_info.audio_channels, session_history_metadata.live, ' \ - 'session_history_metadata.channel_call_sign, session_history_metadata.channel_identifier, ' \ - 'session_history_metadata.channel_thumb ' \ - 'FROM session_history_metadata ' \ - 'JOIN library_sections ON session_history.section_id = library_sections.section_id ' \ - 'JOIN session_history ON session_history_metadata.id = session_history.id ' \ - 'JOIN session_history_media_info ON session_history_metadata.id = session_history_media_info.id ' \ - 'WHERE %s ' \ - 'ORDER BY session_history_metadata.id DESC ' \ - 'LIMIT 1' % where + query = "SELECT session_history.section_id, session_history_metadata.id, " \ + "session_history_metadata.rating_key, session_history_metadata.parent_rating_key, " \ + "session_history_metadata.grandparent_rating_key, session_history_metadata.title, " \ + "session_history_metadata.parent_title, session_history_metadata.grandparent_title, " \ + "session_history_metadata.original_title, session_history_metadata.full_title, " \ + "library_sections.section_name, " \ + "session_history_metadata.media_index, session_history_metadata.parent_media_index, " \ + "session_history_metadata.thumb, " \ + "session_history_metadata.parent_thumb, session_history_metadata.grandparent_thumb, " \ + "session_history_metadata.art, session_history_metadata.media_type, session_history_metadata.year, " \ + "session_history_metadata.originally_available_at, session_history_metadata.added_at, " \ + "session_history_metadata.updated_at, session_history_metadata.last_viewed_at, " \ + "session_history_metadata.content_rating, session_history_metadata.summary, " \ + "session_history_metadata.tagline, session_history_metadata.rating, session_history_metadata.duration, " \ + 
"session_history_metadata.guid, session_history_metadata.directors, session_history_metadata.writers, " \ + "session_history_metadata.actors, session_history_metadata.genres, session_history_metadata.studio, " \ + "session_history_metadata.labels, " \ + "session_history_media_info.container, session_history_media_info.bitrate, " \ + "session_history_media_info.video_codec, session_history_media_info.video_resolution, " \ + "session_history_media_info.video_full_resolution, " \ + "session_history_media_info.video_framerate, session_history_media_info.audio_codec, " \ + "session_history_media_info.audio_channels, session_history_metadata.live, " \ + "session_history_metadata.channel_call_sign, session_history_metadata.channel_identifier, " \ + "session_history_metadata.channel_thumb " \ + "FROM session_history_metadata " \ + "JOIN library_sections ON session_history.section_id = library_sections.section_id " \ + "JOIN session_history ON session_history_metadata.id = session_history.id " \ + "JOIN session_history_media_info ON session_history_metadata.id = session_history_media_info.id " \ + "WHERE %s " \ + "ORDER BY session_history_metadata.id DESC " \ + "LIMIT 1" % where result = monitor_db.select(query=query, args=args) else: result = [] @@ -1614,14 +1614,14 @@ class DataFactory(object): where, args = datatables.build_custom_where(custom_where=custom_where) try: - query = 'SELECT SUM(CASE WHEN stopped > 0 THEN (stopped - started) ELSE 0 END) - ' \ - 'SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) AS total_duration, ' \ - '(CASE WHEN session_history_metadata.live = 1 THEN "live" ELSE session_history.media_type END) ' \ - 'AS media_type_live ' \ - 'FROM session_history ' \ - 'JOIN session_history_metadata ON session_history_metadata.id = session_history.id ' \ - 'JOIN session_history_media_info ON session_history_media_info.id = session_history.id ' \ - '%s ' % where + query = "SELECT SUM(CASE WHEN stopped > 0 THEN (stopped - started) ELSE 0 END) - " \ + "SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) AS total_duration, " \ + "(CASE WHEN session_history_metadata.live = 1 THEN 'live' ELSE session_history.media_type END) " \ + "AS media_type_live " \ + "FROM session_history " \ + "JOIN session_history_metadata ON session_history_metadata.id = session_history.id " \ + "JOIN session_history_media_info ON session_history_media_info.id = session_history.id " \ + "%s " % where result = monitor_db.select(query, args=args) except Exception as e: logger.warn("Tautulli DataFactory :: Unable to execute database query for get_total_duration: %s." % e) @@ -1644,7 +1644,7 @@ class DataFactory(object): if session_key: try: - query = 'SELECT ip_address FROM sessions WHERE session_key = %d %s' % (int(session_key), user_cond) + query = "SELECT ip_address FROM sessions WHERE session_key = %d %s" % (int(session_key), user_cond) result = monitor_db.select(query) except Exception as e: logger.warn("Tautulli DataFactory :: Unable to execute database query for get_session_ip: %s." % e) @@ -1694,19 +1694,19 @@ class DataFactory(object): where = '' if where_params: - where = 'WHERE ' + ' AND '.join([w + ' = ?' for w in where_params]) + where = "WHERE " + " AND ".join([w + " = ?" 
for w in where_params]) if order_by: - order_by = 'ORDER BY ' + order_by + ' DESC' + order_by = "ORDER BY " + order_by + " DESC" if service == 'imgur': - query = 'SELECT imgur_title AS img_title, imgur_url AS img_url FROM imgur_lookup ' \ - 'JOIN image_hash_lookup ON imgur_lookup.img_hash = image_hash_lookup.img_hash ' \ - '%s %s' % (where, order_by) + query = "SELECT imgur_title AS img_title, imgur_url AS img_url FROM imgur_lookup " \ + "JOIN image_hash_lookup ON imgur_lookup.img_hash = image_hash_lookup.img_hash " \ + "%s %s" % (where, order_by) elif service == 'cloudinary': - query = 'SELECT cloudinary_title AS img_title, cloudinary_url AS img_url FROM cloudinary_lookup ' \ - 'JOIN image_hash_lookup ON cloudinary_lookup.img_hash = image_hash_lookup.img_hash ' \ - '%s %s' % (where, order_by) + query = "SELECT cloudinary_title AS img_title, cloudinary_url AS img_url FROM cloudinary_lookup " \ + "JOIN image_hash_lookup ON cloudinary_lookup.img_hash = image_hash_lookup.img_hash " \ + "%s %s" % (where, order_by) else: logger.warn("Tautulli DataFactory :: Unable to execute database query for get_img_info: " "service not provided.") @@ -1754,14 +1754,14 @@ class DataFactory(object): args = [] log_msg = '' if rating_key: - where = 'WHERE rating_key = ?' + where = "WHERE rating_key = ?" args = [rating_key] log_msg = ' for rating_key %s' % rating_key if service.lower() == 'imgur': # Delete from Imgur - query = 'SELECT imgur_title, delete_hash, fallback FROM imgur_lookup ' \ - 'JOIN image_hash_lookup ON imgur_lookup.img_hash = image_hash_lookup.img_hash %s' % where + query = "SELECT imgur_title, delete_hash, fallback FROM imgur_lookup " \ + "JOIN image_hash_lookup ON imgur_lookup.img_hash = image_hash_lookup.img_hash %s" % where results = monitor_db.select(query, args=args) for imgur_info in results: @@ -1772,15 +1772,15 @@ class DataFactory(object): logger.info("Tautulli DataFactory :: Deleting Imgur info%s from the database." % log_msg) - result = monitor_db.action('DELETE FROM imgur_lookup WHERE img_hash ' - 'IN (SELECT img_hash FROM image_hash_lookup %s)' % where, + result = monitor_db.action("DELETE FROM imgur_lookup WHERE img_hash " + "IN (SELECT img_hash FROM image_hash_lookup %s)" % where, args) elif service.lower() == 'cloudinary': # Delete from Cloudinary - query = 'SELECT cloudinary_title, rating_key, fallback FROM cloudinary_lookup ' \ - 'JOIN image_hash_lookup ON cloudinary_lookup.img_hash = image_hash_lookup.img_hash %s ' \ - 'GROUP BY rating_key' % where + query = "SELECT cloudinary_title, rating_key, fallback FROM cloudinary_lookup " \ + "JOIN image_hash_lookup ON cloudinary_lookup.img_hash = image_hash_lookup.img_hash %s " \ + "GROUP BY rating_key" % where results = monitor_db.select(query, args=args) if delete_all: @@ -1791,8 +1791,8 @@ class DataFactory(object): logger.info("Tautulli DataFactory :: Deleting Cloudinary info%s from the database." % log_msg) - result = monitor_db.action('DELETE FROM cloudinary_lookup WHERE img_hash ' - 'IN (SELECT img_hash FROM image_hash_lookup %s)' % where, + result = monitor_db.action("DELETE FROM cloudinary_lookup WHERE img_hash " + "IN (SELECT img_hash FROM image_hash_lookup %s)" % where, args) else: @@ -1883,15 +1883,15 @@ class DataFactory(object): if rating_key: logger.info("Tautulli DataFactory :: Deleting lookup info for rating_key %s from the database." 
% rating_key) - result_themoviedb = monitor_db.action('DELETE FROM themoviedb_lookup WHERE rating_key = ?', [rating_key]) - result_tvmaze = monitor_db.action('DELETE FROM tvmaze_lookup WHERE rating_key = ?', [rating_key]) - result_musicbrainz = monitor_db.action('DELETE FROM musicbrainz_lookup WHERE rating_key = ?', [rating_key]) + result_themoviedb = monitor_db.action("DELETE FROM themoviedb_lookup WHERE rating_key = ?", [rating_key]) + result_tvmaze = monitor_db.action("DELETE FROM tvmaze_lookup WHERE rating_key = ?", [rating_key]) + result_musicbrainz = monitor_db.action("DELETE FROM musicbrainz_lookup WHERE rating_key = ?", [rating_key]) return bool(result_themoviedb or result_tvmaze or result_musicbrainz) elif service and delete_all: if service.lower() in ('themoviedb', 'tvmaze', 'musicbrainz'): logger.info("Tautulli DataFactory :: Deleting all lookup info for '%s' from the database." % service) - result = monitor_db.action('DELETE FROM %s_lookup' % service.lower()) + result = monitor_db.action("DELETE FROM %s_lookup" % service.lower()) return bool(result) else: logger.error("Tautulli DataFactory :: Unable to delete lookup info: invalid service '%s' provided." @@ -1901,13 +1901,13 @@ class DataFactory(object): monitor_db = database.MonitorDatabase() if rating_key: - query = 'SELECT rating_key, parent_rating_key, grandparent_rating_key, title, parent_title, grandparent_title, ' \ - 'media_index, parent_media_index, year, media_type ' \ - 'FROM session_history_metadata ' \ - 'WHERE rating_key = ? ' \ - 'OR parent_rating_key = ? ' \ - 'OR grandparent_rating_key = ? ' \ - 'LIMIT 1' + query = "SELECT rating_key, parent_rating_key, grandparent_rating_key, title, parent_title, grandparent_title, " \ + "media_index, parent_media_index, year, media_type " \ + "FROM session_history_metadata " \ + "WHERE rating_key = ? " \ + "OR parent_rating_key = ? " \ + "OR grandparent_rating_key = ? " \ + "LIMIT 1" result = monitor_db.select(query=query, args=[rating_key, rating_key, rating_key]) else: result = [] @@ -1974,12 +1974,12 @@ class DataFactory(object): # Get the grandparent rating key try: - query = 'SELECT rating_key, parent_rating_key, grandparent_rating_key ' \ - 'FROM session_history_metadata ' \ - 'WHERE rating_key = ? ' \ - 'OR parent_rating_key = ? ' \ - 'OR grandparent_rating_key = ? ' \ - 'LIMIT 1' + query = "SELECT rating_key, parent_rating_key, grandparent_rating_key " \ + "FROM session_history_metadata " \ + "WHERE rating_key = ? " \ + "OR parent_rating_key = ? " \ + "OR grandparent_rating_key = ? " \ + "LIMIT 1" result = monitor_db.select(query=query, args=[rating_key, rating_key, rating_key]) grandparent_rating_key = result[0]['grandparent_rating_key'] @@ -1988,12 +1988,12 @@ class DataFactory(object): logger.warn("Tautulli DataFactory :: Unable to execute database query for get_rating_keys_list: %s." % e) return {} - query = 'SELECT rating_key, parent_rating_key, grandparent_rating_key, title, parent_title, grandparent_title, ' \ - 'media_index, parent_media_index ' \ - 'FROM session_history_metadata ' \ - 'WHERE {0} = ? ' \ - 'GROUP BY {1} ' \ - 'ORDER BY {1} DESC ' + query = "SELECT rating_key, parent_rating_key, grandparent_rating_key, title, parent_title, grandparent_title, " \ + "media_index, parent_media_index " \ + "FROM session_history_metadata " \ + "WHERE {0} = ? 
" \ + "GROUP BY {1} " \ + "ORDER BY {1} DESC " # get grandparent_rating_keys grandparents = {} @@ -2070,13 +2070,13 @@ class DataFactory(object): if metadata['media_type'] == 'show' or metadata['media_type'] == 'artist': # check grandparent_rating_key (2 tables) query = ( - 'SELECT id FROM session_history ' - 'WHERE grandparent_rating_key = ? ' + "SELECT id FROM session_history " + "WHERE grandparent_rating_key = ? " ) args = [old_key] if _UPDATE_METADATA_IDS['grandparent_rating_key_ids']: - query += 'AND id NOT IN (%s)' % ','.join(_UPDATE_METADATA_IDS['grandparent_rating_key_ids']) + query += "AND id NOT IN (%s)" % ",".join(_UPDATE_METADATA_IDS['grandparent_rating_key_ids']) ids = [str(row['id']) for row in monitor_db.select(query, args)] if ids: @@ -2085,26 +2085,26 @@ class DataFactory(object): continue monitor_db.action( - 'UPDATE session_history SET grandparent_rating_key = ? ' - 'WHERE id IN (%s)' % ','.join(ids), + "UPDATE session_history SET grandparent_rating_key = ? " + "WHERE id IN (%s)" % ",".join(ids), [new_key] ) monitor_db.action( - 'UPDATE session_history_metadata SET grandparent_rating_key = ? ' - 'WHERE id IN (%s)' % ','.join(ids), + "UPDATE session_history_metadata SET grandparent_rating_key = ? " + "WHERE id IN (%s)" % ",".join(ids), [new_key] ) elif metadata['media_type'] == 'season' or metadata['media_type'] == 'album': # check parent_rating_key (2 tables) query = ( - 'SELECT id FROM session_history ' - 'WHERE parent_rating_key = ? ' + "SELECT id FROM session_history " + "WHERE parent_rating_key = ? " ) args = [old_key] if _UPDATE_METADATA_IDS['parent_rating_key_ids']: - query += 'AND id NOT IN (%s)' % ','.join(_UPDATE_METADATA_IDS['parent_rating_key_ids']) + query += "AND id NOT IN (%s)" % ",".join(_UPDATE_METADATA_IDS['parent_rating_key_ids']) ids = [str(row['id']) for row in monitor_db.select(query, args)] if ids: @@ -2113,26 +2113,26 @@ class DataFactory(object): continue monitor_db.action( - 'UPDATE session_history SET parent_rating_key = ? ' - 'WHERE id IN (%s)' % ','.join(ids), + "UPDATE session_history SET parent_rating_key = ? " + "WHERE id IN (%s)" % ",".join(ids), [new_key] ) monitor_db.action( - 'UPDATE session_history_metadata SET parent_rating_key = ? ' - 'WHERE id IN (%s)' % ','.join(ids), + "UPDATE session_history_metadata SET parent_rating_key = ? " + "WHERE id IN (%s)" % ",".join(ids), [new_key] ) else: # check rating_key (2 tables) query = ( - 'SELECT id FROM session_history ' - 'WHERE rating_key = ? ' + "SELECT id FROM session_history " + "WHERE rating_key = ? " ) args = [old_key] if _UPDATE_METADATA_IDS['rating_key_ids']: - query += 'AND id NOT IN (%s)' % ','.join(_UPDATE_METADATA_IDS['rating_key_ids']) + query += "AND id NOT IN (%s)" % ",".join(_UPDATE_METADATA_IDS['rating_key_ids']) ids = [str(row['id']) for row in monitor_db.select(query, args)] if ids: @@ -2141,13 +2141,13 @@ class DataFactory(object): continue monitor_db.action( - 'UPDATE session_history SET rating_key = ? ' - 'WHERE id IN (%s)' % ','.join(ids), + "UPDATE session_history SET rating_key = ? " + "WHERE id IN (%s)" % ",".join(ids), [new_key] ) monitor_db.action( - 'UPDATE session_history_media_info SET rating_key = ? ' - 'WHERE id IN (%s)' % ','.join(ids), + "UPDATE session_history_media_info SET rating_key = ? " + "WHERE id IN (%s)" % ",".join(ids), [new_key] ) @@ -2181,21 +2181,21 @@ class DataFactory(object): monitor_db = database.MonitorDatabase() - query = 'UPDATE session_history SET section_id = ? 
' \ - 'WHERE id IN (%s)' % ','.join(ids) + query = "UPDATE session_history SET section_id = ? " \ + "WHERE id IN (%s)" % ",".join(ids) args = [metadata['section_id']] monitor_db.action(query=query, args=args) # Update the session_history_metadata table - query = 'UPDATE session_history_metadata SET rating_key = ?, parent_rating_key = ?, ' \ - 'grandparent_rating_key = ?, title = ?, parent_title = ?, grandparent_title = ?, ' \ - 'original_title = ?, full_title = ?, ' \ - 'media_index = ?, parent_media_index = ?, thumb = ?, parent_thumb = ?, ' \ - 'grandparent_thumb = ?, art = ?, media_type = ?, year = ?, originally_available_at = ?, ' \ - 'added_at = ?, updated_at = ?, last_viewed_at = ?, content_rating = ?, summary = ?, ' \ - 'tagline = ?, rating = ?, duration = ?, guid = ?, directors = ?, writers = ?, actors = ?, ' \ - 'genres = ?, studio = ?, labels = ? ' \ - 'WHERE id IN (%s)' % ','.join(ids) + query = "UPDATE session_history_metadata SET rating_key = ?, parent_rating_key = ?, " \ + "grandparent_rating_key = ?, title = ?, parent_title = ?, grandparent_title = ?, " \ + "original_title = ?, full_title = ?, " \ + "media_index = ?, parent_media_index = ?, thumb = ?, parent_thumb = ?, " \ + "grandparent_thumb = ?, art = ?, media_type = ?, year = ?, originally_available_at = ?, " \ + "added_at = ?, updated_at = ?, last_viewed_at = ?, content_rating = ?, summary = ?, " \ + "tagline = ?, rating = ?, duration = ?, guid = ?, directors = ?, writers = ?, actors = ?, " \ + "genres = ?, studio = ?, labels = ? " \ + "WHERE id IN (%s)" % ",".join(ids) args = [metadata['rating_key'], metadata['parent_rating_key'], metadata['grandparent_rating_key'], metadata['title'], metadata['parent_title'], metadata['grandparent_title'], @@ -2212,19 +2212,19 @@ class DataFactory(object): def get_notification_log(self, kwargs=None): data_tables = datatables.DataTables() - columns = ['notify_log.id', - 'notify_log.timestamp', - 'notify_log.session_key', - 'notify_log.rating_key', - 'notify_log.user_id', - 'notify_log.user', - 'notify_log.notifier_id', - 'notify_log.agent_id', - 'notify_log.agent_name', - 'notify_log.notify_action', - 'notify_log.subject_text', - 'notify_log.body_text', - 'notify_log.success' + columns = ["notify_log.id", + "notify_log.timestamp", + "notify_log.session_key", + "notify_log.rating_key", + "notify_log.user_id", + "notify_log.user", + "notify_log.notifier_id", + "notify_log.agent_id", + "notify_log.agent_name", + "notify_log.notify_action", + "notify_log.subject_text", + "notify_log.body_text", + "notify_log.success" ] try: query = data_tables.ssp_query(table_name='notify_log', @@ -2281,8 +2281,8 @@ class DataFactory(object): try: logger.info("Tautulli DataFactory :: Clearing notification logs from database.") - monitor_db.action('DELETE FROM notify_log') - monitor_db.action('VACUUM') + monitor_db.action("DELETE FROM notify_log") + monitor_db.action("VACUUM") return True except Exception as e: logger.warn("Tautulli DataFactory :: Unable to execute database query for delete_notification_log: %s." 
% e) @@ -2291,18 +2291,18 @@ class DataFactory(object): def get_newsletter_log(self, kwargs=None): data_tables = datatables.DataTables() - columns = ['newsletter_log.id', - 'newsletter_log.timestamp', - 'newsletter_log.newsletter_id', - 'newsletter_log.agent_id', - 'newsletter_log.agent_name', - 'newsletter_log.notify_action', - 'newsletter_log.subject_text', - 'newsletter_log.body_text', - 'newsletter_log.start_date', - 'newsletter_log.end_date', - 'newsletter_log.uuid', - 'newsletter_log.success' + columns = ["newsletter_log.id", + "newsletter_log.timestamp", + "newsletter_log.newsletter_id", + "newsletter_log.agent_id", + "newsletter_log.agent_name", + "newsletter_log.notify_action", + "newsletter_log.subject_text", + "newsletter_log.body_text", + "newsletter_log.start_date", + "newsletter_log.end_date", + "newsletter_log.uuid", + "newsletter_log.success" ] try: query = data_tables.ssp_query(table_name='newsletter_log', @@ -2353,8 +2353,8 @@ class DataFactory(object): try: logger.info("Tautulli DataFactory :: Clearing newsletter logs from database.") - monitor_db.action('DELETE FROM newsletter_log') - monitor_db.action('VACUUM') + monitor_db.action("DELETE FROM newsletter_log") + monitor_db.action("VACUUM") return True except Exception as e: logger.warn("Tautulli DataFactory :: Unable to execute database query for delete_newsletter_log: %s." % e) @@ -2365,15 +2365,15 @@ class DataFactory(object): if user_id: if history_only: - query = 'SELECT machine_id FROM session_history ' \ - 'WHERE user_id = ? ' \ - 'GROUP BY machine_id' + query = "SELECT machine_id FROM session_history " \ + "WHERE user_id = ? " \ + "GROUP BY machine_id" else: - query = 'SELECT * FROM (' \ - 'SELECT user_id, machine_id FROM session_history ' \ - 'UNION SELECT user_id, machine_id from sessions_continued) ' \ - 'WHERE user_id = ? ' \ - 'GROUP BY machine_id' + query = "SELECT * FROM (" \ + "SELECT user_id, machine_id FROM session_history " \ + "UNION SELECT user_id, machine_id from sessions_continued) " \ + "WHERE user_id = ? " \ + "GROUP BY machine_id" try: result = monitor_db.select(query=query, args=[user_id]) @@ -2390,7 +2390,7 @@ class DataFactory(object): if rating_key: try: - query = 'SELECT * FROM recently_added WHERE rating_key = ?' + query = "SELECT * FROM recently_added WHERE rating_key = ?" result = monitor_db.select(query=query, args=[rating_key]) except Exception as e: logger.warn("Tautulli DataFactory :: Unable to execute database query for get_recently_added_item: %s." 
% e) diff --git a/plexpy/exporter.py b/plexpy/exporter.py index 908aced7..3a48c6d7 100644 --- a/plexpy/exporter.py +++ b/plexpy/exporter.py @@ -2291,9 +2291,9 @@ class ExportObject(Export): def get_export(export_id): db = database.MonitorDatabase() - result = db.select_single('SELECT timestamp, title, file_format, thumb_level, art_level, ' - 'individual_files, complete ' - 'FROM exports WHERE id = ?', + result = db.select_single("SELECT timestamp, title, file_format, thumb_level, art_level, " + "individual_files, complete " + "FROM exports WHERE id = ?", [export_id]) if result: @@ -2324,7 +2324,7 @@ def delete_export(export_id): if deleted: logger.info("Tautulli Exporter :: Deleting export_id %s from the database.", export_id) db = database.MonitorDatabase() - result = db.action('DELETE FROM exports WHERE id = ?', args=[export_id]) + result = db.action("DELETE FROM exports WHERE id = ?", args=[export_id]) return deleted else: @@ -2349,7 +2349,7 @@ def delete_all_exports(): def cancel_exports(): db = database.MonitorDatabase() - db.action('UPDATE exports SET complete = -1 WHERE complete = 0') + db.action("UPDATE exports SET complete = -1 WHERE complete = 0") def get_export_datatable(section_id=None, user_id=None, rating_key=None, kwargs=None): @@ -2368,27 +2368,27 @@ def get_export_datatable(section_id=None, user_id=None, rating_key=None, kwargs= if rating_key: custom_where.append(['exports.rating_key', rating_key]) - columns = ['exports.id AS export_id', - 'exports.timestamp', - 'exports.section_id', - 'exports.user_id', - 'exports.rating_key', - 'exports.media_type', - 'CASE WHEN exports.media_type = "photoalbum" THEN "Photo Album" ELSE ' - 'UPPER(SUBSTR(exports.media_type, 1, 1)) || SUBSTR(exports.media_type, 2) END ' - 'AS media_type_title', - 'exports.title', - 'exports.file_format', - 'exports.metadata_level', - 'exports.media_info_level', - 'exports.thumb_level', - 'exports.art_level', - 'exports.custom_fields', - 'exports.individual_files', - 'exports.file_size', - 'exports.complete', - 'exports.total_items', - 'exports.exported_items' + columns = ["exports.id AS export_id", + "exports.timestamp", + "exports.section_id", + "exports.user_id", + "exports.rating_key", + "exports.media_type", + "CASE WHEN exports.media_type = 'photoalbum' THEN 'Photo Album' ELSE " + "UPPER(SUBSTR(exports.media_type, 1, 1)) || SUBSTR(exports.media_type, 2) END " + "AS media_type_title", + "exports.title", + "exports.file_format", + "exports.metadata_level", + "exports.media_info_level", + "exports.thumb_level", + "exports.art_level", + "exports.custom_fields", + "exports.individual_files", + "exports.file_size", + "exports.complete", + "exports.total_items", + "exports.exported_items" ] try: query = data_tables.ssp_query(table_name='exports', diff --git a/plexpy/graphs.py b/plexpy/graphs.py index e9afa704..49dfee57 100644 --- a/plexpy/graphs.py +++ b/plexpy/graphs.py @@ -64,42 +64,42 @@ class Graphs(object): try: if y_axis == 'plays': - query = 'SELECT sh.date_played, ' \ - 'SUM(CASE WHEN sh.media_type = "episode" AND shm.live = 0 THEN 1 ELSE 0 END) AS tv_count, ' \ - 'SUM(CASE WHEN sh.media_type = "movie" AND shm.live = 0 THEN 1 ELSE 0 END) AS movie_count, ' \ - 'SUM(CASE WHEN sh.media_type = "track" AND shm.live = 0 THEN 1 ELSE 0 END) AS music_count, ' \ - 'SUM(shm.live) AS live_count ' \ - 'FROM (SELECT *,' \ - ' date(started, "unixepoch", "localtime") AS date_played ' \ - ' FROM session_history ' \ - ' WHERE session_history.stopped >= %s %s ' \ - ' GROUP BY date_played, %s) AS sh ' \ - 'JOIN 
session_history_metadata AS shm ON shm.id = sh.id ' \ - 'GROUP BY sh.date_played ' \ - 'ORDER BY sh.started' % (timestamp, user_cond, group_by) + query = "SELECT sh.date_played, " \ + "SUM(CASE WHEN sh.media_type = 'episode' AND shm.live = 0 THEN 1 ELSE 0 END) AS tv_count, " \ + "SUM(CASE WHEN sh.media_type = 'movie' AND shm.live = 0 THEN 1 ELSE 0 END) AS movie_count, " \ + "SUM(CASE WHEN sh.media_type = 'track' AND shm.live = 0 THEN 1 ELSE 0 END) AS music_count, " \ + "SUM(shm.live) AS live_count " \ + "FROM (SELECT *," \ + " date(started, 'unixepoch', 'localtime') AS date_played " \ + " FROM session_history " \ + " WHERE session_history.stopped >= %s %s " \ + " GROUP BY date_played, %s) AS sh " \ + "JOIN session_history_metadata AS shm ON shm.id = sh.id " \ + "GROUP BY sh.date_played " \ + "ORDER BY sh.started" % (timestamp, user_cond, group_by) result = monitor_db.select(query) else: - query = 'SELECT sh.date_played, ' \ - 'SUM(CASE WHEN sh.media_type = "episode" AND shm.live = 0 ' \ - ' THEN sh.d ELSE 0 END) AS tv_count, ' \ - 'SUM(CASE WHEN sh.media_type = "movie" AND shm.live = 0 ' \ - ' THEN sh.d ELSE 0 END) AS movie_count, ' \ - 'SUM(CASE WHEN sh.media_type = "track" AND shm.live = 0 ' \ - ' THEN sh.d ELSE 0 END) AS music_count, ' \ - 'SUM(CASE WHEN shm.live = 1 ' \ - ' THEN sh.d ELSE 0 END) AS live_count ' \ - 'FROM (SELECT *,' \ - ' date(started, "unixepoch", "localtime") AS date_played,' \ - ' SUM(CASE WHEN stopped > 0 THEN (stopped - started) - ' \ - ' (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) ' \ - ' AS d ' \ - ' FROM session_history ' \ - ' WHERE session_history.stopped >= %s %s' \ - ' GROUP BY date_played, %s) AS sh ' \ - 'JOIN session_history_metadata AS shm ON shm.id = sh.id ' \ - 'GROUP BY sh.date_played ' \ - 'ORDER BY sh.started' % (timestamp, user_cond, group_by) + query = "SELECT sh.date_played, " \ + "SUM(CASE WHEN sh.media_type = 'episode' AND shm.live = 0 " \ + " THEN sh.d ELSE 0 END) AS tv_count, " \ + "SUM(CASE WHEN sh.media_type = 'movie' AND shm.live = 0 " \ + " THEN sh.d ELSE 0 END) AS movie_count, " \ + "SUM(CASE WHEN sh.media_type = 'track' AND shm.live = 0 " \ + " THEN sh.d ELSE 0 END) AS music_count, " \ + "SUM(CASE WHEN shm.live = 1 " \ + " THEN sh.d ELSE 0 END) AS live_count " \ + "FROM (SELECT *," \ + " date(started, 'unixepoch', 'localtime') AS date_played," \ + " SUM(CASE WHEN stopped > 0 THEN (stopped - started) - " \ + " (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) " \ + " AS d " \ + " FROM session_history " \ + " WHERE session_history.stopped >= %s %s" \ + " GROUP BY date_played, %s) AS sh " \ + "JOIN session_history_metadata AS shm ON shm.id = sh.id " \ + "GROUP BY sh.date_played " \ + "ORDER BY sh.started" % (timestamp, user_cond, group_by) result = monitor_db.select(query) except Exception as e: @@ -173,9 +173,9 @@ class Graphs(object): user_cond = '' if session.get_session_user_id() and user_id and user_id != str(session.get_session_user_id()): - user_cond = 'AND session_history.user_id = %s ' % session.get_session_user_id() + user_cond = "AND session_history.user_id = %s " % session.get_session_user_id() elif user_id and user_id.isdigit(): - user_cond = 'AND session_history.user_id = %s ' % user_id + user_cond = "AND session_history.user_id = %s " % user_id if grouping is None: grouping = plexpy.CONFIG.GROUP_HISTORY_TABLES @@ -184,58 +184,58 @@ class Graphs(object): try: if y_axis == 'plays': - query = 'SELECT sh.daynumber, ' \ - '(CASE sh.daynumber ' \ - ' WHEN 0 THEN 
"Sunday" ' \ - ' WHEN 1 THEN "Monday" ' \ - ' WHEN 2 THEN "Tuesday" ' \ - ' WHEN 3 THEN "Wednesday" ' \ - ' WHEN 4 THEN "Thursday" ' \ - ' WHEN 5 THEN "Friday" ' \ - ' ELSE "Saturday" END) AS dayofweek, ' \ - 'SUM(CASE WHEN sh.media_type = "episode" AND shm.live = 0 THEN 1 ELSE 0 END) AS tv_count, ' \ - 'SUM(CASE WHEN sh.media_type = "movie" AND shm.live = 0 THEN 1 ELSE 0 END) AS movie_count, ' \ - 'SUM(CASE WHEN sh.media_type = "track" AND shm.live = 0 THEN 1 ELSE 0 END) AS music_count, ' \ - 'SUM(shm.live) AS live_count ' \ - 'FROM (SELECT *, ' \ - ' CAST(strftime("%%w", date(started, "unixepoch", "localtime")) AS INTEGER) AS daynumber' \ - ' FROM session_history ' \ - ' WHERE session_history.stopped >= %s %s ' \ - ' GROUP BY daynumber, %s) AS sh ' \ - 'JOIN session_history_metadata AS shm ON shm.id = sh.id ' \ - 'GROUP BY dayofweek ' \ - 'ORDER BY sh.daynumber' % (timestamp, user_cond, group_by) + query = "SELECT sh.daynumber, " \ + "(CASE sh.daynumber " \ + " WHEN 0 THEN 'Sunday' " \ + " WHEN 1 THEN 'Monday' " \ + " WHEN 2 THEN 'Tuesday' " \ + " WHEN 3 THEN 'Wednesday' " \ + " WHEN 4 THEN 'Thursday' " \ + " WHEN 5 THEN 'Friday' " \ + " ELSE 'Saturday' END) AS dayofweek, " \ + "SUM(CASE WHEN sh.media_type = 'episode' AND shm.live = 0 THEN 1 ELSE 0 END) AS tv_count, " \ + "SUM(CASE WHEN sh.media_type = 'movie' AND shm.live = 0 THEN 1 ELSE 0 END) AS movie_count, " \ + "SUM(CASE WHEN sh.media_type = 'track' AND shm.live = 0 THEN 1 ELSE 0 END) AS music_count, " \ + "SUM(shm.live) AS live_count " \ + "FROM (SELECT *, " \ + " CAST(strftime('%%w', date(started, 'unixepoch', 'localtime')) AS INTEGER) AS daynumber" \ + " FROM session_history " \ + " WHERE session_history.stopped >= %s %s " \ + " GROUP BY daynumber, %s) AS sh " \ + "JOIN session_history_metadata AS shm ON shm.id = sh.id " \ + "GROUP BY dayofweek " \ + "ORDER BY sh.daynumber" % (timestamp, user_cond, group_by) result = monitor_db.select(query) else: - query = 'SELECT sh.daynumber, ' \ - '(CASE sh.daynumber ' \ - ' WHEN 0 THEN "Sunday" ' \ - ' WHEN 1 THEN "Monday" ' \ - ' WHEN 2 THEN "Tuesday" ' \ - ' WHEN 3 THEN "Wednesday" ' \ - ' WHEN 4 THEN "Thursday" ' \ - ' WHEN 5 THEN "Friday" ' \ - ' ELSE "Saturday" END) AS dayofweek, ' \ - 'SUM(CASE WHEN sh.media_type = "episode" AND shm.live = 0 ' \ - ' THEN sh.d ELSE 0 END) AS tv_count, ' \ - 'SUM(CASE WHEN sh.media_type = "movie" AND shm.live = 0 ' \ - ' THEN sh.d ELSE 0 END) AS movie_count, ' \ - 'SUM(CASE WHEN sh.media_type = "track" AND shm.live = 0 ' \ - ' THEN sh.d ELSE 0 END) AS music_count, ' \ - 'SUM(CASE WHEN shm.live = 1 ' \ - ' THEN sh.d ELSE 0 END) AS live_count ' \ - 'FROM (SELECT *, ' \ - ' CAST(strftime("%%w", date(started, "unixepoch", "localtime")) AS INTEGER) AS daynumber, ' \ - ' SUM(CASE WHEN stopped > 0 THEN (stopped - started) - ' \ - ' (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) ' \ - ' AS d ' \ - ' FROM session_history ' \ - ' WHERE session_history.stopped >= %s %s' \ - ' GROUP BY daynumber, %s) AS sh ' \ - 'JOIN session_history_metadata AS shm ON shm.id = sh.id ' \ - 'GROUP BY dayofweek ' \ - 'ORDER BY sh.daynumber' % (timestamp, user_cond, group_by) + query = "SELECT sh.daynumber, " \ + "(CASE sh.daynumber " \ + " WHEN 0 THEN 'Sunday' " \ + " WHEN 1 THEN 'Monday' " \ + " WHEN 2 THEN 'Tuesday' " \ + " WHEN 3 THEN 'Wednesday' " \ + " WHEN 4 THEN 'Thursday' " \ + " WHEN 5 THEN 'Friday' " \ + " ELSE 'Saturday' END) AS dayofweek, " \ + "SUM(CASE WHEN sh.media_type = 'episode' AND shm.live = 0 " \ + " THEN sh.d ELSE 0 END) AS 
tv_count, " \ + "SUM(CASE WHEN sh.media_type = 'movie' AND shm.live = 0 " \ + " THEN sh.d ELSE 0 END) AS movie_count, " \ + "SUM(CASE WHEN sh.media_type = 'track' AND shm.live = 0 " \ + " THEN sh.d ELSE 0 END) AS music_count, " \ + "SUM(CASE WHEN shm.live = 1 " \ + " THEN sh.d ELSE 0 END) AS live_count " \ + "FROM (SELECT *, " \ + " CAST(strftime('%%w', date(started, 'unixepoch', 'localtime')) AS INTEGER) AS daynumber, " \ + " SUM(CASE WHEN stopped > 0 THEN (stopped - started) - " \ + " (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) " \ + " AS d " \ + " FROM session_history " \ + " WHERE session_history.stopped >= %s %s" \ + " GROUP BY daynumber, %s) AS sh " \ + "JOIN session_history_metadata AS shm ON shm.id = sh.id " \ + "GROUP BY dayofweek " \ + "ORDER BY sh.daynumber" % (timestamp, user_cond, group_by) result = monitor_db.select(query) except Exception as e: @@ -321,42 +321,42 @@ class Graphs(object): try: if y_axis == 'plays': - query = 'SELECT sh.hourofday, ' \ - 'SUM(CASE WHEN sh.media_type = "episode" AND shm.live = 0 THEN 1 ELSE 0 END) AS tv_count, ' \ - 'SUM(CASE WHEN sh.media_type = "movie" AND shm.live = 0 THEN 1 ELSE 0 END) AS movie_count, ' \ - 'SUM(CASE WHEN sh.media_type = "track" AND shm.live = 0 THEN 1 ELSE 0 END) AS music_count, ' \ - 'SUM(shm.live) AS live_count ' \ - 'FROM (SELECT *, ' \ - ' strftime("%%H", datetime(started, "unixepoch", "localtime")) AS hourofday' \ - ' FROM session_history ' \ - ' WHERE session_history.stopped >= %s %s ' \ - ' GROUP BY hourofday, %s) AS sh ' \ - 'JOIN session_history_metadata AS shm ON shm.id = sh.id ' \ - 'GROUP BY sh.hourofday ' \ - 'ORDER BY sh.hourofday' % (timestamp, user_cond, group_by) + query = "SELECT sh.hourofday, " \ + "SUM(CASE WHEN sh.media_type = 'episode' AND shm.live = 0 THEN 1 ELSE 0 END) AS tv_count, " \ + "SUM(CASE WHEN sh.media_type = 'movie' AND shm.live = 0 THEN 1 ELSE 0 END) AS movie_count, " \ + "SUM(CASE WHEN sh.media_type = 'track' AND shm.live = 0 THEN 1 ELSE 0 END) AS music_count, " \ + "SUM(shm.live) AS live_count " \ + "FROM (SELECT *, " \ + " strftime('%%H', datetime(started, 'unixepoch', 'localtime')) AS hourofday" \ + " FROM session_history " \ + " WHERE session_history.stopped >= %s %s " \ + " GROUP BY hourofday, %s) AS sh " \ + "JOIN session_history_metadata AS shm ON shm.id = sh.id " \ + "GROUP BY sh.hourofday " \ + "ORDER BY sh.hourofday" % (timestamp, user_cond, group_by) result = monitor_db.select(query) else: - query = 'SELECT sh.hourofday, ' \ - 'SUM(CASE WHEN sh.media_type = "episode" AND shm.live = 0 ' \ - ' THEN sh.d ELSE 0 END) AS tv_count, ' \ - 'SUM(CASE WHEN sh.media_type = "movie" AND shm.live = 0 ' \ - ' THEN sh.d ELSE 0 END) AS movie_count, ' \ - 'SUM(CASE WHEN sh.media_type = "track" AND shm.live = 0 ' \ - ' THEN sh.d ELSE 0 END) AS music_count, ' \ - 'SUM(CASE WHEN shm.live = 1 ' \ - ' THEN sh.d ELSE 0 END) AS live_count ' \ - 'FROM (SELECT *, ' \ - ' strftime("%%H", datetime(started, "unixepoch", "localtime")) AS hourofday, ' \ - ' SUM(CASE WHEN stopped > 0 THEN (stopped - started) - ' \ - ' (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) ' \ - ' AS d ' \ - ' FROM session_history ' \ - ' WHERE session_history.stopped >= %s %s' \ - ' GROUP BY hourofday, %s) AS sh ' \ - 'JOIN session_history_metadata AS shm ON shm.id = sh.id ' \ - 'GROUP BY sh.hourofday ' \ - 'ORDER BY sh.hourofday' % (timestamp, user_cond, group_by) + query = "SELECT sh.hourofday, " \ + "SUM(CASE WHEN sh.media_type = 'episode' AND shm.live = 0 " \ + " 
THEN sh.d ELSE 0 END) AS tv_count, " \ + "SUM(CASE WHEN sh.media_type = 'movie' AND shm.live = 0 " \ + " THEN sh.d ELSE 0 END) AS movie_count, " \ + "SUM(CASE WHEN sh.media_type = 'track' AND shm.live = 0 " \ + " THEN sh.d ELSE 0 END) AS music_count, " \ + "SUM(CASE WHEN shm.live = 1 " \ + " THEN sh.d ELSE 0 END) AS live_count " \ + "FROM (SELECT *, " \ + " strftime('%%H', datetime(started, 'unixepoch', 'localtime')) AS hourofday, " \ + " SUM(CASE WHEN stopped > 0 THEN (stopped - started) - " \ + " (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) " \ + " AS d " \ + " FROM session_history " \ + " WHERE session_history.stopped >= %s %s" \ + " GROUP BY hourofday, %s) AS sh " \ + "JOIN session_history_metadata AS shm ON shm.id = sh.id " \ + "GROUP BY sh.hourofday " \ + "ORDER BY sh.hourofday" % (timestamp, user_cond, group_by) result = monitor_db.select(query) except Exception as e: @@ -440,42 +440,42 @@ class Graphs(object): try: if y_axis == 'plays': - query = 'SELECT sh.datestring, ' \ - 'SUM(CASE WHEN sh.media_type = "episode" AND shm.live = 0 THEN 1 ELSE 0 END) AS tv_count, ' \ - 'SUM(CASE WHEN sh.media_type = "movie" AND shm.live = 0 THEN 1 ELSE 0 END) AS movie_count, ' \ - 'SUM(CASE WHEN sh.media_type = "track" AND shm.live = 0 THEN 1 ELSE 0 END) AS music_count, ' \ - 'SUM(shm.live) AS live_count ' \ - 'FROM (SELECT *, ' \ - ' strftime("%%Y-%%m", datetime(started, "unixepoch", "localtime")) AS datestring' \ - ' FROM session_history ' \ - ' WHERE session_history.stopped >= %s %s ' \ - ' GROUP BY datestring, %s) AS sh ' \ - 'JOIN session_history_metadata AS shm ON shm.id = sh.id ' \ - 'GROUP BY sh.datestring ' \ - 'ORDER BY sh.datestring' % (timestamp, user_cond, group_by) + query = "SELECT sh.datestring, " \ + "SUM(CASE WHEN sh.media_type = 'episode' AND shm.live = 0 THEN 1 ELSE 0 END) AS tv_count, " \ + "SUM(CASE WHEN sh.media_type = 'movie' AND shm.live = 0 THEN 1 ELSE 0 END) AS movie_count, " \ + "SUM(CASE WHEN sh.media_type = 'track' AND shm.live = 0 THEN 1 ELSE 0 END) AS music_count, " \ + "SUM(shm.live) AS live_count " \ + "FROM (SELECT *, " \ + " strftime('%%Y-%%m', datetime(started, 'unixepoch', 'localtime')) AS datestring" \ + " FROM session_history " \ + " WHERE session_history.stopped >= %s %s " \ + " GROUP BY datestring, %s) AS sh " \ + "JOIN session_history_metadata AS shm ON shm.id = sh.id " \ + "GROUP BY sh.datestring " \ + "ORDER BY sh.datestring" % (timestamp, user_cond, group_by) result = monitor_db.select(query) else: - query = 'SELECT sh.datestring, ' \ - 'SUM(CASE WHEN sh.media_type = "episode" AND shm.live = 0 ' \ - ' THEN sh.d ELSE 0 END) AS tv_count, ' \ - 'SUM(CASE WHEN sh.media_type = "movie" AND shm.live = 0 ' \ - ' THEN sh.d ELSE 0 END) AS movie_count, ' \ - 'SUM(CASE WHEN sh.media_type = "track" AND shm.live = 0 ' \ - ' THEN sh.d ELSE 0 END) AS music_count, ' \ - 'SUM(CASE WHEN shm.live = 1 ' \ - ' THEN sh.d ELSE 0 END) AS live_count ' \ - 'FROM (SELECT *, ' \ - ' strftime("%%Y-%%m", datetime(started, "unixepoch", "localtime")) AS datestring, ' \ - ' SUM(CASE WHEN stopped > 0 THEN (stopped - started) - ' \ - ' (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) ' \ - ' AS d ' \ - ' FROM session_history ' \ - ' WHERE session_history.stopped >= %s %s' \ - ' GROUP BY datestring, %s) AS sh ' \ - 'JOIN session_history_metadata AS shm ON shm.id = sh.id ' \ - 'GROUP BY sh.datestring ' \ - 'ORDER BY sh.datestring' % (timestamp, user_cond, group_by) + query = "SELECT sh.datestring, " \ + "SUM(CASE WHEN 
sh.media_type = 'episode' AND shm.live = 0 " \ + " THEN sh.d ELSE 0 END) AS tv_count, " \ + "SUM(CASE WHEN sh.media_type = 'movie' AND shm.live = 0 " \ + " THEN sh.d ELSE 0 END) AS movie_count, " \ + "SUM(CASE WHEN sh.media_type = 'track' AND shm.live = 0 " \ + " THEN sh.d ELSE 0 END) AS music_count, " \ + "SUM(CASE WHEN shm.live = 1 " \ + " THEN sh.d ELSE 0 END) AS live_count " \ + "FROM (SELECT *, " \ + " strftime('%%Y-%%m', datetime(started, 'unixepoch', 'localtime')) AS datestring, " \ + " SUM(CASE WHEN stopped > 0 THEN (stopped - started) - " \ + " (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) " \ + " AS d " \ + " FROM session_history " \ + " WHERE session_history.stopped >= %s %s" \ + " GROUP BY datestring, %s) AS sh " \ + "JOIN session_history_metadata AS shm ON shm.id = sh.id " \ + "GROUP BY sh.datestring " \ + "ORDER BY sh.datestring" % (timestamp, user_cond, group_by) result = monitor_db.select(query) except Exception as e: @@ -567,44 +567,44 @@ class Graphs(object): try: if y_axis == 'plays': - query = 'SELECT sh.platform, ' \ - 'SUM(CASE WHEN sh.media_type = "episode" AND shm.live = 0 THEN 1 ELSE 0 END) AS tv_count, ' \ - 'SUM(CASE WHEN sh.media_type = "movie" AND shm.live = 0 THEN 1 ELSE 0 END) AS movie_count, ' \ - 'SUM(CASE WHEN sh.media_type = "track" AND shm.live = 0 THEN 1 ELSE 0 END) AS music_count, ' \ - 'SUM(shm.live) AS live_count, ' \ - 'COUNT(sh.id) AS total_count ' \ - 'FROM (SELECT * ' \ - ' FROM session_history ' \ - ' WHERE session_history.stopped >= %s %s ' \ - ' GROUP BY %s) AS sh ' \ - 'JOIN session_history_metadata AS shm ON shm.id = sh.id ' \ - 'GROUP BY sh.platform ' \ - 'ORDER BY total_count DESC, sh.platform ASC ' \ - 'LIMIT 10' % (timestamp, user_cond, group_by) + query = "SELECT sh.platform, " \ + "SUM(CASE WHEN sh.media_type = 'episode' AND shm.live = 0 THEN 1 ELSE 0 END) AS tv_count, " \ + "SUM(CASE WHEN sh.media_type = 'movie' AND shm.live = 0 THEN 1 ELSE 0 END) AS movie_count, " \ + "SUM(CASE WHEN sh.media_type = 'track' AND shm.live = 0 THEN 1 ELSE 0 END) AS music_count, " \ + "SUM(shm.live) AS live_count, " \ + "COUNT(sh.id) AS total_count " \ + "FROM (SELECT * " \ + " FROM session_history " \ + " WHERE session_history.stopped >= %s %s " \ + " GROUP BY %s) AS sh " \ + "JOIN session_history_metadata AS shm ON shm.id = sh.id " \ + "GROUP BY sh.platform " \ + "ORDER BY total_count DESC, sh.platform ASC " \ + "LIMIT 10" % (timestamp, user_cond, group_by) result = monitor_db.select(query) else: - query = 'SELECT sh.platform, ' \ - 'SUM(CASE WHEN sh.media_type = "episode" AND shm.live = 0 ' \ - ' THEN sh.d ELSE 0 END) AS tv_count, ' \ - 'SUM(CASE WHEN sh.media_type = "movie" AND shm.live = 0 ' \ - ' THEN sh.d ELSE 0 END) AS movie_count, ' \ - 'SUM(CASE WHEN sh.media_type = "track" AND shm.live = 0 ' \ - ' THEN sh.d ELSE 0 END) AS music_count, ' \ - 'SUM(CASE WHEN shm.live = 1 ' \ - ' THEN sh.d ELSE 0 END) AS live_count, ' \ - 'SUM(sh.d) AS total_duration ' \ - 'FROM (SELECT *, ' \ - ' SUM(CASE WHEN stopped > 0 THEN (stopped - started) - ' \ - ' (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) ' \ - ' AS d ' \ - ' FROM session_history ' \ - ' WHERE session_history.stopped >= %s %s' \ - ' GROUP BY %s) AS sh ' \ - 'JOIN session_history_metadata AS shm ON shm.id = sh.id ' \ - 'GROUP BY sh.platform ' \ - 'ORDER BY total_duration DESC ' \ - 'LIMIT 10' % (timestamp, user_cond, group_by) + query = "SELECT sh.platform, " \ + "SUM(CASE WHEN sh.media_type = 'episode' AND shm.live = 0 " \ + " THEN sh.d 
ELSE 0 END) AS tv_count, " \ + "SUM(CASE WHEN sh.media_type = 'movie' AND shm.live = 0 " \ + " THEN sh.d ELSE 0 END) AS movie_count, " \ + "SUM(CASE WHEN sh.media_type = 'track' AND shm.live = 0 " \ + " THEN sh.d ELSE 0 END) AS music_count, " \ + "SUM(CASE WHEN shm.live = 1 " \ + " THEN sh.d ELSE 0 END) AS live_count, " \ + "SUM(sh.d) AS total_duration " \ + "FROM (SELECT *, " \ + " SUM(CASE WHEN stopped > 0 THEN (stopped - started) - " \ + " (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) " \ + " AS d " \ + " FROM session_history " \ + " WHERE session_history.stopped >= %s %s" \ + " GROUP BY %s) AS sh " \ + "JOIN session_history_metadata AS shm ON shm.id = sh.id " \ + "GROUP BY sh.platform " \ + "ORDER BY total_duration DESC " \ + "LIMIT 10" % (timestamp, user_cond, group_by) result = monitor_db.select(query) except Exception as e: @@ -666,50 +666,50 @@ class Graphs(object): try: if y_axis == 'plays': - query = 'SELECT u.user_id, u.username, ' \ - '(CASE WHEN u.friendly_name IS NULL OR TRIM(u.friendly_name) = "" ' \ - ' THEN u.username ELSE u.friendly_name END) AS friendly_name,' \ - 'SUM(CASE WHEN sh.media_type = "episode" AND shm.live = 0 THEN 1 ELSE 0 END) AS tv_count, ' \ - 'SUM(CASE WHEN sh.media_type = "movie" AND shm.live = 0 THEN 1 ELSE 0 END) AS movie_count, ' \ - 'SUM(CASE WHEN sh.media_type = "track" AND shm.live = 0 THEN 1 ELSE 0 END) AS music_count, ' \ - 'SUM(shm.live) AS live_count, ' \ - 'COUNT(sh.id) AS total_count ' \ - 'FROM (SELECT * ' \ - ' FROM session_history ' \ - ' WHERE session_history.stopped >= %s %s ' \ - ' GROUP BY %s) AS sh ' \ - 'JOIN session_history_metadata AS shm ON shm.id = sh.id ' \ - 'JOIN users AS u ON u.user_id = sh.user_id ' \ - 'GROUP BY sh.user_id ' \ - 'ORDER BY total_count DESC ' \ - 'LIMIT 10' % (timestamp, user_cond, group_by) + query = "SELECT u.user_id, u.username, " \ + "(CASE WHEN u.friendly_name IS NULL OR TRIM(u.friendly_name) = '' " \ + " THEN u.username ELSE u.friendly_name END) AS friendly_name," \ + "SUM(CASE WHEN sh.media_type = 'episode' AND shm.live = 0 THEN 1 ELSE 0 END) AS tv_count, " \ + "SUM(CASE WHEN sh.media_type = 'movie' AND shm.live = 0 THEN 1 ELSE 0 END) AS movie_count, " \ + "SUM(CASE WHEN sh.media_type = 'track' AND shm.live = 0 THEN 1 ELSE 0 END) AS music_count, " \ + "SUM(shm.live) AS live_count, " \ + "COUNT(sh.id) AS total_count " \ + "FROM (SELECT * " \ + " FROM session_history " \ + " WHERE session_history.stopped >= %s %s " \ + " GROUP BY %s) AS sh " \ + "JOIN session_history_metadata AS shm ON shm.id = sh.id " \ + "JOIN users AS u ON u.user_id = sh.user_id " \ + "GROUP BY sh.user_id " \ + "ORDER BY total_count DESC " \ + "LIMIT 10" % (timestamp, user_cond, group_by) result = monitor_db.select(query) else: - query = 'SELECT u.user_id, u.username, ' \ - '(CASE WHEN u.friendly_name IS NULL OR TRIM(u.friendly_name) = "" ' \ - ' THEN u.username ELSE u.friendly_name END) AS friendly_name,' \ - 'SUM(CASE WHEN sh.media_type = "episode" AND shm.live = 0 ' \ - ' THEN sh.d ELSE 0 END) AS tv_count, ' \ - 'SUM(CASE WHEN sh.media_type = "movie" AND shm.live = 0 ' \ - ' THEN sh.d ELSE 0 END) AS movie_count, ' \ - 'SUM(CASE WHEN sh.media_type = "track" AND shm.live = 0 ' \ - ' THEN sh.d ELSE 0 END) AS music_count, ' \ - 'SUM(CASE WHEN shm.live = 1 ' \ - ' THEN sh.d ELSE 0 END) AS live_count, ' \ - 'SUM(sh.d) AS total_duration ' \ - 'FROM (SELECT *, ' \ - ' SUM(CASE WHEN stopped > 0 THEN (stopped - started) - ' \ - ' (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) ' \ 
- ' AS d ' \ - ' FROM session_history ' \ - ' WHERE session_history.stopped >= %s %s' \ - ' GROUP BY %s) AS sh ' \ - 'JOIN session_history_metadata AS shm ON shm.id = sh.id ' \ - 'JOIN users AS u ON u.user_id = sh.user_id ' \ - 'GROUP BY sh.user_id ' \ - 'ORDER BY total_duration DESC ' \ - 'LIMIT 10' % (timestamp, user_cond, group_by) + query = "SELECT u.user_id, u.username, " \ + "(CASE WHEN u.friendly_name IS NULL OR TRIM(u.friendly_name) = '' " \ + " THEN u.username ELSE u.friendly_name END) AS friendly_name," \ + "SUM(CASE WHEN sh.media_type = 'episode' AND shm.live = 0 " \ + " THEN sh.d ELSE 0 END) AS tv_count, " \ + "SUM(CASE WHEN sh.media_type = 'movie' AND shm.live = 0 " \ + " THEN sh.d ELSE 0 END) AS movie_count, " \ + "SUM(CASE WHEN sh.media_type = 'track' AND shm.live = 0 " \ + " THEN sh.d ELSE 0 END) AS music_count, " \ + "SUM(CASE WHEN shm.live = 1 " \ + " THEN sh.d ELSE 0 END) AS live_count, " \ + "SUM(sh.d) AS total_duration " \ + "FROM (SELECT *, " \ + " SUM(CASE WHEN stopped > 0 THEN (stopped - started) - " \ + " (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) " \ + " AS d " \ + " FROM session_history " \ + " WHERE session_history.stopped >= %s %s" \ + " GROUP BY %s) AS sh " \ + "JOIN session_history_metadata AS shm ON shm.id = sh.id " \ + "JOIN users AS u ON u.user_id = sh.user_id " \ + "GROUP BY sh.user_id " \ + "ORDER BY total_duration DESC " \ + "LIMIT 10" % (timestamp, user_cond, group_by) result = monitor_db.select(query) except Exception as e: @@ -776,36 +776,36 @@ class Graphs(object): try: if y_axis == 'plays': - query = 'SELECT sh.date_played, ' \ - 'SUM(CASE WHEN shmi.transcode_decision = "direct play" THEN 1 ELSE 0 END) AS dp_count, ' \ - 'SUM(CASE WHEN shmi.transcode_decision = "copy" THEN 1 ELSE 0 END) AS ds_count, ' \ - 'SUM(CASE WHEN shmi.transcode_decision = "transcode" THEN 1 ELSE 0 END) AS tc_count ' \ - 'FROM (SELECT *, ' \ - ' date(started, "unixepoch", "localtime") AS date_played ' \ - ' FROM session_history ' \ - ' WHERE session_history.stopped >= %s %s ' \ - ' GROUP BY date_played, %s) AS sh ' \ - 'JOIN session_history_media_info AS shmi ON shmi.id = sh.id ' \ - 'GROUP BY sh.date_played ' \ - 'ORDER BY sh.started' % (timestamp, user_cond, group_by) + query = "SELECT sh.date_played, " \ + "SUM(CASE WHEN shmi.transcode_decision = 'direct play' THEN 1 ELSE 0 END) AS dp_count, " \ + "SUM(CASE WHEN shmi.transcode_decision = 'copy' THEN 1 ELSE 0 END) AS ds_count, " \ + "SUM(CASE WHEN shmi.transcode_decision = 'transcode' THEN 1 ELSE 0 END) AS tc_count " \ + "FROM (SELECT *, " \ + " date(started, 'unixepoch', 'localtime') AS date_played " \ + " FROM session_history " \ + " WHERE session_history.stopped >= %s %s " \ + " GROUP BY date_played, %s) AS sh " \ + "JOIN session_history_media_info AS shmi ON shmi.id = sh.id " \ + "GROUP BY sh.date_played " \ + "ORDER BY sh.started" % (timestamp, user_cond, group_by) result = monitor_db.select(query) else: - query = 'SELECT sh.date_played, ' \ - 'SUM(CASE WHEN shmi.transcode_decision = "direct play" THEN sh.d ELSE 0 END) AS dp_count, ' \ - 'SUM(CASE WHEN shmi.transcode_decision = "copy" THEN sh.d ELSE 0 END) AS ds_count, ' \ - 'SUM(CASE WHEN shmi.transcode_decision = "transcode" THEN sh.d ELSE 0 END) AS tc_count ' \ - 'FROM (SELECT *, ' \ - ' date(started, "unixepoch", "localtime") AS date_played,' \ - ' SUM(CASE WHEN stopped > 0 THEN (stopped - started) - ' \ - ' (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) ' \ - ' AS d ' \ - ' FROM session_history ' \ 
- ' WHERE session_history.stopped >= %s %s' \ - ' GROUP BY date_played, %s) AS sh ' \ - 'JOIN session_history_media_info AS shmi ON shmi.id = sh.id ' \ - 'GROUP BY sh.date_played ' \ - 'ORDER BY sh.started' % (timestamp, user_cond, group_by) + query = "SELECT sh.date_played, " \ + "SUM(CASE WHEN shmi.transcode_decision = 'direct play' THEN sh.d ELSE 0 END) AS dp_count, " \ + "SUM(CASE WHEN shmi.transcode_decision = 'copy' THEN sh.d ELSE 0 END) AS ds_count, " \ + "SUM(CASE WHEN shmi.transcode_decision = 'transcode' THEN sh.d ELSE 0 END) AS tc_count " \ + "FROM (SELECT *, " \ + " date(started, 'unixepoch', 'localtime') AS date_played," \ + " SUM(CASE WHEN stopped > 0 THEN (stopped - started) - " \ + " (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) " \ + " AS d " \ + " FROM session_history " \ + " WHERE session_history.stopped >= %s %s" \ + " GROUP BY date_played, %s) AS sh " \ + "JOIN session_history_media_info AS shmi ON shmi.id = sh.id " \ + "GROUP BY sh.date_played " \ + "ORDER BY sh.started" % (timestamp, user_cond, group_by) result = monitor_db.select(query) except Exception as e: @@ -873,40 +873,40 @@ class Graphs(object): try: if y_axis == 'plays': - query = 'SELECT shmi.video_full_resolution AS resolution, ' \ - 'SUM(CASE WHEN shmi.transcode_decision = "direct play" THEN 1 ELSE 0 END) AS dp_count, ' \ - 'SUM(CASE WHEN shmi.transcode_decision = "copy" THEN 1 ELSE 0 END) AS ds_count, ' \ - 'SUM(CASE WHEN shmi.transcode_decision = "transcode" THEN 1 ELSE 0 END) AS tc_count, ' \ - 'COUNT(sh.id) AS total_count ' \ - 'FROM (SELECT * ' \ - ' FROM session_history ' \ - ' WHERE session_history.stopped >= %s ' \ - ' AND session_history.media_type IN ("movie", "episode") %s ' \ - ' GROUP BY %s) AS sh ' \ - 'JOIN session_history_media_info AS shmi ON shmi.id = sh.id ' \ - 'GROUP BY resolution ' \ - 'ORDER BY total_count DESC ' \ - 'LIMIT 10' % (timestamp, user_cond, group_by) + query = "SELECT shmi.video_full_resolution AS resolution, " \ + "SUM(CASE WHEN shmi.transcode_decision = 'direct play' THEN 1 ELSE 0 END) AS dp_count, " \ + "SUM(CASE WHEN shmi.transcode_decision = 'copy' THEN 1 ELSE 0 END) AS ds_count, " \ + "SUM(CASE WHEN shmi.transcode_decision = 'transcode' THEN 1 ELSE 0 END) AS tc_count, " \ + "COUNT(sh.id) AS total_count " \ + "FROM (SELECT * " \ + " FROM session_history " \ + " WHERE session_history.stopped >= %s " \ + " AND session_history.media_type IN ('movie', 'episode') %s " \ + " GROUP BY %s) AS sh " \ + "JOIN session_history_media_info AS shmi ON shmi.id = sh.id " \ + "GROUP BY resolution " \ + "ORDER BY total_count DESC " \ + "LIMIT 10" % (timestamp, user_cond, group_by) result = monitor_db.select(query) else: - query = 'SELECT shmi.video_full_resolution AS resolution,' \ - 'SUM(CASE WHEN shmi.transcode_decision = "direct play" THEN sh.d ELSE 0 END) AS dp_count, ' \ - 'SUM(CASE WHEN shmi.transcode_decision = "copy" THEN sh.d ELSE 0 END) AS ds_count, ' \ - 'SUM(CASE WHEN shmi.transcode_decision = "transcode" THEN sh.d ELSE 0 END) AS tc_count, ' \ - 'SUM(sh.d) AS total_duration ' \ - 'FROM (SELECT *, ' \ - ' SUM(CASE WHEN stopped > 0 THEN (stopped - started) - ' \ - ' (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) ' \ - ' AS d ' \ - ' FROM session_history ' \ - ' WHERE session_history.stopped >= %s ' \ - ' AND session_history.media_type IN ("movie", "episode") %s ' \ - ' GROUP BY %s) AS sh ' \ - 'JOIN session_history_media_info AS shmi ON shmi.id = sh.id ' \ - 'GROUP BY resolution ' \ - 'ORDER BY total_duration 
DESC ' \ - 'LIMIT 10' % (timestamp, user_cond, group_by) + query = "SELECT shmi.video_full_resolution AS resolution," \ + "SUM(CASE WHEN shmi.transcode_decision = 'direct play' THEN sh.d ELSE 0 END) AS dp_count, " \ + "SUM(CASE WHEN shmi.transcode_decision = 'copy' THEN sh.d ELSE 0 END) AS ds_count, " \ + "SUM(CASE WHEN shmi.transcode_decision = 'transcode' THEN sh.d ELSE 0 END) AS tc_count, " \ + "SUM(sh.d) AS total_duration " \ + "FROM (SELECT *, " \ + " SUM(CASE WHEN stopped > 0 THEN (stopped - started) - " \ + " (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) " \ + " AS d " \ + " FROM session_history " \ + " WHERE session_history.stopped >= %s " \ + " AND session_history.media_type IN ('movie', 'episode') %s " \ + " GROUP BY %s) AS sh " \ + "JOIN session_history_media_info AS shmi ON shmi.id = sh.id " \ + "GROUP BY resolution " \ + "ORDER BY total_duration DESC " \ + "LIMIT 10" % (timestamp, user_cond, group_by) result = monitor_db.select(query) except Exception as e: @@ -954,66 +954,66 @@ class Graphs(object): try: if y_axis == 'plays': - query = 'SELECT ' \ - '(CASE WHEN shmi.stream_video_full_resolution IS NULL THEN ' \ - ' (CASE WHEN shmi.video_decision = "transcode" THEN ' \ - ' (CASE ' \ - ' WHEN shmi.transcode_height <= 360 THEN "SD" ' \ - ' WHEN shmi.transcode_height <= 480 THEN "480" ' \ - ' WHEN shmi.transcode_height <= 576 THEN "576" ' \ - ' WHEN shmi.transcode_height <= 720 THEN "720" ' \ - ' WHEN shmi.transcode_height <= 1080 THEN "1080" ' \ - ' WHEN shmi.transcode_height <= 1440 THEN "QHD" ' \ - ' WHEN shmi.transcode_height <= 2160 THEN "4k" ' \ - ' ELSE "unknown" END)' \ - ' ELSE shmi.video_full_resolution END) ' \ - ' ELSE shmi.stream_video_full_resolution END) AS resolution, ' \ - 'SUM(CASE WHEN shmi.transcode_decision = "direct play" THEN 1 ELSE 0 END) AS dp_count, ' \ - 'SUM(CASE WHEN shmi.transcode_decision = "copy" THEN 1 ELSE 0 END) AS ds_count, ' \ - 'SUM(CASE WHEN shmi.transcode_decision = "transcode" THEN 1 ELSE 0 END) AS tc_count, ' \ - 'COUNT(sh.id) AS total_count ' \ - 'FROM (SELECT * ' \ - ' FROM session_history ' \ - ' WHERE session_history.stopped >= %s ' \ - ' AND session_history.media_type IN ("movie", "episode") %s ' \ - ' GROUP BY %s) AS sh ' \ - 'JOIN session_history_media_info AS shmi ON shmi.id = sh.id ' \ - 'GROUP BY resolution ' \ - 'ORDER BY total_count DESC ' \ - 'LIMIT 10' % (timestamp, user_cond, group_by) + query = "SELECT " \ + "(CASE WHEN shmi.stream_video_full_resolution IS NULL THEN " \ + " (CASE WHEN shmi.video_decision = 'transcode' THEN " \ + " (CASE " \ + " WHEN shmi.transcode_height <= 360 THEN 'SD' " \ + " WHEN shmi.transcode_height <= 480 THEN '480' " \ + " WHEN shmi.transcode_height <= 576 THEN '576' " \ + " WHEN shmi.transcode_height <= 720 THEN '720' " \ + " WHEN shmi.transcode_height <= 1080 THEN '1080' " \ + " WHEN shmi.transcode_height <= 1440 THEN 'QHD' " \ + " WHEN shmi.transcode_height <= 2160 THEN '4k' " \ + " ELSE 'unknown' END)" \ + " ELSE shmi.video_full_resolution END) " \ + " ELSE shmi.stream_video_full_resolution END) AS resolution, " \ + "SUM(CASE WHEN shmi.transcode_decision = 'direct play' THEN 1 ELSE 0 END) AS dp_count, " \ + "SUM(CASE WHEN shmi.transcode_decision = 'copy' THEN 1 ELSE 0 END) AS ds_count, " \ + "SUM(CASE WHEN shmi.transcode_decision = 'transcode' THEN 1 ELSE 0 END) AS tc_count, " \ + "COUNT(sh.id) AS total_count " \ + "FROM (SELECT * " \ + " FROM session_history " \ + " WHERE session_history.stopped >= %s " \ + " AND session_history.media_type IN ('movie', 
'episode') %s " \ + " GROUP BY %s) AS sh " \ + "JOIN session_history_media_info AS shmi ON shmi.id = sh.id " \ + "GROUP BY resolution " \ + "ORDER BY total_count DESC " \ + "LIMIT 10" % (timestamp, user_cond, group_by) result = monitor_db.select(query) else: - query = 'SELECT ' \ - '(CASE WHEN shmi.stream_video_full_resolution IS NULL THEN ' \ - ' (CASE WHEN shmi.video_decision = "transcode" THEN ' \ - ' (CASE ' \ - ' WHEN shmi.transcode_height <= 360 THEN "SD" ' \ - ' WHEN shmi.transcode_height <= 480 THEN "480" ' \ - ' WHEN shmi.transcode_height <= 576 THEN "576" ' \ - ' WHEN shmi.transcode_height <= 720 THEN "720" ' \ - ' WHEN shmi.transcode_height <= 1080 THEN "1080" ' \ - ' WHEN shmi.transcode_height <= 1440 THEN "QHD" ' \ - ' WHEN shmi.transcode_height <= 2160 THEN "4k" ' \ - ' ELSE "unknown" END)' \ - ' ELSE shmi.video_full_resolution END) ' \ - ' ELSE shmi.stream_video_full_resolution END) AS resolution, ' \ - 'SUM(CASE WHEN shmi.transcode_decision = "direct play" THEN sh.d ELSE 0 END) AS dp_count, ' \ - 'SUM(CASE WHEN shmi.transcode_decision = "copy" THEN sh.d ELSE 0 END) AS ds_count, ' \ - 'SUM(CASE WHEN shmi.transcode_decision = "transcode" THEN sh.d ELSE 0 END) AS tc_count, ' \ - 'SUM(sh.d) AS total_duration ' \ - 'FROM (SELECT *, ' \ - ' SUM(CASE WHEN stopped > 0 THEN (stopped - started) - ' \ - ' (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) ' \ - ' AS d ' \ - ' FROM session_history ' \ - ' WHERE session_history.stopped >= %s ' \ - ' AND session_history.media_type IN ("movie", "episode") %s ' \ - ' GROUP BY %s) AS sh ' \ - 'JOIN session_history_media_info AS shmi ON shmi.id = sh.id ' \ - 'GROUP BY resolution ' \ - 'ORDER BY total_duration DESC ' \ - 'LIMIT 10' % (timestamp, user_cond, group_by) + query = "SELECT " \ + "(CASE WHEN shmi.stream_video_full_resolution IS NULL THEN " \ + " (CASE WHEN shmi.video_decision = 'transcode' THEN " \ + " (CASE " \ + " WHEN shmi.transcode_height <= 360 THEN 'SD' " \ + " WHEN shmi.transcode_height <= 480 THEN '480' " \ + " WHEN shmi.transcode_height <= 576 THEN '576' " \ + " WHEN shmi.transcode_height <= 720 THEN '720' " \ + " WHEN shmi.transcode_height <= 1080 THEN '1080' " \ + " WHEN shmi.transcode_height <= 1440 THEN 'QHD' " \ + " WHEN shmi.transcode_height <= 2160 THEN '4k' " \ + " ELSE 'unknown' END)" \ + " ELSE shmi.video_full_resolution END) " \ + " ELSE shmi.stream_video_full_resolution END) AS resolution, " \ + "SUM(CASE WHEN shmi.transcode_decision = 'direct play' THEN sh.d ELSE 0 END) AS dp_count, " \ + "SUM(CASE WHEN shmi.transcode_decision = 'copy' THEN sh.d ELSE 0 END) AS ds_count, " \ + "SUM(CASE WHEN shmi.transcode_decision = 'transcode' THEN sh.d ELSE 0 END) AS tc_count, " \ + "SUM(sh.d) AS total_duration " \ + "FROM (SELECT *, " \ + " SUM(CASE WHEN stopped > 0 THEN (stopped - started) - " \ + " (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) " \ + " AS d " \ + " FROM session_history " \ + " WHERE session_history.stopped >= %s " \ + " AND session_history.media_type IN ('movie', 'episode') %s " \ + " GROUP BY %s) AS sh " \ + "JOIN session_history_media_info AS shmi ON shmi.id = sh.id " \ + "GROUP BY resolution " \ + "ORDER BY total_duration DESC " \ + "LIMIT 10" % (timestamp, user_cond, group_by) result = monitor_db.select(query) except Exception as e: @@ -1061,38 +1061,38 @@ class Graphs(object): try: if y_axis == 'plays': - query = 'SELECT sh.platform, ' \ - 'SUM(CASE WHEN shmi.transcode_decision = "direct play" THEN 1 ELSE 0 END) AS dp_count, ' \ - 'SUM(CASE 
WHEN shmi.transcode_decision = "copy" THEN 1 ELSE 0 END) AS ds_count, ' \ - 'SUM(CASE WHEN shmi.transcode_decision = "transcode" THEN 1 ELSE 0 END) AS tc_count, ' \ - 'COUNT(sh.id) AS total_count ' \ - 'FROM (SELECT * ' \ - ' FROM session_history ' \ - ' WHERE session_history.stopped >= %s %s ' \ - ' GROUP BY %s) AS sh ' \ - 'JOIN session_history_media_info AS shmi ON shmi.id = sh.id ' \ - 'GROUP BY sh.platform ' \ - 'ORDER BY total_count DESC ' \ - 'LIMIT 10' % (timestamp, user_cond, group_by) + query = "SELECT sh.platform, " \ + "SUM(CASE WHEN shmi.transcode_decision = 'direct play' THEN 1 ELSE 0 END) AS dp_count, " \ + "SUM(CASE WHEN shmi.transcode_decision = 'copy' THEN 1 ELSE 0 END) AS ds_count, " \ + "SUM(CASE WHEN shmi.transcode_decision = 'transcode' THEN 1 ELSE 0 END) AS tc_count, " \ + "COUNT(sh.id) AS total_count " \ + "FROM (SELECT * " \ + " FROM session_history " \ + " WHERE session_history.stopped >= %s %s " \ + " GROUP BY %s) AS sh " \ + "JOIN session_history_media_info AS shmi ON shmi.id = sh.id " \ + "GROUP BY sh.platform " \ + "ORDER BY total_count DESC " \ + "LIMIT 10" % (timestamp, user_cond, group_by) result = monitor_db.select(query) else: - query = 'SELECT sh.platform, ' \ - 'SUM(CASE WHEN shmi.transcode_decision = "direct play" THEN sh.d ELSE 0 END) AS dp_count, ' \ - 'SUM(CASE WHEN shmi.transcode_decision = "copy" THEN sh.d ELSE 0 END) AS ds_count, ' \ - 'SUM(CASE WHEN shmi.transcode_decision = "transcode" THEN sh.d ELSE 0 END) AS tc_count, ' \ - 'SUM(sh.d) AS total_duration ' \ - 'FROM (SELECT *, ' \ - ' SUM(CASE WHEN stopped > 0 THEN (stopped - started) - ' \ - ' (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) ' \ - ' AS d ' \ - ' FROM session_history ' \ - ' WHERE session_history.stopped >= %s %s ' \ - ' GROUP BY %s) AS sh ' \ - 'JOIN session_history_media_info AS shmi ON shmi.id = sh.id ' \ - 'GROUP BY sh.platform ' \ - 'ORDER BY total_duration DESC ' \ - 'LIMIT 10' % (timestamp, user_cond, group_by) + query = "SELECT sh.platform, " \ + "SUM(CASE WHEN shmi.transcode_decision = 'direct play' THEN sh.d ELSE 0 END) AS dp_count, " \ + "SUM(CASE WHEN shmi.transcode_decision = 'copy' THEN sh.d ELSE 0 END) AS ds_count, " \ + "SUM(CASE WHEN shmi.transcode_decision = 'transcode' THEN sh.d ELSE 0 END) AS tc_count, " \ + "SUM(sh.d) AS total_duration " \ + "FROM (SELECT *, " \ + " SUM(CASE WHEN stopped > 0 THEN (stopped - started) - " \ + " (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) " \ + " AS d " \ + " FROM session_history " \ + " WHERE session_history.stopped >= %s %s " \ + " GROUP BY %s) AS sh " \ + "JOIN session_history_media_info AS shmi ON shmi.id = sh.id " \ + "GROUP BY sh.platform " \ + "ORDER BY total_duration DESC " \ + "LIMIT 10" % (timestamp, user_cond, group_by) result = monitor_db.select(query) except Exception as e: @@ -1141,44 +1141,44 @@ class Graphs(object): try: if y_axis == 'plays': - query = 'SELECT u.user_id, u.username, ' \ - '(CASE WHEN u.friendly_name IS NULL OR TRIM(u.friendly_name) = "" ' \ - ' THEN u.username ELSE u.friendly_name END) AS friendly_name,' \ - 'SUM(CASE WHEN shmi.transcode_decision = "direct play" THEN 1 ELSE 0 END) AS dp_count, ' \ - 'SUM(CASE WHEN shmi.transcode_decision = "copy" THEN 1 ELSE 0 END) AS ds_count, ' \ - 'SUM(CASE WHEN shmi.transcode_decision = "transcode" THEN 1 ELSE 0 END) AS tc_count, ' \ - 'COUNT(sh.id) AS total_count ' \ - 'FROM (SELECT * ' \ - ' FROM session_history ' \ - ' WHERE session_history.stopped >= %s %s ' \ - ' GROUP BY %s) AS sh ' \ - 
'JOIN session_history_media_info AS shmi ON shmi.id = sh.id ' \ - 'JOIN users AS u ON u.user_id = sh.user_id ' \ - 'GROUP BY u.user_id ' \ - 'ORDER BY total_count DESC ' \ - 'LIMIT 10' % (timestamp, user_cond, group_by) + query = "SELECT u.user_id, u.username, " \ + "(CASE WHEN u.friendly_name IS NULL OR TRIM(u.friendly_name) = '' " \ + " THEN u.username ELSE u.friendly_name END) AS friendly_name," \ + "SUM(CASE WHEN shmi.transcode_decision = 'direct play' THEN 1 ELSE 0 END) AS dp_count, " \ + "SUM(CASE WHEN shmi.transcode_decision = 'copy' THEN 1 ELSE 0 END) AS ds_count, " \ + "SUM(CASE WHEN shmi.transcode_decision = 'transcode' THEN 1 ELSE 0 END) AS tc_count, " \ + "COUNT(sh.id) AS total_count " \ + "FROM (SELECT * " \ + " FROM session_history " \ + " WHERE session_history.stopped >= %s %s " \ + " GROUP BY %s) AS sh " \ + "JOIN session_history_media_info AS shmi ON shmi.id = sh.id " \ + "JOIN users AS u ON u.user_id = sh.user_id " \ + "GROUP BY u.user_id " \ + "ORDER BY total_count DESC " \ + "LIMIT 10" % (timestamp, user_cond, group_by) result = monitor_db.select(query) else: - query = 'SELECT u.user_id, u.username, ' \ - '(CASE WHEN u.friendly_name IS NULL OR TRIM(u.friendly_name) = "" ' \ - ' THEN u.username ELSE u.friendly_name END) AS friendly_name,' \ - 'SUM(CASE WHEN shmi.transcode_decision = "direct play" THEN sh.d ELSE 0 END) AS dp_count, ' \ - 'SUM(CASE WHEN shmi.transcode_decision = "copy" THEN sh.d ELSE 0 END) AS ds_count, ' \ - 'SUM(CASE WHEN shmi.transcode_decision = "transcode" THEN sh.d ELSE 0 END) AS tc_count, ' \ - 'SUM(sh.d) AS total_duration ' \ - 'FROM (SELECT *, ' \ - ' SUM(CASE WHEN stopped > 0 THEN (stopped - started) - ' \ - ' (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) ' \ - ' AS d ' \ - ' FROM session_history ' \ - ' WHERE session_history.stopped >= %s %s ' \ - ' GROUP BY %s) AS sh ' \ - 'JOIN session_history_media_info AS shmi ON shmi.id = sh.id ' \ - 'JOIN users AS u ON u.user_id = sh.user_id ' \ - 'GROUP BY u.user_id ' \ - 'ORDER BY total_duration DESC ' \ - 'LIMIT 10' % (timestamp, user_cond, group_by) + query = "SELECT u.user_id, u.username, " \ + "(CASE WHEN u.friendly_name IS NULL OR TRIM(u.friendly_name) = '' " \ + " THEN u.username ELSE u.friendly_name END) AS friendly_name," \ + "SUM(CASE WHEN shmi.transcode_decision = 'direct play' THEN sh.d ELSE 0 END) AS dp_count, " \ + "SUM(CASE WHEN shmi.transcode_decision = 'copy' THEN sh.d ELSE 0 END) AS ds_count, " \ + "SUM(CASE WHEN shmi.transcode_decision = 'transcode' THEN sh.d ELSE 0 END) AS tc_count, " \ + "SUM(sh.d) AS total_duration " \ + "FROM (SELECT *, " \ + " SUM(CASE WHEN stopped > 0 THEN (stopped - started) - " \ + " (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) " \ + " AS d " \ + " FROM session_history " \ + " WHERE session_history.stopped >= %s %s " \ + " GROUP BY %s) AS sh " \ + "JOIN session_history_media_info AS shmi ON shmi.id = sh.id " \ + "JOIN users AS u ON u.user_id = sh.user_id " \ + "GROUP BY u.user_id " \ + "ORDER BY total_duration DESC " \ + "LIMIT 10" % (timestamp, user_cond, group_by) result = monitor_db.select(query) except Exception as e: diff --git a/plexpy/libraries.py b/plexpy/libraries.py index ba30efa9..33832aba 100644 --- a/plexpy/libraries.py +++ b/plexpy/libraries.py @@ -95,8 +95,8 @@ def refresh_libraries(): add_live_tv_library(refresh=True) - query = 'UPDATE library_sections SET is_active = 0 WHERE server_id != ? 
OR ' \ - 'section_id NOT IN ({})'.format(', '.join(['?'] * len(section_ids))) + query = "UPDATE library_sections SET is_active = 0 WHERE server_id != ? OR " \ + "section_id NOT IN ({})".format(", ".join(["?"] * len(section_ids))) monitor_db.action(query=query, args=[plexpy.CONFIG.PMS_IDENTIFIER] + section_ids) new_keys = plexpy.CONFIG.HOME_LIBRARY_CARDS + new_keys @@ -112,8 +112,8 @@ def refresh_libraries(): def add_live_tv_library(refresh=False): monitor_db = database.MonitorDatabase() - result = monitor_db.select_single('SELECT * FROM library_sections ' - 'WHERE section_id = ? and server_id = ?', + result = monitor_db.select_single("SELECT * FROM library_sections " + "WHERE section_id = ? and server_id = ?", [common.LIVE_TV_SECTION_ID, plexpy.CONFIG.PMS_IDENTIFIER]) if result and not refresh or not result and refresh: @@ -138,7 +138,7 @@ def add_live_tv_library(refresh=False): def has_library_type(section_type): monitor_db = database.MonitorDatabase() - query = 'SELECT * FROM library_sections WHERE section_type = ? AND deleted_section = 0' + query = "SELECT * FROM library_sections WHERE section_type = ? AND deleted_section = 0" args = [section_type] result = monitor_db.select_single(query=query, args=args) return bool(result) @@ -328,44 +328,44 @@ class Libraries(object): group_by = 'session_history.reference_id' if grouping else 'session_history.id' - columns = ['library_sections.id AS row_id', - 'library_sections.server_id', - 'library_sections.section_id', - 'library_sections.section_name', - 'library_sections.section_type', - 'library_sections.count', - 'library_sections.parent_count', - 'library_sections.child_count', - 'library_sections.thumb AS library_thumb', - 'library_sections.custom_thumb_url AS custom_thumb', - 'library_sections.art AS library_art', - 'library_sections.custom_art_url AS custom_art', - 'COUNT(DISTINCT %s) AS plays' % group_by, - 'SUM(CASE WHEN session_history.stopped > 0 THEN (session_history.stopped - session_history.started) \ + columns = ["library_sections.id AS row_id", + "library_sections.server_id", + "library_sections.section_id", + "library_sections.section_name", + "library_sections.section_type", + "library_sections.count", + "library_sections.parent_count", + "library_sections.child_count", + "library_sections.thumb AS library_thumb", + "library_sections.custom_thumb_url AS custom_thumb", + "library_sections.art AS library_art", + "library_sections.custom_art_url AS custom_art", + "COUNT(DISTINCT %s) AS plays" % group_by, + "SUM(CASE WHEN session_history.stopped > 0 THEN (session_history.stopped - session_history.started) \ ELSE 0 END) - SUM(CASE WHEN session_history.paused_counter IS NULL THEN 0 ELSE \ - session_history.paused_counter END) AS duration', - 'MAX(session_history.started) AS last_accessed', - 'MAX(session_history.id) AS history_row_id', - 'session_history_metadata.full_title AS last_played', - 'session_history.rating_key', - 'session_history_metadata.media_type', - 'session_history_metadata.thumb', - 'session_history_metadata.parent_thumb', - 'session_history_metadata.grandparent_thumb', - 'session_history_metadata.parent_title', - 'session_history_metadata.year', - 'session_history_metadata.media_index', - 'session_history_metadata.parent_media_index', - 'session_history_metadata.content_rating', - 'session_history_metadata.labels', - 'session_history_metadata.live', - 'session_history_metadata.added_at', - 'session_history_metadata.originally_available_at', - 'session_history_metadata.guid', - 'library_sections.do_notify', - 
'library_sections.do_notify_created', - 'library_sections.keep_history', - 'library_sections.is_active' + session_history.paused_counter END) AS duration", + "MAX(session_history.started) AS last_accessed", + "MAX(session_history.id) AS history_row_id", + "session_history_metadata.full_title AS last_played", + "session_history.rating_key", + "session_history_metadata.media_type", + "session_history_metadata.thumb", + "session_history_metadata.parent_thumb", + "session_history_metadata.grandparent_thumb", + "session_history_metadata.parent_title", + "session_history_metadata.year", + "session_history_metadata.media_index", + "session_history_metadata.parent_media_index", + "session_history_metadata.content_rating", + "session_history_metadata.labels", + "session_history_metadata.live", + "session_history_metadata.added_at", + "session_history_metadata.originally_available_at", + "session_history_metadata.guid", + "library_sections.do_notify", + "library_sections.do_notify_created", + "library_sections.keep_history", + "library_sections.is_active" ] try: query = data_tables.ssp_query(table_name='library_sections', @@ -499,11 +499,11 @@ class Libraries(object): group_by = 'rating_key' try: - query = 'SELECT MAX(started) AS last_played, COUNT(DISTINCT %s) AS play_count, ' \ - 'rating_key, parent_rating_key, grandparent_rating_key ' \ - 'FROM session_history ' \ - 'WHERE section_id = ? ' \ - 'GROUP BY %s ' % (count_by, group_by) + query = "SELECT MAX(started) AS last_played, COUNT(DISTINCT %s) AS play_count, " \ + "rating_key, parent_rating_key, grandparent_rating_key " \ + "FROM session_history " \ + "WHERE section_id = ? " \ + "GROUP BY %s " % (count_by, group_by) result = monitor_db.select(query, args=[section_id]) except Exception as e: logger.warn("Tautulli Libraries :: Unable to execute database query for get_datatables_media_info2: %s." % e) @@ -838,27 +838,27 @@ class Libraries(object): last_accessed = 'NULL' join = '' if include_last_accessed: - last_accessed = 'MAX(session_history.started)' - join = 'LEFT OUTER JOIN session_history ON library_sections.section_id = session_history.section_id ' \ + last_accessed = "MAX(session_history.started)" + join = "LEFT OUTER JOIN session_history ON library_sections.section_id = session_history.section_id " \ monitor_db = database.MonitorDatabase() try: if str(section_id).isdigit(): - where = 'library_sections.section_id = ?' + where = "library_sections.section_id = ?" args = [section_id] else: raise Exception('Missing section_id') - query = 'SELECT library_sections.id AS row_id, server_id, library_sections.section_id, ' \ - 'section_name, section_type, ' \ - 'count, parent_count, child_count, ' \ - 'library_sections.thumb AS library_thumb, custom_thumb_url AS custom_thumb, ' \ - 'library_sections.art AS library_art, ' \ - 'custom_art_url AS custom_art, is_active, ' \ - 'do_notify, do_notify_created, keep_history, deleted_section, %s AS last_accessed ' \ - 'FROM library_sections %s ' \ - 'WHERE %s AND server_id = ? ' % (last_accessed, join, where) + query = "SELECT library_sections.id AS row_id, server_id, library_sections.section_id, " \ + "section_name, section_type, " \ + "count, parent_count, child_count, " \ + "library_sections.thumb AS library_thumb, custom_thumb_url AS custom_thumb, " \ + "library_sections.art AS library_art, " \ + "custom_art_url AS custom_art, is_active, " \ + "do_notify, do_notify_created, keep_history, deleted_section, %s AS last_accessed " \ + "FROM library_sections %s " \ + "WHERE %s AND server_id = ? 
" % (last_accessed, join, where) result = monitor_db.select(query, args=args + [server_id]) except Exception as e: logger.warn("Tautulli Libraries :: Unable to execute database query for get_library_details: %s." % e) @@ -924,24 +924,24 @@ class Libraries(object): try: if days > 0: if str(section_id).isdigit(): - query = 'SELECT (SUM(stopped - started) - ' \ - 'SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END)) AS total_time, ' \ - 'COUNT(DISTINCT %s) AS total_plays ' \ - 'FROM session_history ' \ - 'JOIN session_history_metadata ON session_history_metadata.id = session_history.id ' \ - 'WHERE stopped >= %s ' \ - 'AND section_id = ?' % (group_by, timestamp_query) + query = "SELECT (SUM(stopped - started) - " \ + "SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END)) AS total_time, " \ + "COUNT(DISTINCT %s) AS total_plays " \ + "FROM session_history " \ + "JOIN session_history_metadata ON session_history_metadata.id = session_history.id " \ + "WHERE stopped >= %s " \ + "AND section_id = ?" % (group_by, timestamp_query) result = monitor_db.select(query, args=[section_id]) else: result = [] else: if str(section_id).isdigit(): - query = 'SELECT (SUM(stopped - started) - ' \ - 'SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END)) AS total_time, ' \ - 'COUNT(DISTINCT %s) AS total_plays ' \ - 'FROM session_history ' \ - 'JOIN session_history_metadata ON session_history_metadata.id = session_history.id ' \ - 'WHERE section_id = ?' % group_by + query = "SELECT (SUM(stopped - started) - " \ + "SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END)) AS total_time, " \ + "COUNT(DISTINCT %s) AS total_plays " \ + "FROM session_history " \ + "JOIN session_history_metadata ON session_history_metadata.id = session_history.id " \ + "WHERE section_id = ?" % group_by result = monitor_db.select(query, args=[section_id]) else: result = [] @@ -981,17 +981,17 @@ class Libraries(object): try: if str(section_id).isdigit(): - query = 'SELECT (CASE WHEN users.friendly_name IS NULL OR TRIM(users.friendly_name) = "" ' \ - 'THEN users.username ELSE users.friendly_name END) AS friendly_name, ' \ - 'users.user_id, users.username, users.thumb, users.custom_avatar_url AS custom_thumb, ' \ - 'COUNT(DISTINCT %s) AS total_plays, (SUM(stopped - started) - ' \ - 'SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END)) AS total_time ' \ - 'FROM session_history ' \ - 'JOIN session_history_metadata ON session_history_metadata.id = session_history.id ' \ - 'JOIN users ON users.user_id = session_history.user_id ' \ - 'WHERE section_id = ? ' \ - 'GROUP BY users.user_id ' \ - 'ORDER BY total_plays DESC, total_time DESC' % group_by + query = "SELECT (CASE WHEN users.friendly_name IS NULL OR TRIM(users.friendly_name) = '' " \ + "THEN users.username ELSE users.friendly_name END) AS friendly_name, " \ + "users.user_id, users.username, users.thumb, users.custom_avatar_url AS custom_thumb, " \ + "COUNT(DISTINCT %s) AS total_plays, (SUM(stopped - started) - " \ + "SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END)) AS total_time " \ + "FROM session_history " \ + "JOIN session_history_metadata ON session_history_metadata.id = session_history.id " \ + "JOIN users ON users.user_id = session_history.user_id " \ + "WHERE section_id = ? 
" \ + "GROUP BY users.user_id " \ + "ORDER BY total_plays DESC, total_time DESC" % group_by result = monitor_db.select(query, args=[section_id]) else: result = [] @@ -1030,16 +1030,16 @@ class Libraries(object): try: if str(section_id).isdigit(): - query = 'SELECT session_history.id, session_history.media_type, guid, ' \ - 'session_history.rating_key, session_history.parent_rating_key, session_history.grandparent_rating_key, ' \ - 'title, parent_title, grandparent_title, original_title, ' \ - 'thumb, parent_thumb, grandparent_thumb, media_index, parent_media_index, ' \ - 'year, originally_available_at, added_at, live, started, user, content_rating, labels, section_id ' \ - 'FROM session_history_metadata ' \ - 'JOIN session_history ON session_history_metadata.id = session_history.id ' \ - 'WHERE section_id = ? ' \ - 'GROUP BY session_history.rating_key ' \ - 'ORDER BY MAX(started) DESC LIMIT ?' + query = "SELECT session_history.id, session_history.media_type, guid, " \ + "session_history.rating_key, session_history.parent_rating_key, session_history.grandparent_rating_key, " \ + "title, parent_title, grandparent_title, original_title, " \ + "thumb, parent_thumb, grandparent_thumb, media_index, parent_media_index, " \ + "year, originally_available_at, added_at, live, started, user, content_rating, labels, section_id " \ + "FROM session_history_metadata " \ + "JOIN session_history ON session_history_metadata.id = session_history.id " \ + "WHERE section_id = ? " \ + "GROUP BY session_history.rating_key " \ + "ORDER BY MAX(started) DESC LIMIT ?" result = monitor_db.select(query, args=[section_id, limit]) else: result = [] @@ -1085,8 +1085,8 @@ class Libraries(object): monitor_db = database.MonitorDatabase() try: - query = 'SELECT section_id, section_name, section_type, agent ' \ - 'FROM library_sections WHERE deleted_section = 0' + query = "SELECT section_id, section_name, section_type, agent " \ + "FROM library_sections WHERE deleted_section = 0" result = monitor_db.select(query=query) except Exception as e: logger.warn("Tautulli Libraries :: Unable to execute database query for get_sections: %s." % e) @@ -1110,8 +1110,8 @@ class Libraries(object): row_ids = list(map(helpers.cast_to_int, row_ids.split(','))) # Get the section_ids corresponding to the row_ids - result = monitor_db.select('SELECT server_id, section_id FROM library_sections ' - 'WHERE id IN ({})'.format(','.join(['?'] * len(row_ids))), row_ids) + result = monitor_db.select("SELECT server_id, section_id FROM library_sections " + "WHERE id IN ({})".format(",".join(["?"] * len(row_ids))), row_ids) success = [] for library in result: @@ -1135,9 +1135,9 @@ class Libraries(object): logger.info("Tautulli Libraries :: Deleting library with server_id %s and section_id %s from database." % (server_id, section_id)) try: - monitor_db.action('UPDATE library_sections ' - 'SET deleted_section = 1, keep_history = 0, do_notify = 0, do_notify_created = 0 ' - 'WHERE server_id = ? AND section_id = ?', [server_id, section_id]) + monitor_db.action("UPDATE library_sections " + "SET deleted_section = 1, keep_history = 0, do_notify = 0, do_notify_created = 0 " + "WHERE server_id = ? AND section_id = ?", [server_id, section_id]) return delete_success except Exception as e: logger.warn("Tautulli Libraries :: Unable to execute database query for delete: %s." % e) @@ -1150,26 +1150,26 @@ class Libraries(object): try: if section_id and section_id.isdigit(): - query = 'SELECT * FROM library_sections WHERE section_id = ?' 
+ query = "SELECT * FROM library_sections WHERE section_id = ?" result = monitor_db.select(query=query, args=[section_id]) if result: logger.info("Tautulli Libraries :: Re-adding library with id %s to database." % section_id) - monitor_db.action('UPDATE library_sections ' - 'SET deleted_section = 0, keep_history = 1, do_notify = 1, do_notify_created = 1 ' - 'WHERE section_id = ?', + monitor_db.action("UPDATE library_sections " + "SET deleted_section = 0, keep_history = 1, do_notify = 1, do_notify_created = 1 " + "WHERE section_id = ?", [section_id]) return True else: return False elif section_name: - query = 'SELECT * FROM library_sections WHERE section_name = ?' + query = "SELECT * FROM library_sections WHERE section_name = ?" result = monitor_db.select(query=query, args=[section_name]) if result: logger.info("Tautulli Libraries :: Re-adding library with name %s to database." % section_name) - monitor_db.action('UPDATE library_sections ' - 'SET deleted_section = 0, keep_history = 1, do_notify = 1, do_notify_created = 1 ' - 'WHERE section_name = ?', + monitor_db.action("UPDATE library_sections " + "SET deleted_section = 0, keep_history = 1, do_notify = 1, do_notify_created = 1 " + "WHERE section_name = ?", [section_name]) return True else: @@ -1203,7 +1203,7 @@ class Libraries(object): try: logger.debug("Tautulli Libraries :: Deleting libraries where server_id does not match %s." % server_id) - monitor_db.action('DELETE FROM library_sections WHERE server_id != ?', [server_id]) + monitor_db.action("DELETE FROM library_sections WHERE server_id != ?", [server_id]) return 'Deleted duplicate libraries from the database.' except Exception as e: diff --git a/plexpy/mobile_app.py b/plexpy/mobile_app.py index 458f84a0..57734975 100644 --- a/plexpy/mobile_app.py +++ b/plexpy/mobile_app.py @@ -67,17 +67,17 @@ def get_mobile_devices(device_id=None, device_token=None): args = [] if device_id or device_token: - where = 'WHERE ' + where = "WHERE " if device_id: - where_id += 'device_id = ?' + where_id += "device_id = ?" args.append(device_id) if device_token: - where_token = 'device_token = ?' + where_token = "device_token = ?" args.append(device_token) - where += ' AND '.join([w for w in [where_id, where_token] if w]) + where += " AND ".join([w for w in [where_id, where_token] if w]) db = database.MonitorDatabase() - result = db.select('SELECT * FROM mobile_devices %s' % where, args=args) + result = db.select("SELECT * FROM mobile_devices %s" % where, args=args) return result @@ -128,7 +128,7 @@ def get_mobile_device_config(mobile_device_id=None): return None db = database.MonitorDatabase() - result = db.select_single('SELECT * FROM mobile_devices WHERE id = ?', + result = db.select_single("SELECT * FROM mobile_devices WHERE id = ?", args=[mobile_device_id]) if result['onesignal_id'] == _ONESIGNAL_DISABLED: @@ -163,11 +163,11 @@ def delete_mobile_device(mobile_device_id=None, device_id=None): if mobile_device_id: logger.debug("Tautulli MobileApp :: Deleting mobile_device_id %s from the database." % mobile_device_id) - result = db.action('DELETE FROM mobile_devices WHERE id = ?', args=[mobile_device_id]) + result = db.action("DELETE FROM mobile_devices WHERE id = ?", args=[mobile_device_id]) return True elif device_id: logger.debug("Tautulli MobileApp :: Deleting device_id %s from the database." 
% device_id) - result = db.action('DELETE FROM mobile_devices WHERE device_id = ?', args=[device_id]) + result = db.action("DELETE FROM mobile_devices WHERE device_id = ?", args=[device_id]) return True else: return False @@ -179,9 +179,9 @@ def set_official(device_id, onesignal_id): platform = 'android' if official > 0 else None try: - result = db.action('UPDATE mobile_devices ' - 'SET official = ?, platform = coalesce(platform, ?) ' - 'WHERE device_id = ?', + result = db.action("UPDATE mobile_devices " + "SET official = ?, platform = coalesce(platform, ?) " + "WHERE device_id = ?", args=[official, platform, device_id]) except Exception as e: logger.warn("Tautulli MobileApp :: Failed to set official flag for device: %s." % e) @@ -193,7 +193,7 @@ def set_last_seen(device_token=None): last_seen = helpers.timestamp() try: - result = db.action('UPDATE mobile_devices SET last_seen = ? WHERE device_token = ?', + result = db.action("UPDATE mobile_devices SET last_seen = ? WHERE device_token = ?", args=[last_seen, device_token]) except Exception as e: logger.warn("Tautulli MobileApp :: Failed to set last_seen time for device: %s." % e) diff --git a/plexpy/newsletter_handler.py b/plexpy/newsletter_handler.py index 8458e144..471a5984 100644 --- a/plexpy/newsletter_handler.py +++ b/plexpy/newsletter_handler.py @@ -181,9 +181,9 @@ def set_notify_success(newsletter_log_id): def get_last_newsletter_email_msg_id(newsletter_id, notify_action): db = database.MonitorDatabase() - result = db.select_single('SELECT email_msg_id FROM newsletter_log ' - 'WHERE newsletter_id = ? AND notify_action = ? AND success = 1 ' - 'ORDER BY timestamp DESC LIMIT 1', [newsletter_id, notify_action]) + result = db.select_single("SELECT email_msg_id FROM newsletter_log " + "WHERE newsletter_id = ? AND notify_action = ? AND success = 1 " + "ORDER BY timestamp DESC LIMIT 1", [newsletter_id, notify_action]) if result: return result['email_msg_id'] @@ -193,13 +193,13 @@ def get_newsletter(newsletter_uuid=None, newsletter_id_name=None): db = database.MonitorDatabase() if newsletter_uuid: - result = db.select_single('SELECT start_date, end_date, uuid, filename FROM newsletter_log ' - 'WHERE uuid = ?', [newsletter_uuid]) + result = db.select_single("SELECT start_date, end_date, uuid, filename FROM newsletter_log " + "WHERE uuid = ?", [newsletter_uuid]) elif newsletter_id_name: - result = db.select_single('SELECT start_date, end_date, uuid, filename FROM newsletter_log ' - 'JOIN newsletters ON newsletters.id = newsletter_log.newsletter_id ' - 'WHERE id_name = ? AND notify_action != "test" ' - 'ORDER BY timestamp DESC LIMIT 1', [newsletter_id_name]) + result = db.select_single("SELECT start_date, end_date, uuid, filename FROM newsletter_log " + "JOIN newsletters ON newsletters.id = newsletter_log.newsletter_id " + "WHERE id_name = ? AND notify_action != 'test' " + "ORDER BY timestamp DESC LIMIT 1", [newsletter_id_name]) else: result = None diff --git a/plexpy/newsletters.py b/plexpy/newsletters.py index 59663fe2..94f73c8f 100644 --- a/plexpy/newsletters.py +++ b/plexpy/newsletters.py @@ -117,15 +117,15 @@ def get_newsletters(newsletter_id=None): args = [] if newsletter_id: - where = 'WHERE ' + where = "WHERE " if newsletter_id: - where_id += 'id = ?' + where_id += "id = ?" 
args.append(newsletter_id) - where += ' AND '.join([w for w in [where_id] if w]) + where += " AND ".join([w for w in [where_id] if w]) db = database.MonitorDatabase() - result = db.select('SELECT id, agent_id, agent_name, agent_label, ' - 'friendly_name, cron, active FROM newsletters %s' % where, args=args) + result = db.select("SELECT id, agent_id, agent_name, agent_label, " + "friendly_name, cron, active FROM newsletters %s" % where, args=args) return result @@ -136,7 +136,7 @@ def delete_newsletter(newsletter_id=None): if str(newsletter_id).isdigit(): logger.debug("Tautulli Newsletters :: Deleting newsletter_id %s from the database." % newsletter_id) - result = db.action('DELETE FROM newsletters WHERE id = ?', args=[newsletter_id]) + result = db.action("DELETE FROM newsletters WHERE id = ?", args=[newsletter_id]) return True else: return False @@ -151,7 +151,7 @@ def get_newsletter_config(newsletter_id=None, mask_passwords=False): return None db = database.MonitorDatabase() - result = db.select_single('SELECT * FROM newsletters WHERE id = ?', args=[newsletter_id]) + result = db.select_single("SELECT * FROM newsletters WHERE id = ?", args=[newsletter_id]) if not result: return None @@ -309,7 +309,7 @@ def send_newsletter(newsletter_id=None, subject=None, body=None, message=None, n def blacklist_logger(): db = database.MonitorDatabase() - notifiers = db.select('SELECT newsletter_config, email_config FROM newsletters') + notifiers = db.select("SELECT newsletter_config, email_config FROM newsletters") for n in notifiers: config = json.loads(n['newsletter_config'] or '{}') @@ -346,7 +346,7 @@ def generate_newsletter_uuid(): while not uuid or uuid_exists: uuid = plexpy.generate_uuid()[:8] result = db.select_single( - 'SELECT EXISTS(SELECT uuid FROM newsletter_log WHERE uuid = ?) as uuid_exists', [uuid]) + "SELECT EXISTS(SELECT uuid FROM newsletter_log WHERE uuid = ?) as uuid_exists", [uuid]) uuid_exists = result['uuid_exists'] return uuid diff --git a/plexpy/notification_handler.py b/plexpy/notification_handler.py index 2171d2bd..7dd81627 100644 --- a/plexpy/notification_handler.py +++ b/plexpy/notification_handler.py @@ -443,12 +443,12 @@ def notify(notifier_id=None, notify_action=None, stream_data=None, timeline_data def get_notify_state(session): monitor_db = database.MonitorDatabase() - result = monitor_db.select('SELECT timestamp, notify_action, notifier_id ' - 'FROM notify_log ' - 'WHERE session_key = ? ' - 'AND rating_key = ? ' - 'AND user_id = ? ' - 'ORDER BY id DESC', + result = monitor_db.select("SELECT timestamp, notify_action, notifier_id " + "FROM notify_log " + "WHERE session_key = ? " + "AND rating_key = ? " + "AND user_id = ? " + "ORDER BY id DESC", args=[session['session_key'], session['rating_key'], session['user_id']]) notify_states = [] for item in result: @@ -467,16 +467,16 @@ def get_notify_state_enabled(session, notify_action, notified=True): timestamp_where = 'AND timestamp IS NULL' monitor_db = database.MonitorDatabase() - result = monitor_db.select('SELECT id AS notifier_id, timestamp ' - 'FROM notifiers ' - 'LEFT OUTER JOIN (' - 'SELECT timestamp, notifier_id ' - 'FROM notify_log ' - 'WHERE session_key = ? ' - 'AND rating_key = ? ' - 'AND user_id = ? ' - 'AND notify_action = ?) AS t ON notifiers.id = t.notifier_id ' - 'WHERE %s = 1 %s' % (notify_action, timestamp_where), + result = monitor_db.select("SELECT id AS notifier_id, timestamp " + "FROM notifiers " + "LEFT OUTER JOIN (" + "SELECT timestamp, notifier_id " + "FROM notify_log " + "WHERE session_key = ? 
" + "AND rating_key = ? " + "AND user_id = ? " + "AND notify_action = ?) AS t ON notifiers.id = t.notifier_id " + "WHERE %s = 1 %s" % (notify_action, timestamp_where), args=[session['session_key'], session['rating_key'], session['user_id'], notify_action]) return result @@ -528,8 +528,8 @@ def set_notify_success(notification_id): def check_nofity_tag(notify_action, tag): monitor_db = database.MonitorDatabase() - result = monitor_db.select_single('SELECT * FROM notify_log ' - 'WHERE notify_action = ? AND tag = ?', + result = monitor_db.select_single("SELECT * FROM notify_log " + "WHERE notify_action = ? AND tag = ?", [notify_action, tag]) return bool(result) @@ -1631,7 +1631,7 @@ def set_hash_image_info(img=None, rating_key=None, width=750, height=1000, def get_hash_image_info(img_hash=None): db = database.MonitorDatabase() - query = 'SELECT * FROM image_hash_lookup WHERE img_hash = ?' + query = "SELECT * FROM image_hash_lookup WHERE img_hash = ?" result = db.select_single(query, args=[img_hash]) return result @@ -1640,8 +1640,8 @@ def lookup_tvmaze_by_id(rating_key=None, thetvdb_id=None, imdb_id=None, title=No db = database.MonitorDatabase() try: - query = 'SELECT imdb_id, tvmaze_id, tvmaze_url FROM tvmaze_lookup ' \ - 'WHERE rating_key = ?' + query = "SELECT imdb_id, tvmaze_id, tvmaze_url FROM tvmaze_lookup " \ + "WHERE rating_key = ?" tvmaze_info = db.select_single(query, args=[rating_key]) except Exception as e: logger.warn("Tautulli NotificationHandler :: Unable to execute database query for lookup_tvmaze_by_tvdb_id: %s." % e) @@ -1700,8 +1700,8 @@ def lookup_themoviedb_by_id(rating_key=None, thetvdb_id=None, imdb_id=None, titl db = database.MonitorDatabase() try: - query = 'SELECT thetvdb_id, imdb_id, themoviedb_id, themoviedb_url FROM themoviedb_lookup ' \ - 'WHERE rating_key = ?' + query = "SELECT thetvdb_id, imdb_id, themoviedb_id, themoviedb_url FROM themoviedb_lookup " \ + "WHERE rating_key = ?" themoviedb_info = db.select_single(query, args=[rating_key]) except Exception as e: logger.warn("Tautulli NotificationHandler :: Unable to execute database query for lookup_themoviedb_by_imdb_id: %s." % e) @@ -1778,8 +1778,8 @@ def get_themoviedb_info(rating_key=None, media_type=None, themoviedb_id=None): db = database.MonitorDatabase() try: - query = 'SELECT themoviedb_json FROM themoviedb_lookup ' \ - 'WHERE rating_key = ?' + query = "SELECT themoviedb_json FROM themoviedb_lookup " \ + "WHERE rating_key = ?" result = db.select_single(query, args=[rating_key]) except Exception as e: logger.warn("Tautulli NotificationHandler :: Unable to execute database query for get_themoviedb_info: %s." % e) @@ -1829,8 +1829,8 @@ def lookup_musicbrainz_info(musicbrainz_type=None, rating_key=None, artist=None, db = database.MonitorDatabase() try: - query = 'SELECT musicbrainz_id, musicbrainz_url, musicbrainz_type FROM musicbrainz_lookup ' \ - 'WHERE rating_key = ?' + query = "SELECT musicbrainz_id, musicbrainz_url, musicbrainz_type FROM musicbrainz_lookup " \ + "WHERE rating_key = ?" musicbrainz_info = db.select_single(query, args=[rating_key]) except Exception as e: logger.warn("Tautulli NotificationHandler :: Unable to execute database query for lookup_musicbrainz: %s." 
% e) diff --git a/plexpy/notifiers.py b/plexpy/notifiers.py index c7580603..a2fa6341 100644 --- a/plexpy/notifiers.py +++ b/plexpy/notifiers.py @@ -507,7 +507,7 @@ def get_notifiers(notifier_id=None, notify_action=None): where += ' AND '.join([w for w in [where_id, where_action] if w]) db = database.MonitorDatabase() - result = db.select('SELECT id, agent_id, agent_name, agent_label, friendly_name, %s FROM notifiers %s' + result = db.select("SELECT id, agent_id, agent_name, agent_label, friendly_name, %s FROM notifiers %s" % (', '.join(notify_actions), where), args=args) for item in result: @@ -522,7 +522,7 @@ def delete_notifier(notifier_id=None): if str(notifier_id).isdigit(): logger.debug("Tautulli Notifiers :: Deleting notifier_id %s from the database." % notifier_id) - result = db.action('DELETE FROM notifiers WHERE id = ?', args=[notifier_id]) + result = db.action("DELETE FROM notifiers WHERE id = ?", args=[notifier_id]) return True else: return False @@ -537,7 +537,7 @@ def get_notifier_config(notifier_id=None, mask_passwords=False): return None db = database.MonitorDatabase() - result = db.select_single('SELECT * FROM notifiers WHERE id = ?', args=[notifier_id]) + result = db.select_single("SELECT * FROM notifiers WHERE id = ?", args=[notifier_id]) if not result: return None @@ -3866,8 +3866,8 @@ class TAUTULLIREMOTEAPP(Notifier): db = database.MonitorDatabase() try: - query = 'SELECT * FROM mobile_devices WHERE official = 1 ' \ - 'AND onesignal_id IS NOT NULL AND onesignal_id != ""' + query = "SELECT * FROM mobile_devices WHERE official = 1 " \ + "AND onesignal_id IS NOT NULL AND onesignal_id != ''" return db.select(query=query) except Exception as e: logger.warn("Tautulli Notifiers :: Unable to retrieve Tautulli Remote app devices list: %s." % e) @@ -4472,8 +4472,8 @@ def check_browser_enabled(): def get_browser_notifications(): db = database.MonitorDatabase() - result = db.select('SELECT notifier_id, subject_text, body_text FROM notify_log ' - 'WHERE agent_id = 17 AND timestamp >= ? ', + result = db.select("SELECT notifier_id, subject_text, body_text FROM notify_log " + "WHERE agent_id = 17 AND timestamp >= ? ", args=[time.time() - 5]) notifications = [] diff --git a/plexpy/plexivity_import.py b/plexpy/plexivity_import.py index 3e5350bc..644782a2 100644 --- a/plexpy/plexivity_import.py +++ b/plexpy/plexivity_import.py @@ -304,27 +304,27 @@ def import_from_plexivity(database_file=None, table_name=None, import_ignore_int logger.debug("Tautulli Importer :: Unable to refresh the users list. 
Aborting import.") return None - query = 'SELECT id AS id, ' \ - 'time AS started, ' \ - 'stopped, ' \ - 'null AS user_id, ' \ - 'user, ' \ - 'ip_address, ' \ - 'paused_counter, ' \ - 'platform AS player, ' \ - 'null AS platform, ' \ - 'null as machine_id, ' \ - 'null AS media_type, ' \ - 'null AS view_offset, ' \ - 'xml, ' \ - 'rating as content_rating,' \ - 'summary,' \ - 'title AS full_title,' \ - '(case when orig_title_ep = "n/a" then orig_title else ' \ - 'orig_title_ep end) as title,' \ - '(case when orig_title_ep != "n/a" then orig_title else ' \ - 'null end) as grandparent_title ' \ - 'FROM ' + table_name + ' ORDER BY id' + query = "SELECT id AS id, " \ + "time AS started, " \ + "stopped, " \ + "null AS user_id, " \ + "user, " \ + "ip_address, " \ + "paused_counter, " \ + "platform AS player, " \ + "null AS platform, " \ + "null as machine_id, " \ + "null AS media_type, " \ + "null AS view_offset, " \ + "xml, " \ + "rating as content_rating," \ + "summary," \ + "title AS full_title," \ + "(case when orig_title_ep = 'n/a' then orig_title else " \ + "orig_title_ep end) as title," \ + "(case when orig_title_ep != 'n/a' then orig_title else " \ + "null end) as grandparent_title " \ + "FROM " + table_name + " ORDER BY id" result = connection.execute(query) @@ -456,9 +456,9 @@ def import_users(): logger.debug("Tautulli Importer :: Importing Plexivity Users...") monitor_db = database.MonitorDatabase() - query = 'INSERT OR IGNORE INTO users (user_id, username) ' \ - 'SELECT user_id, user ' \ - 'FROM session_history WHERE user_id != 1 GROUP BY user_id' + query = "INSERT OR IGNORE INTO users (user_id, username) " \ + "SELECT user_id, user " \ + "FROM session_history WHERE user_id != 1 GROUP BY user_id" try: monitor_db.action(query) diff --git a/plexpy/plexwatch_import.py b/plexpy/plexwatch_import.py index ac0fd7b0..4d8ec80b 100644 --- a/plexpy/plexwatch_import.py +++ b/plexpy/plexwatch_import.py @@ -295,29 +295,29 @@ def import_from_plexwatch(database_file=None, table_name=None, import_ignore_int logger.debug("Tautulli Importer :: Unable to refresh the users list. 
Aborting import.") return None - query = 'SELECT time AS started, ' \ - 'stopped, ' \ - 'cast(ratingKey as text) AS rating_key, ' \ - 'null AS user_id, ' \ - 'user, ' \ - 'ip_address, ' \ - 'paused_counter, ' \ - 'platform AS player, ' \ - 'null AS platform, ' \ - 'null as machine_id, ' \ - 'parentRatingKey as parent_rating_key, ' \ - 'grandparentRatingKey as grandparent_rating_key, ' \ - 'null AS media_type, ' \ - 'null AS view_offset, ' \ - 'xml, ' \ - 'rating as content_rating,' \ - 'summary,' \ - 'title AS full_title,' \ - '(case when orig_title_ep = "" then orig_title else ' \ - 'orig_title_ep end) as title,' \ - '(case when orig_title_ep != "" then orig_title else ' \ - 'null end) as grandparent_title ' \ - 'FROM ' + table_name + ' ORDER BY id' + query = "SELECT time AS started, " \ + "stopped, " \ + "cast(ratingKey as text) AS rating_key, " \ + "null AS user_id, " \ + "user, " \ + "ip_address, " \ + "paused_counter, " \ + "platform AS player, " \ + "null AS platform, " \ + "null as machine_id, " \ + "parentRatingKey as parent_rating_key, " \ + "grandparentRatingKey as grandparent_rating_key, " \ + "null AS media_type, " \ + "null AS view_offset, " \ + "xml, " \ + "rating as content_rating," \ + "summary," \ + "title AS full_title," \ + "(case when orig_title_ep = '' then orig_title else " \ + "orig_title_ep end) as title," \ + "(case when orig_title_ep != '' then orig_title else " \ + "null end) as grandparent_title " \ + "FROM " + table_name + " ORDER BY id" result = connection.execute(query) @@ -450,9 +450,9 @@ def import_users(): logger.debug("Tautulli Importer :: Importing PlexWatch Users...") monitor_db = database.MonitorDatabase() - query = 'INSERT OR IGNORE INTO users (user_id, username) ' \ - 'SELECT user_id, user ' \ - 'FROM session_history WHERE user_id != 1 GROUP BY user_id' + query = "INSERT OR IGNORE INTO users (user_id, username) " \ + "SELECT user_id, user " \ + "FROM session_history WHERE user_id != 1 GROUP BY user_id" try: monitor_db.action(query) diff --git a/plexpy/users.py b/plexpy/users.py index 5ec093ff..0e201791 100644 --- a/plexpy/users.py +++ b/plexpy/users.py @@ -75,8 +75,8 @@ def refresh_users(): # Check if we've set a custom avatar if so don't overwrite it. 
if keys_dict['user_id']: - avatar_urls = monitor_db.select('SELECT thumb, custom_avatar_url ' - 'FROM users WHERE user_id = ?', + avatar_urls = monitor_db.select("SELECT thumb, custom_avatar_url " + "FROM users WHERE user_id = ?", [keys_dict['user_id']]) if avatar_urls: if not avatar_urls[0]['custom_avatar_url'] or \ @@ -98,7 +98,7 @@ def refresh_users(): if result == 'insert': new_users.append(item['username']) - query = 'UPDATE users SET is_active = 0 WHERE user_id NOT IN ({})'.format(', '.join(['?'] * len(user_ids))) + query = "UPDATE users SET is_active = 0 WHERE user_id NOT IN ({})".format(", ".join(["?"] * len(user_ids))) monitor_db.action(query=query, args=user_ids) # Add new users to loger username filter @@ -137,43 +137,43 @@ class Users(object): group_by = 'session_history.reference_id' if grouping else 'session_history.id' - columns = ['users.id AS row_id', - 'users.user_id', - 'users.username', - '(CASE WHEN users.friendly_name IS NULL OR TRIM(users.friendly_name) = "" \ - THEN users.username ELSE users.friendly_name END) AS friendly_name', - 'users.title', - 'users.email', - 'users.thumb AS user_thumb', - 'users.custom_avatar_url AS custom_thumb', - 'COUNT(DISTINCT %s) AS plays' % group_by, - 'SUM(CASE WHEN session_history.stopped > 0 THEN (session_history.stopped - session_history.started) \ + columns = ["users.id AS row_id", + "users.user_id", + "users.username", + "(CASE WHEN users.friendly_name IS NULL OR TRIM(users.friendly_name) = '' \ + THEN users.username ELSE users.friendly_name END) AS friendly_name", + "users.title", + "users.email", + "users.thumb AS user_thumb", + "users.custom_avatar_url AS custom_thumb", + "COUNT(DISTINCT %s) AS plays" % group_by, + "SUM(CASE WHEN session_history.stopped > 0 THEN (session_history.stopped - session_history.started) \ ELSE 0 END) - SUM(CASE WHEN session_history.paused_counter IS NULL THEN 0 ELSE \ - session_history.paused_counter END) AS duration', - 'MAX(session_history.started) AS last_seen', - 'MAX(session_history.id) AS history_row_id', - 'session_history_metadata.full_title AS last_played', - 'session_history.ip_address', - 'session_history.platform', - 'session_history.player', - 'session_history.rating_key', - 'session_history_metadata.media_type', - 'session_history_metadata.thumb', - 'session_history_metadata.parent_thumb', - 'session_history_metadata.grandparent_thumb', - 'session_history_metadata.parent_title', - 'session_history_metadata.year', - 'session_history_metadata.media_index', - 'session_history_metadata.parent_media_index', - 'session_history_metadata.live', - 'session_history_metadata.added_at', - 'session_history_metadata.originally_available_at', - 'session_history_metadata.guid', - 'session_history_media_info.transcode_decision', - 'users.do_notify AS do_notify', - 'users.keep_history AS keep_history', - 'users.allow_guest AS allow_guest', - 'users.is_active AS is_active' + session_history.paused_counter END) AS duration", + "MAX(session_history.started) AS last_seen", + "MAX(session_history.id) AS history_row_id", + "session_history_metadata.full_title AS last_played", + "session_history.ip_address", + "session_history.platform", + "session_history.player", + "session_history.rating_key", + "session_history_metadata.media_type", + "session_history_metadata.thumb", + "session_history_metadata.parent_thumb", + "session_history_metadata.grandparent_thumb", + "session_history_metadata.parent_title", + "session_history_metadata.year", + "session_history_metadata.media_index", + 
"session_history_metadata.parent_media_index", + "session_history_metadata.live", + "session_history_metadata.added_at", + "session_history_metadata.originally_available_at", + "session_history_metadata.guid", + "session_history_media_info.transcode_decision", + "users.do_notify AS do_notify", + "users.keep_history AS keep_history", + "users.allow_guest AS allow_guest", + "users.is_active AS is_active" ] try: query = data_tables.ssp_query(table_name='users', @@ -270,32 +270,32 @@ class Users(object): custom_where = ['users.user_id', user_id] - columns = ['session_history.id AS history_row_id', - 'MIN(session_history.started) AS first_seen', - 'MAX(session_history.started) AS last_seen', - 'session_history.ip_address', - 'COUNT(session_history.id) AS play_count', - 'session_history.platform', - 'session_history.player', - 'session_history.rating_key', - 'session_history_metadata.full_title AS last_played', - 'session_history_metadata.thumb', - 'session_history_metadata.parent_thumb', - 'session_history_metadata.grandparent_thumb', - 'session_history_metadata.media_type', - 'session_history_metadata.parent_title', - 'session_history_metadata.year', - 'session_history_metadata.media_index', - 'session_history_metadata.parent_media_index', - 'session_history_metadata.live', - 'session_history_metadata.added_at', - 'session_history_metadata.originally_available_at', - 'session_history_metadata.guid', - 'session_history_media_info.transcode_decision', - 'session_history.user', - 'session_history.user_id as custom_user_id', - '(CASE WHEN users.friendly_name IS NULL OR TRIM(users.friendly_name) = "" \ - THEN users.username ELSE users.friendly_name END) AS friendly_name' + columns = ["session_history.id AS history_row_id", + "MIN(session_history.started) AS first_seen", + "MAX(session_history.started) AS last_seen", + "session_history.ip_address", + "COUNT(session_history.id) AS play_count", + "session_history.platform", + "session_history.player", + "session_history.rating_key", + "session_history_metadata.full_title AS last_played", + "session_history_metadata.thumb", + "session_history_metadata.parent_thumb", + "session_history_metadata.grandparent_thumb", + "session_history_metadata.media_type", + "session_history_metadata.parent_title", + "session_history_metadata.year", + "session_history_metadata.media_index", + "session_history_metadata.parent_media_index", + "session_history_metadata.live", + "session_history_metadata.added_at", + "session_history_metadata.originally_available_at", + "session_history_metadata.guid", + "session_history_media_info.transcode_decision", + "session_history.user", + "session_history.user_id as custom_user_id", + "(CASE WHEN users.friendly_name IS NULL OR TRIM(users.friendly_name) = '' \ + THEN users.username ELSE users.friendly_name END) AS friendly_name" ] try: @@ -436,31 +436,31 @@ class Users(object): last_seen = 'NULL' join = '' if include_last_seen: - last_seen = 'MAX(session_history.started)' - join = 'LEFT OUTER JOIN session_history ON users.user_id = session_history.user_id' + last_seen = "MAX(session_history.started)" + join = "LEFT OUTER JOIN session_history ON users.user_id = session_history.user_id" monitor_db = database.MonitorDatabase() try: if str(user_id).isdigit(): - where = 'users.user_id = ?' + where = "users.user_id = ?" args = [user_id] elif user: - where = 'users.username = ?' + where = "users.username = ?" args = [user] elif email: - where = 'users.email = ?' + where = "users.email = ?" 
args = [email] else: - raise Exception('Missing user_id, username, or email') + raise Exception("Missing user_id, username, or email") - query = 'SELECT users.id AS row_id, users.user_id, username, friendly_name, ' \ - 'thumb AS user_thumb, custom_avatar_url AS custom_thumb, ' \ - 'email, is_active, is_admin, is_home_user, is_allow_sync, is_restricted, ' \ - 'do_notify, keep_history, deleted_user, ' \ - 'allow_guest, shared_libraries, %s AS last_seen ' \ - 'FROM users %s ' \ - 'WHERE %s COLLATE NOCASE' % (last_seen, join, where) + query = "SELECT users.id AS row_id, users.user_id, username, friendly_name, " \ + "thumb AS user_thumb, custom_avatar_url AS custom_thumb, " \ + "email, is_active, is_admin, is_home_user, is_allow_sync, is_restricted, " \ + "do_notify, keep_history, deleted_user, " \ + "allow_guest, shared_libraries, %s AS last_seen " \ + "FROM users %s " \ + "WHERE %s COLLATE NOCASE" % (last_seen, join, where) result = monitor_db.select(query, args=args) except Exception as e: logger.warn("Tautulli Users :: Unable to execute database query for get_user_details: %s." % e) @@ -531,22 +531,22 @@ class Users(object): try: if days > 0: if str(user_id).isdigit(): - query = 'SELECT (SUM(stopped - started) - ' \ - ' SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END)) AS total_time, ' \ - 'COUNT(DISTINCT %s) AS total_plays ' \ - 'FROM session_history ' \ - 'WHERE stopped >= %s ' \ - 'AND user_id = ? ' % (group_by, timestamp_query) + query = "SELECT (SUM(stopped - started) - " \ + " SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END)) AS total_time, " \ + "COUNT(DISTINCT %s) AS total_plays " \ + "FROM session_history " \ + "WHERE stopped >= %s " \ + "AND user_id = ? " % (group_by, timestamp_query) result = monitor_db.select(query, args=[user_id]) else: result = [] else: if str(user_id).isdigit(): - query = 'SELECT (SUM(stopped - started) - ' \ - ' SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END)) AS total_time, ' \ - 'COUNT(DISTINCT %s) AS total_plays ' \ - 'FROM session_history ' \ - 'WHERE user_id = ? ' % group_by + query = "SELECT (SUM(stopped - started) - " \ + " SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END)) AS total_time, " \ + "COUNT(DISTINCT %s) AS total_plays " \ + "FROM session_history " \ + "WHERE user_id = ? " % group_by result = monitor_db.select(query, args=[user_id]) else: result = [] @@ -587,13 +587,13 @@ class Users(object): try: if str(user_id).isdigit(): - query = 'SELECT player, COUNT(DISTINCT %s) as total_plays, (SUM(stopped - started) - ' \ - 'SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END)) AS total_time, ' \ - 'platform ' \ - 'FROM session_history ' \ - 'WHERE user_id = ? ' \ - 'GROUP BY player ' \ - 'ORDER BY total_plays DESC, total_time DESC' % group_by + query = "SELECT player, COUNT(DISTINCT %s) as total_plays, (SUM(stopped - started) - " \ + "SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END)) AS total_time, " \ + "platform " \ + "FROM session_history " \ + "WHERE user_id = ? 
" \ + "GROUP BY player " \ + "ORDER BY total_plays DESC, total_time DESC" % group_by result = monitor_db.select(query, args=[user_id]) else: result = [] @@ -630,17 +630,17 @@ class Users(object): try: if str(user_id).isdigit(): - query = 'SELECT session_history.id, session_history.media_type, guid, ' \ - 'session_history.rating_key, session_history.parent_rating_key, session_history.grandparent_rating_key, ' \ - 'title, parent_title, grandparent_title, original_title, ' \ - 'thumb, parent_thumb, grandparent_thumb, media_index, parent_media_index, ' \ - 'year, originally_available_at, added_at, live, started, user ' \ - 'FROM session_history_metadata ' \ - 'JOIN session_history ON session_history_metadata.id = session_history.id ' \ - 'WHERE user_id = ? ' \ - 'GROUP BY (CASE WHEN session_history.media_type = "track" THEN session_history.parent_rating_key ' \ - ' ELSE session_history.rating_key END) ' \ - 'ORDER BY MAX(started) DESC LIMIT ?' + query = "SELECT session_history.id, session_history.media_type, guid, " \ + "session_history.rating_key, session_history.parent_rating_key, session_history.grandparent_rating_key, " \ + "title, parent_title, grandparent_title, original_title, " \ + "thumb, parent_thumb, grandparent_thumb, media_index, parent_media_index, " \ + "year, originally_available_at, added_at, live, started, user " \ + "FROM session_history_metadata " \ + "JOIN session_history ON session_history_metadata.id = session_history.id " \ + "WHERE user_id = ? " \ + "GROUP BY (CASE WHEN session_history.media_type = 'track' THEN session_history.parent_rating_key " \ + " ELSE session_history.rating_key END) " \ + "ORDER BY MAX(started) DESC LIMIT ?" result = monitor_db.select(query, args=[user_id, limit]) else: result = [] @@ -683,11 +683,11 @@ class Users(object): monitor_db = database.MonitorDatabase() try: - query = 'SELECT id AS row_id, user_id, username, friendly_name, thumb, custom_avatar_url, email, ' \ - 'is_active, is_admin, is_home_user, is_allow_sync, is_restricted, ' \ - 'do_notify, keep_history, allow_guest, shared_libraries, ' \ - 'filter_all, filter_movies, filter_tv, filter_music, filter_photos ' \ - 'FROM users WHERE deleted_user = 0' + query = "SELECT id AS row_id, user_id, username, friendly_name, thumb, custom_avatar_url, email, " \ + "is_active, is_admin, is_home_user, is_allow_sync, is_restricted, " \ + "do_notify, keep_history, allow_guest, shared_libraries, " \ + "filter_all, filter_movies, filter_tv, filter_music, filter_photos " \ + "FROM users WHERE deleted_user = 0" result = monitor_db.select(query=query) except Exception as e: logger.warn("Tautulli Users :: Unable to execute database query for get_users: %s." % e) @@ -729,8 +729,8 @@ class Users(object): row_ids = list(map(helpers.cast_to_int, row_ids.split(','))) # Get the user_ids corresponding to the row_ids - result = monitor_db.select('SELECT user_id FROM users ' - 'WHERE id IN ({})'.format(','.join(['?'] * len(row_ids))), row_ids) + result = monitor_db.select("SELECT user_id FROM users " + "WHERE id IN ({})".format(",".join(["?"] * len(row_ids))), row_ids) success = [] for user in result: @@ -747,9 +747,9 @@ class Users(object): logger.info("Tautulli Users :: Deleting user with user_id %s from database." 
% user_id) try: - monitor_db.action('UPDATE users ' - 'SET deleted_user = 1, keep_history = 0, do_notify = 0 ' - 'WHERE user_id = ?', [user_id]) + monitor_db.action("UPDATE users " + "SET deleted_user = 1, keep_history = 0, do_notify = 0 " + "WHERE user_id = ?", [user_id]) return delete_success except Exception as e: logger.warn("Tautulli Users :: Unable to execute database query for delete: %s." % e) @@ -762,25 +762,25 @@ class Users(object): try: if user_id and str(user_id).isdigit(): - query = 'SELECT * FROM users WHERE user_id = ?' + query = "SELECT * FROM users WHERE user_id = ?" result = monitor_db.select(query=query, args=[user_id]) if result: logger.info("Tautulli Users :: Re-adding user with id %s to database." % user_id) - monitor_db.action('UPDATE users ' - 'SET deleted_user = 0, keep_history = 1, do_notify = 1 ' - 'WHERE user_id = ?', [user_id]) + monitor_db.action("UPDATE users " + "SET deleted_user = 0, keep_history = 1, do_notify = 1 " + "WHERE user_id = ?", [user_id]) return True else: return False elif username: - query = 'SELECT * FROM users WHERE username = ?' + query = "SELECT * FROM users WHERE username = ?" result = monitor_db.select(query=query, args=[username]) if result: logger.info("Tautulli Users :: Re-adding user with username %s to database." % username) - monitor_db.action('UPDATE users ' - 'SET deleted_user = 0, keep_history = 1, do_notify = 1 ' - 'WHERE username = ?', [username]) + monitor_db.action("UPDATE users " + "SET deleted_user = 0, keep_history = 1, do_notify = 1 " + "WHERE username = ?", [username]) return True else: return False @@ -793,7 +793,7 @@ class Users(object): if user: try: monitor_db = database.MonitorDatabase() - query = 'SELECT user_id FROM users WHERE username = ?' + query = "SELECT user_id FROM users WHERE username = ?" result = monitor_db.select_single(query, args=[user]) if result: return result['user_id'] @@ -809,14 +809,14 @@ class Users(object): user_cond = '' if session.get_session_user_id(): - user_cond = 'AND user_id = %s ' % session.get_session_user_id() + user_cond = "AND user_id = %s " % session.get_session_user_id() try: - query = 'SELECT user_id, ' \ - '(CASE WHEN users.friendly_name IS NULL OR TRIM(users.friendly_name) = "" \ - THEN users.username ELSE users.friendly_name END) AS friendly_name ' \ - 'FROM users ' \ - 'WHERE deleted_user = 0 %s' % user_cond + query = "SELECT user_id, " \ + "(CASE WHEN users.friendly_name IS NULL OR TRIM(users.friendly_name) = '' \ + THEN users.username ELSE users.friendly_name END) AS friendly_name " \ + "FROM users " \ + "WHERE deleted_user = 0 %s" % user_cond result = monitor_db.select(query) except Exception as e: @@ -835,8 +835,8 @@ class Users(object): if user_id: try: monitor_db = database.MonitorDatabase() - query = 'SELECT allow_guest, user_token, server_token FROM users ' \ - 'WHERE user_id = ? AND deleted_user = 0' + query = "SELECT allow_guest, user_token, server_token FROM users " \ + "WHERE user_id = ? AND deleted_user = 0" result = monitor_db.select_single(query, args=[user_id]) if result: tokens = {'allow_guest': result['allow_guest'], @@ -857,8 +857,8 @@ class Users(object): try: monitor_db = database.MonitorDatabase() - query = 'SELECT filter_all, filter_movies, filter_tv, filter_music, filter_photos FROM users ' \ - 'WHERE user_id = ?' + query = "SELECT filter_all, filter_movies, filter_tv, filter_music, filter_photos FROM users " \ + "WHERE user_id = ?" 
result = monitor_db.select_single(query, args=[user_id]) except Exception as e: logger.warn("Tautulli Users :: Unable to execute database query for get_filters: %s." % e) @@ -907,8 +907,8 @@ class Users(object): def get_user_login(self, jwt_token): monitor_db = database.MonitorDatabase() - result = monitor_db.select_single('SELECT * FROM user_login ' - 'WHERE jwt_token = ?', + result = monitor_db.select_single("SELECT * FROM user_login " + "WHERE jwt_token = ?", [jwt_token]) return result @@ -918,8 +918,8 @@ class Users(object): if jwt_token: logger.debug("Tautulli Users :: Clearing user JWT token.") try: - monitor_db.action('UPDATE user_login SET jwt_token = NULL ' - 'WHERE jwt_token = ?', + monitor_db.action("UPDATE user_login SET jwt_token = NULL " + "WHERE jwt_token = ?", [jwt_token]) except Exception as e: logger.error("Tautulli Users :: Unable to clear user JWT token: %s.", e) @@ -929,8 +929,8 @@ class Users(object): row_ids = list(map(helpers.cast_to_int, row_ids.split(','))) logger.debug("Tautulli Users :: Clearing JWT tokens for row_ids %s.", row_ids) try: - monitor_db.action('UPDATE user_login SET jwt_token = NULL ' - 'WHERE id in ({})'.format(','.join(['?'] * len(row_ids))), + monitor_db.action("UPDATE user_login SET jwt_token = NULL " + "WHERE id in ({})".format(",".join(["?"] * len(row_ids))), row_ids) except Exception as e: logger.error("Tautulli Users :: Unable to clear JWT tokens: %s.", e) @@ -954,19 +954,19 @@ class Users(object): else: custom_where = [['user_login.user_id', user_id]] if user_id else [] - columns = ['user_login.id AS row_id', - 'user_login.timestamp', - 'user_login.user_id', - 'user_login.user', - 'user_login.user_group', - 'user_login.ip_address', - 'user_login.host', - 'user_login.user_agent', - 'user_login.success', - 'user_login.expiry', - 'user_login.jwt_token', - '(CASE WHEN users.friendly_name IS NULL OR TRIM(users.friendly_name) = "" \ - THEN users.username ELSE users.friendly_name END) AS friendly_name' + columns = ["user_login.id AS row_id", + "user_login.timestamp", + "user_login.user_id", + "user_login.user", + "user_login.user_group", + "user_login.ip_address", + "user_login.host", + "user_login.user_agent", + "user_login.success", + "user_login.expiry", + "user_login.jwt_token", + "(CASE WHEN users.friendly_name IS NULL OR TRIM(users.friendly_name) = '' \ + THEN users.username ELSE users.friendly_name END) AS friendly_name" ] try: diff --git a/plexpy/webauth.py b/plexpy/webauth.py index 50ce2d2c..d105a8c2 100644 --- a/plexpy/webauth.py +++ b/plexpy/webauth.py @@ -99,7 +99,7 @@ def plex_user_login(token=None, headers=None): try: logger.debug("Tautulli WebAuth :: Registering token for user '%s' in the database." % user_details['username']) - result = monitor_db.action('UPDATE users SET server_token = ? WHERE user_id = ?', + result = monitor_db.action("UPDATE users SET server_token = ? WHERE user_id = ?", [server_token, user_details['user_id']]) if result: @@ -246,12 +246,12 @@ def all_of(*conditions): def check_rate_limit(ip_address): monitor_db = MonitorDatabase() - result = monitor_db.select('SELECT timestamp, success FROM user_login ' - 'WHERE ip_address = ? ' - 'AND timestamp >= ( ' - 'SELECT CASE WHEN MAX(timestamp) IS NULL THEN 0 ELSE MAX(timestamp) END ' - 'FROM user_login WHERE ip_address = ? AND success = 1) ' - 'ORDER BY timestamp DESC', + result = monitor_db.select("SELECT timestamp, success FROM user_login " + "WHERE ip_address = ? 
" + "AND timestamp >= ( " + "SELECT CASE WHEN MAX(timestamp) IS NULL THEN 0 ELSE MAX(timestamp) END " + "FROM user_login WHERE ip_address = ? AND success = 1) " + "ORDER BY timestamp DESC", [ip_address, ip_address]) try: