diff --git a/data/interfaces/default/base.html b/data/interfaces/default/base.html index e756fd4e..fbfad400 100644 --- a/data/interfaces/default/base.html +++ b/data/interfaces/default/base.html @@ -174,37 +174,42 @@ from plexpy import version - % if title=="Home": + % if title == "Home":
  • % else:
  • % endif - % if title=="Users" or title=="User": + % if title == "Libraries" or title == "Library" or title == "Info": +
  • Libraries
  • + % else: +
  • Libraries
  • + % endif + % if title == "Users" or title == "User":
  • Users
  • % else:
  • Users
  • % endif - % if title=="History": + % if title == "History":
  • History
  • % else:
  • History
  • % endif - % if title=="Graphs": + % if title == "Graphs":
  • Graphs
  • % else:
  • Graphs
  • % endif - % if title=="Synced Items": + % if title == "Synced Items":
  • Synced Items
  • % else:
  • Synced Items
  • % endif - % if title=="Log": + % if title == "Log":
  • Logs
  • % else:
  • Logs
  • % endif - % if title=="Settings": + % if title == "Settings":
  • Settings
  • % else:
  • Settings
  • diff --git a/data/interfaces/default/css/plexpy.css b/data/interfaces/default/css/plexpy.css index 7c20f941..d6031749 100644 --- a/data/interfaces/default/css/plexpy.css +++ b/data/interfaces/default/css/plexpy.css @@ -498,6 +498,16 @@ textarea.form-control:focus { -moz-box-shadow: 0 0 4px rgba(0,0,0,.3),inset 0 0 0 1px rgba(255,255,255,.1); box-shadow: 0 0 4px rgba(0,0,0,.3),inset 0 0 0 1px rgba(255,255,255,.1); } +.libraries-poster-face { + overflow: hidden; + float: left; + background-size: contain; + height: 40px; + width: 40px; + /*-webkit-box-shadow: 0 0 4px rgba(0,0,0,.3),inset 0 0 0 1px rgba(255,255,255,.1); + -moz-box-shadow: 0 0 4px rgba(0,0,0,.3),inset 0 0 0 1px rgba(255,255,255,.1); + box-shadow: 0 0 4px rgba(0,0,0,.3),inset 0 0 0 1px rgba(255,255,255,.1);*/ +} a .poster-face:hover, a .cover-face:hover, a .users-poster-face:hover { @@ -1195,6 +1205,7 @@ a:hover .summary-poster-face-track .summary-poster-face-overlay span { width: 250px; height: 1px; margin: 0 40px 20px 25px; + position: relative; } .summary-content { position: relative; @@ -1218,6 +1229,18 @@ a:hover .summary-poster-face-track .summary-poster-face-overlay span { margin-left: 2px; margin-right: 10px; } +.summary-content-media-info-wrapper { + width: 100%; + position: absolute; + bottom: 0; + left: 0; + text-align: center; +} +.summary-content-media-info { + max-width: 75pt; + max-height: 20px; + margin: 0 10px; +} .summary-content-summary { overflow: hidden; color: #fff; @@ -1654,7 +1677,6 @@ a:hover .item-children-poster { } .user-player-instance-box { float: left; - width: 75px; border-radius: 3px; -webkit-box-shadow: 0 0 4px rgba(0,0,0,.3),inset 0 0 0 1px rgba(255,255,255,.1); -moz-box-shadow: 0 0 4px rgba(0,0,0,.3),inset 0 0 0 1px rgba(255,255,255,.1); @@ -1676,6 +1698,7 @@ a:hover .item-children-poster { font-weight: normal; width: 140px; margin-left: 10px; + margin-bottom: 10px; } .user-player-instance-playcount h3 { font-size: 30px; @@ -1695,6 +1718,35 @@ a:hover 
.item-children-poster { top: 15px; left: 0px; } +.library-info-poster-face { + float: left; + margin-top: 15px; + margin-right: 15px; + background-size: contain; + height: 80px; + width: 80px; + /*-webkit-box-shadow: 0 0 4px rgba(0,0,0,.3),inset 0 0 0 1px rgba(255,255,255,.1); + -moz-box-shadow: 0 0 4px rgba(0,0,0,.3),inset 0 0 0 1px rgba(255,255,255,.1); + box-shadow: 0 0 4px rgba(0,0,0,.3),inset 0 0 0 1px rgba(255,255,255,.1);*/ +} +.library-user-instance-box { + float: left; + -webkit-border-radius: 50%; + -moz-border-radius: 50%; + border-radius: 50%; + -webkit-box-shadow: 0 0 4px rgba(0,0,0,.3),inset 0 0 0 1px rgba(255,255,255,.1); + -moz-box-shadow: 0 0 4px rgba(0,0,0,.3),inset 0 0 0 1px rgba(255,255,255,.1); + box-shadow: 0 0 4px rgba(0,0,0,.3),inset 0 0 0 1px rgba(255,255,255,.1); + background-size: contain; + position: relative; + height: 80px; + width: 80px; +} +.library-user-instance-box:hover { + -webkit-box-shadow: inset 0 0 0 2px #e9a049; + -moz-box-shadow: inset 0 0 0 2px #e9a049; + box-shadow: inset 0 0 0 2px #e9a049; +} .home-platforms { } .home-platforms ul { @@ -2127,7 +2179,8 @@ a .home-platforms-instance-list-oval:hover, float: right; } .colvis-button-bar, -.refresh-users-button { +.refresh-users-button, +.refresh-libraries-button { float: right; } .nav-settings, @@ -2346,42 +2399,52 @@ a .home-platforms-instance-list-oval:hover, background: #fff; border: 0; font-weight: bold; + border-radius: 2px; } -.history-title .popover.right { - margin-left: 12px; - z-index: 5; +.history-thumbnail-popover { + z-index: 2; + padding: 0; + border: 0; } -.history-title .popover.right .popover-content { - padding: 5px 8px; +.history-thumbnail-popover.popover.right { + margin-left: 15px; +} +.history-thumbnail-popover .popover-content { + color: #000; + padding: 0; } .history-thumbnail { background-position: center; background-size: cover; width: 80px; + -webkit-border-radius: 3px; + -moz-border-radius: 3px; + border-radius: 3px; + -webkit-box-shadow: 0 0 4px 
rgba(0,0,0,.3),inset 0 0 0 1px rgba(255,255,255,.1); + -moz-box-shadow: 0 0 4px rgba(0,0,0,.3),inset 0 0 0 1px rgba(255,255,255,.1); + box-shadow: 0 0 4px rgba(0,0,0,.3),inset 0 0 0 1px rgba(255,255,255,.1); } -.edit-user-toggles { +.edit-user-toggles, +.edit-library-toggles { padding-right: 10px; } -.edit-user-toggles > input[type='checkbox'] { +.edit-user-toggles > input[type='checkbox'], +.edit-library-toggles > input[type='checkbox'] { display: none; } -.edit-user-toggles > input[type='checkbox'] + label { +.edit-user-toggles > input[type='checkbox'] + label, +.edit-library-toggles > input[type='checkbox'] + label { color: #444; cursor: pointer; } -.edit-user-toggles > input[type='checkbox']:checked + label { +.edit-user-toggles > input[type='checkbox']:checked + label, +.edit-library-toggles > input[type='checkbox']:checked + label { color: #fff; cursor: pointer; } .edit-user-name > input[type='text'] { margin: 0; } -.popover { - z-index: 2; -} -.popover .popover-content { - color: #000; -} .noTransition { -moz-transition: none !important; @@ -2418,7 +2481,9 @@ a .home-platforms-instance-list-oval:hover, left: 12px; } #users-to-delete > li, -#users-to-purge > li { +#users-to-purge > li, +#libraries-to-delete > li, +#libraries-to-purge > li { color: #e9a049; } #updatebar { @@ -2468,8 +2533,7 @@ a .home-platforms-instance-list-oval:hover, right: 0; bottom: 0; left: 0; - overflow-x: hidden; - overflow-y: scroll; + overflow: auto; -webkit-overflow-scrolling: touch; } ::-webkit-scrollbar { @@ -2496,12 +2560,19 @@ a .home-platforms-instance-list-oval:hover, width: 100%; } } +table.display, +table.display tr.shown + tr table[id^='history_child'], +table.display tr.shown + tr table[id^='media_info_child'], +table.display tr.shown + tr table[id^='media_info_child'] tr.shown + tr table[id^='media_info_child'] { + table-layout: auto; +} table.display tr.shown + tr div.slider { display: none; } table.display tr.shown + tr > td { padding-top: 0; padding-bottom: 0; + 
padding-left: 0; } table.display tr.shown + tr:hover { background-color: rgba(255,255,255,0); @@ -2512,7 +2583,9 @@ table.display tr.shown + tr .pagination > .active > a, table.display tr.shown + tr .pagination > .active > a:hover { color: #fff; } -table.display tr.shown + tr table[id^='history_child'] td:hover a { +table.display tr.shown + tr table[id^='history_child'] td:hover a, +table.display tr.shown + tr table[id^='media_info_child'] > tr > td:hover a, +table.display tr.shown + tr table[id^='media_info_child'] tr.shown + tr table[id^='media_info_child'] td:hover a { color: #F9AA03; } table.display tr.shown + tr .pagination > .disabled > a { @@ -2523,14 +2596,22 @@ table.display tr.shown + tr .pagination > li > a:hover { } table[id^='history_child'] { margin-top: 0; - margin-left: -4px; opacity: .6; } -table[id^='history_child'] thead th { +table[id^='media_info_child'] { + margin-top: 0; +} +table[id^='history_child'] thead th, +table[id^='media_info_child'] thead th { line-height: 0; height: 0 !important; overflow: hidden; } +table[id^='media_info_child'] table[id^='media_info_child'] thead th { + line-height: 25px; + height: 35px !important; + overflow: hidden; +} #search_form { width: 300px; padding: 8px 15px; @@ -2564,7 +2645,6 @@ table[id^='history_child'] thead th { -o-transition: background 0.3s; transition: background 0.3s; } - .notification-params { margin-top: 10px; background-color: #282828; @@ -2589,7 +2669,6 @@ table[id^='history_child'] thead th { .notification-params tr:nth-child(even) td { background-color: rgba(255,255,255,0.010); } - #days-selection label { margin-bottom: 0; } @@ -2598,3 +2677,36 @@ table[id^='history_child'] thead th { width: 75px; height: 34px; } +.card-sortable { + height: 36px; + padding: 0 20px 0 0; + line-height: 34px; + cursor: move; + cursor: -webkit-grab; + cursor: grab; + border-bottom: 1px solid #232323; + -webkit-user-select: none; + -moz-user-select: none; + -ms-user-select: none; + -o-user-select: none; + 
user-select: none; +} +.card { + position: relative; + background-color: #282828; + border-top: 1px solid #2d2d2d; +} +.card label { + font-weight: normal; +} +.card-handle { + display: inline-block; + width: 30px; + margin-right: 10px; + color: #444; + text-align: center; + background-color: #2f2f2f; +} +.selectize-input input[type='text'] { + height: 20px; +} \ No newline at end of file diff --git a/data/interfaces/default/current_activity.html b/data/interfaces/default/current_activity.html index 54d612fd..5b4ab21e 100644 --- a/data/interfaces/default/current_activity.html +++ b/data/interfaces/default/current_activity.html @@ -69,12 +69,12 @@ DOCUMENTATION :: END % for a in data['sessions']:
    % if a['media_type'] == 'movie' or a['media_type'] == 'episode' or a['media_type'] == 'track': - + % endif
    - % if a['media_type'] == 'movie' and not a['indexes']: + % if (a['media_type'] == 'movie' and not a['indexes']) or (a['indexes'] and not a['view_offset']):
    - % elif a['media_type'] == 'episode' and not a['indexes']: + % elif (a['media_type'] == 'episode' and not a['indexes']) or (a['indexes'] and not a['view_offset']):
    % elif a['indexes']: @@ -227,13 +227,13 @@ DOCUMENTATION :: END   % endif % if a['media_type'] == 'episode': -
    ${a['grandparent_title']} - ${a['title']} + ${a['grandparent_title']} - ${a['title']} % elif a['media_type'] == 'movie': - ${a['title']} + ${a['title']} % elif a['media_type'] == 'clip': ${a['title']} % elif a['media_type'] == 'track': - ${a['grandparent_title']} - ${a['title']} + ${a['grandparent_title']} - ${a['title']} % elif a['media_type'] == 'photo': ${a['parent_title']} % else: @@ -246,7 +246,7 @@ DOCUMENTATION :: END % elif a['media_type'] == 'movie': ${a['year']} % elif a['media_type'] == 'track': - ${a['parent_title']} + ${a['parent_title']} % elif a['media_type'] == 'photo': ${a['title']} % else: diff --git a/data/interfaces/default/edit_library.html b/data/interfaces/default/edit_library.html new file mode 100644 index 00000000..f3d86190 --- /dev/null +++ b/data/interfaces/default/edit_library.html @@ -0,0 +1,179 @@ +<%doc> +USAGE DOCUMENTATION :: PLEASE LEAVE THIS AT THE TOP OF THIS FILE + +For Mako templating syntax documentation please visit: http://docs.makotemplates.org/en/latest/ + +Filename: edit_library.html +Version: 0.1 +Variable names: data [list] + +data :: Usable parameters + +== Global keys == +section_id Returns the library id of the library. +section_name Returns the name of the library. +section_type Returns the type of the library. +library_thumb Returns the thumbnail for the library. +custom_thumb Returns the custom thumbnail for the library. +library_art Returns the artwork for the library. +count Returns the item count for the library. +parent_count Returns the parent item count for the library. +child_count Returns the child item count for the library. +do_notify Returns bool value for whether to send notifications for the library. +keep_history Returns bool value for whether to keep history for the library. + +DOCUMENTATION :: END + + +<%! 
+ from plexpy import helpers +%> + +% if data != None: + + + +% endif \ No newline at end of file diff --git a/data/interfaces/default/edit_user.html b/data/interfaces/default/edit_user.html index 502ab11a..583a918e 100644 --- a/data/interfaces/default/edit_user.html +++ b/data/interfaces/default/edit_user.html @@ -10,21 +10,30 @@ Variable names: data [list] data :: Usable parameters == Global keys == -user Return the real Plex username -user_id Return the Plex user_id -friendly_name Returns the friendly edited Plex username -do_notify Returns bool value for whether the user should trigger notifications -keep_history Returns bool value for whether the user's activity should be logged +user_id Returns the user id of the user. +username Returns the user's username. +friendly_name Returns the friendly name of the user. +email Returns the user's email address. +user_thumb Returns the thumbnail for the user. +is_home_user Returns bool value for whether the user is part of a Plex Home. +is_allow_sync Returns bool value for whether the user has sync rights. +is_restricted Returns bool value for whether the user account is restricted. +do_notify Returns bool value for whether to send notifications for the user. +keep_history Returns bool value for whether to keep history for the user. DOCUMENTATION :: END -% if data is not None: +<%! + from plexpy import helpers +%> + +% if data != None:
    - % endif \ No newline at end of file diff --git a/data/interfaces/default/history.html b/data/interfaces/default/history.html index 59cb8aa0..992f2df6 100644 --- a/data/interfaces/default/history.html +++ b/data/interfaces/default/history.html @@ -25,18 +25,18 @@ - - - - - - - - - - - - + + + + + + + + + + + + @@ -83,8 +83,8 @@ type: 'post', data: function (d) { return { - 'json_data': JSON.stringify(d), - 'media_type': media_type + json_data: JSON.stringify(d), + media_type: media_type }; } } diff --git a/data/interfaces/default/history_table_modal.html b/data/interfaces/default/history_table_modal.html index 19de1faf..a6d6804d 100644 --- a/data/interfaces/default/history_table_modal.html +++ b/data/interfaces/default/history_table_modal.html @@ -13,11 +13,11 @@
    DeleteTimeUserIP AddressPlatformPlayerTitleStartedPausedStoppedDurationDeleteTimeUserIP AddressPlatformPlayerTitleStartedPausedStoppedDuration
    - - - - - + + + + + @@ -34,13 +34,14 @@ $(document).ready(function() { $('#date-header').html(moment('${data}','YYYY-MM-DD').format('ddd MMM Do YYYY')); history_table_modal_options.ajax = { - "url": "get_history", - type: "post", + url: 'get_history', + type: 'post', data: function ( d ) { - return { 'json_data': JSON.stringify( d ), - 'grouping': false, - 'start_date': '${data}' - }; + return { + json_data: JSON.stringify(d), + grouping: false, + start_date: '${data}' + }; } } diff --git a/data/interfaces/default/home_stats.html b/data/interfaces/default/home_stats.html index fa4ac42d..de0995ea 100644 --- a/data/interfaces/default/home_stats.html +++ b/data/interfaces/default/home_stats.html @@ -70,7 +70,6 @@ DOCUMENTATION :: END %> % if data: -% if data[0]['stat_id']:
    StartedStoppedUserPlayerTitleStartedStoppedUserPlayerTitle
    ' + + '
    ' + '' + '' + '' + diff --git a/data/interfaces/default/js/tables/history_table_modal.js b/data/interfaces/default/js/tables/history_table_modal.js index 1080bdc3..30cd1db7 100644 --- a/data/interfaces/default/js/tables/history_table_modal.js +++ b/data/interfaces/default/js/tables/history_table_modal.js @@ -25,6 +25,7 @@ history_table_modal_options = { "serverSide": true, "pageLength": 10, "lengthChange": false, + "autoWidth": false, "order": [ 0, 'desc'], "columnDefs": [ { @@ -37,9 +38,9 @@ history_table_modal_options = { $(td).html(moment(cellData,"X").format(time_format)); } }, - "searchable": false, + "width": "10%", "className": "no-wrap", - "width": "5%" + "searchable": false }, { "targets": [1], @@ -51,9 +52,9 @@ history_table_modal_options = { $(td).html(moment(cellData,"X").format(time_format)); } }, - "searchable": false, + "width": "10%", "className": "no-wrap", - "width": "5%" + "searchable": false }, { "targets": [2], @@ -69,6 +70,7 @@ history_table_modal_options = { $(td).html(cellData); } }, + "width": "15%", "className": "no-wrap hidden-xs" }, { @@ -87,6 +89,7 @@ history_table_modal_options = { $(td).html('
    ' + transcode_dec + ' ' + cellData + '
    '); } }, + "width": "25%", "className": "no-wrap hidden-sm hidden-xs modal-control" }, { @@ -98,22 +101,23 @@ history_table_modal_options = { var thumb_popover = ''; if (rowData['media_type'] === 'movie') { media_type = ''; - thumb_popover = '' + cellData + ' (' + rowData['year'] + ')' - $(td).html('
    ' + media_type + ' ' + thumb_popover + '
    '); + thumb_popover = '' + cellData + ' (' + rowData['year'] + ')' + $(td).html('
    ' + media_type + ' ' + thumb_popover + '
    '); } else if (rowData['media_type'] === 'episode') { media_type = ''; - thumb_popover = '' + cellData + ' \ + thumb_popover = '' + cellData + ' \ (S' + rowData['parent_media_index'] + '· E' + rowData['media_index'] + ')' - $(td).html(''); + $(td).html(''); } else if (rowData['media_type'] === 'track') { media_type = ''; - thumb_popover = '' + cellData + ' (' + rowData['parent_title'] + ')' - $(td).html(''); + thumb_popover = '' + cellData + ' (' + rowData['parent_title'] + ')' + $(td).html(''); } else { - $(td).html('' + cellData + ''); + $(td).html('' + cellData + ''); } } - } + }, + "width": "40%" } ], "drawCallback": function (settings) { @@ -129,8 +133,9 @@ history_table_modal_options = { container: '#history-modal', trigger: 'hover', placement: 'right', + template: '', content: function () { - return '
    '; + return '
    '; } }); }, diff --git a/data/interfaces/default/js/tables/libraries.js b/data/interfaces/default/js/tables/libraries.js new file mode 100644 index 00000000..c49ef8ee --- /dev/null +++ b/data/interfaces/default/js/tables/libraries.js @@ -0,0 +1,293 @@ +var libraries_to_delete = []; +var libraries_to_purge = []; + +libraries_list_table_options = { + "language": { + "search": "Search: ", + "lengthMenu":"Show _MENU_ entries per page", + "info":"Showing _START_ to _END_ of _TOTAL_ active libraries", + "infoEmpty":"Showing 0 to 0 of 0 entries", + "infoFiltered":"", + "emptyTable": "No data in table", + }, + "destroy": true, + "processing": false, + "serverSide": true, + "pageLength": 10, + "order": [ 2, 'asc'], + "autoWidth": true, + "stateSave": true, + "pagingType": "bootstrap", + "columnDefs": [ + { + "targets": [0], + "data": null, + "createdCell": function (td, cellData, rowData, row, col) { + $(td).html('
    ' + + ' ' + + '   ' + + ' ' + + ' ' + + ' ' + + '
    '); + }, + "width": "7%", + "className": "edit-control no-wrap hidden", + "searchable": false, + "orderable": false + }, + { + "targets": [1], + "data": "library_thumb", + "createdCell": function (td, cellData, rowData, row, col) { + if (cellData !== null && cellData !== '') { + if (rowData['library_thumb'].substring(0, 4) == "http") { + $(td).html('
    '); + } else { + $(td).html('
    '); + } + } else { + $(td).html('
    '); + } + }, + "orderable": false, + "searchable": false, + "width": "5%", + "className": "libraries-thumbs" + }, + { + "targets": [2], + "data": "section_name", + "createdCell": function (td, cellData, rowData, row, col) { + if (cellData !== null && cellData !== '') { + $(td).html('
    ' + + '' + cellData + '' + + '
    '); + } else { + $(td).html('n/a'); + } + }, + "width": "10%", + "className": "no-wrap" + }, + { + "targets": [3], + "data": "section_type", + "createdCell": function (td, cellData, rowData, row, col) { + if (cellData !== null && cellData !== '') { + $(td).html(cellData); + } + }, + "width": "10%", + "className": "no-wrap hidden-xs" + }, + { + "targets": [4], + "data": "count", + "createdCell": function (td, cellData, rowData, row, col) { + if (cellData !== null && cellData !== '') { + $(td).html(cellData); + } + + }, + "width": "10%", + "className": "no-wrap hidden-xs" + }, + { + "targets": [5], + "data": "parent_count", + "createdCell": function (td, cellData, rowData, row, col) { + if (cellData !== null && cellData !== '') { + $(td).html(cellData); + } + + }, + "width": "10%", + "className": "no-wrap hidden-xs" + }, + { + "targets": [6], + "data": "child_count", + "createdCell": function (td, cellData, rowData, row, col) { + if (cellData !== null && cellData !== '') { + $(td).html(cellData); + } + + }, + "width": "10%", + "className": "no-wrap hidden-xs" + }, + { + "targets": [7], + "data": "last_accessed", + "createdCell": function (td, cellData, rowData, row, col) { + if (cellData !== null && cellData !== '') { + $(td).html(moment(cellData, "X").fromNow()); + } else { + $(td).html("never"); + } + }, + "searchable": false, + "width": "10%", + "className": "no-wrap hidden-xs" + }, + { + "targets": [8], + "data":"last_watched", + "createdCell": function (td, cellData, rowData, row, col) { + if (cellData !== null && cellData !== '') { + var media_type = ''; + var thumb_popover = '' + if (rowData['media_type'] === 'movie') { + media_type = ''; + thumb_popover = '' + cellData + '' + $(td).html(''); + } else if (rowData['media_type'] === 'episode') { + media_type = ''; + thumb_popover = '' + cellData + '' + $(td).html(''); + } else if (rowData['media_type'] === 'track') { + media_type = ''; + thumb_popover = '' + cellData + '' + $(td).html(''); + } else if 
(rowData['media_type']) { + $(td).html('' + cellData + ''); + } + } else { + $(td).html('n/a'); + } + }, + "width": "25%", + "className": "hidden-sm hidden-xs" + }, + { + "targets": [9], + "data": "plays", + "searchable": false, + "width": "10%" + } + + ], + "drawCallback": function (settings) { + // Jump to top of page + //$('html,body').scrollTop(0); + $('#ajaxMsg').fadeOut(); + + // Create the tooltips. + $('.purge-tooltip').tooltip(); + $('.edit-tooltip').tooltip(); + $('.transcode-tooltip').tooltip(); + $('.media-type-tooltip').tooltip(); + $('.thumb-tooltip').popover({ + html: true, + container: 'body', + trigger: 'hover', + placement: 'right', + template: '', + content: function () { + return '
    '; + } + }); + + if ($('#row-edit-mode').hasClass('active')) { + $('.edit-control').each(function () { + $(this).removeClass('hidden'); + }); + } + }, + "preDrawCallback": function(settings) { + var msg = " Fetching rows..."; + showMsg(msg, false, false, 0) + }, + "rowCallback": function (row, rowData) { + if ($.inArray(rowData['section_id'], libraries_to_delete) !== -1) { + $(row).find('button.delete-library[data-id="' + rowData['section_id'] + '"]').toggleClass('btn-warning').toggleClass('btn-danger'); + } + if ($.inArray(rowData['section_id'], libraries_to_purge) !== -1) { + $(row).find('button.purge-library[data-id="' + rowData['section_id'] + '"]').toggleClass('btn-warning').toggleClass('btn-danger'); + } + } +} + +$('#libraries_list_table').on('change', 'td.edit-control > .edit-library-toggles > input', function () { + var tr = $(this).parents('tr'); + var row = libraries_list_table.row(tr); + var rowData = row.data(); + + var do_notify = 0; + var do_notify_created = 0; + var keep_history = 0; + if ($('#do_notify-' + rowData['section_id']).is(':checked')) { + do_notify = 1; + } + if ($('#do_notify_created-' + rowData['section_id']).is(':checked')) { + do_notify_created = 1; + } + if ($('#keep_history-' + rowData['section_id']).is(':checked')) { + keep_history = 1; + } + if (rowData['custom_thumb']) { + custom_thumb = rowData['custom_thumb'] + } else { + custom_thumb = rowData['library_thumb'] + } + + $.ajax({ + url: 'edit_library', + data: { + section_id: rowData['section_id'], + do_notify: do_notify, + do_notify_created: do_notify_created, + keep_history: keep_history, + custom_thumb: custom_thumb + }, + cache: false, + async: true, + success: function (data) { + var msg = "Library updated"; + showMsg(msg, false, true, 2000); + } + }); +}); + +$('#libraries_list_table').on('click', 'td.edit-control > .edit-library-toggles > button.delete-library', function () { + var tr = $(this).parents('tr'); + var row = libraries_list_table.row(tr); + var rowData = 
row.data(); + + var index_delete = $.inArray(rowData['section_id'], libraries_to_delete); + var index_purge = $.inArray(rowData['section_id'], libraries_to_purge); + + if (index_delete === -1) { + libraries_to_delete.push(rowData['section_id']); + if (index_purge === -1) { + tr.find('button.purge-library').click(); + } + } else { + libraries_to_delete.splice(index_delete, 1); + if (index_purge != -1) { + tr.find('button.purge-library').click(); + } + } + $(this).toggleClass('btn-warning').toggleClass('btn-danger'); + +}); + +$('#libraries_list_table').on('click', 'td.edit-control > .edit-library-toggles > button.purge-library', function () { + var tr = $(this).parents('tr'); + var row = libraries_list_table.row(tr); + var rowData = row.data(); + + var index_delete = $.inArray(rowData['section_id'], libraries_to_delete); + var index_purge = $.inArray(rowData['section_id'], libraries_to_purge); + + if (index_purge === -1) { + libraries_to_purge.push(rowData['section_id']); + } else { + libraries_to_purge.splice(index_purge, 1); + if (index_delete != -1) { + tr.find('button.delete-library').click(); + } + } + $(this).toggleClass('btn-warning').toggleClass('btn-danger'); +}); \ No newline at end of file diff --git a/data/interfaces/default/js/tables/media_info_table.js b/data/interfaces/default/js/tables/media_info_table.js new file mode 100644 index 00000000..860f06ab --- /dev/null +++ b/data/interfaces/default/js/tables/media_info_table.js @@ -0,0 +1,479 @@ +var date_format = 'YYYY-MM-DD'; +var time_format = 'hh:mm a'; + +$.ajax({ + url: 'get_date_formats', + type: 'GET', + success: function(data) { + date_format = data.date_format; + time_format = data.time_format; + } +}); + +var refresh_child_tables = false; + +media_info_table_options = { + "destroy": true, + "language": { + "search": "Search: ", + "lengthMenu":"Show _MENU_ entries per page", + "info":"Showing _START_ to _END_ of _TOTAL_ library items", + "infoEmpty":"Showing 0 to 0 of 0 entries", + 
"infoFiltered":"", + "emptyTable": "No data in table" + }, + "pagingType": "bootstrap", + "stateSave": false, + "processing": false, + "serverSide": true, + "pageLength": 25, + "order": [ 1, 'asc'], + "autoWidth": false, + "columnDefs": [ + { + "targets": [0], + "data": "added_at", + "createdCell": function (td, cellData, rowData, row, col) { + if (cellData !== null && cellData !== '') { + var expand_details = ''; + var date = moment(cellData, "X").format(date_format); + if (rowData['media_type'] === 'show') { + expand_details = ''; + $(td).html(''); + } else if (rowData['media_type'] === 'season') { + expand_details = ''; + $(td).html(''); + } else if (rowData['media_type'] === 'artist') { + expand_details = ''; + $(td).html(''); + } else if (rowData['media_type'] === 'album') { + expand_details = ''; + $(td).html(''); + } else if (rowData['media_type'] === 'photo' && rowData['parent_rating_key'] == '') { + expand_details = ''; + $(td).html(''); + } else { + $(td).html('
     ' + date + '
    '); + } + } + }, + "width": "7%", + "className": "no-wrap expand-media-info", + "searchable": false + }, + { + "targets": [1], + "data": "title", + "createdCell": function (td, cellData, rowData, row, col) { + if (cellData !== null && cellData !== '') { + var media_type = ''; + var thumb_popover = ''; + if (rowData['media_type'] === 'movie') { + media_type = ''; + thumb_popover = '' + cellData + ' (' + rowData['year'] + ')' + $(td).html(''); + } else if (rowData['media_type'] === 'show') { + media_type = ''; + thumb_popover = '' + cellData + '' + $(td).html(''); + } else if (rowData['media_type'] === 'season') { + media_type = ''; + thumb_popover = '' + cellData + '' + $(td).html(''); + } else if (rowData['media_type'] === 'episode') { + media_type = ''; + thumb_popover = 'E' + rowData['media_index'] + ' - ' + cellData + '' + $(td).html(''); + } else if (rowData['media_type'] === 'artist') { + media_type = ''; + thumb_popover = '' + cellData + '' + $(td).html(''); + } else if (rowData['media_type'] === 'album') { + media_type = ''; + thumb_popover = '' + cellData + '' + $(td).html(''); + } else if (rowData['media_type'] === 'track') { + media_type = ''; + thumb_popover = 'T' + rowData['media_index'] + ' - ' + cellData + '' + $(td).html(''); + } else { + $(td).html(cellData); + } + } + }, + "width": "20%" + }, + { + "targets": [2], + "data": "container", + "createdCell": function (td, cellData, rowData, row, col) { + if (cellData !== null && cellData !== '') { + $(td).html(cellData); + } + }, + "width": "6%", + "className": "no-wrap hidden-sm hidden-xs" + }, + { + "targets": [3], + "data": "bitrate", + "createdCell": function (td, cellData, rowData, row, col) { + if (cellData !== null && cellData !== '') { + $(td).html(cellData + ' kbps'); + } + }, + "width": "6%", + "className": "no-wrap hidden-md hidden-sm hidden-xs", + "searchable": false + }, + { + "targets": [4], + "data": "video_codec", + "createdCell": function (td, cellData, rowData, row, col) { + if 
(cellData !== null && cellData !== '') { + $(td).html(cellData); + } + }, + "width": "8%", + "className": "no-wrap hidden-sm hidden-xs" + }, + { + "targets": [5], + "data": "video_resolution", + "createdCell": function (td, cellData, rowData, row, col) { + if (cellData !== null && cellData !== '') { + $(td).html(cellData); + } + }, + "width": "8%", + "className": "no-wrap hidden-md hidden-sm hidden-xs" + }, + { + "targets": [6], + "data": "video_framerate", + "createdCell": function (td, cellData, rowData, row, col) { + if (cellData !== null && cellData !== '') { + $(td).html(cellData); + } + }, + "width": "8%", + "className": "no-wrap hidden-md hidden-sm hidden-xs" + }, + { + "targets": [7], + "data": "audio_codec", + "createdCell": function (td, cellData, rowData, row, col) { + if (cellData !== null && cellData !== '') { + $(td).html(cellData); + } + }, + "width": "8%", + "className": "no-wrap hidden-sm hidden-xs" + }, + { + "targets": [8], + "data": "audio_channels", + "createdCell": function (td, cellData, rowData, row, col) { + if (cellData !== null && cellData !== '') { + $(td).html(cellData + ' ch'); + } + }, + "width": "8%", + "className": "no-wrap hidden-md hidden-sm hidden-xs" + }, + { + "targets": [9], + "data": "file_size", + "createdCell": function (td, cellData, rowData, row, col) { + if (cellData !== null && cellData !== '') { + $(td).html(Math.round(cellData / Math.pow(1024, 2)).toString() + ' MiB'); + } else { + if (rowData['section_type'] != 'photo' && get_file_sizes != null) { + get_file_sizes = true; + } + } + }, + "width": "7%", + "className": "no-wrap hidden-md hidden-sm hidden-xs", + "searchable": false + }, + { + "targets": [10], + "data": "last_watched", + "createdCell": function (td, cellData, rowData, row, col) { + if (cellData !== null && cellData !== '') { + date = moment(cellData, "X").format(date_format); + $(td).html(date); + } + }, + "width": "7%", + "className": "no-wrap hidden-xs", + "searchable": false + }, + { + "targets": [11], 
+ "data": "play_count", + "createdCell": function (td, cellData, rowData, row, col) { + if (cellData !== null && cellData !== '') { + $(td).html(cellData); + } + }, + "width": "5%", + "className": "no-wrap hidden-xs", + "searchable": false + } + ], + "drawCallback": function (settings) { + // Jump to top of page + // $('html,body').scrollTop(0); + $('#ajaxMsg').fadeOut(); + + // Create the tooltips. + $('.expand-media-info-tooltip').tooltip({ container: 'body' }); + $('.media-type-tooltip').tooltip({ container: 'body' }); + $('.thumb-tooltip').popover({ + html: true, + container: 'body', + trigger: 'hover', + placement: 'right', + template: '', + content: function () { + return '
    '; + } + }); + + media_info_table.rows().every(function () { + var rowData = this.data(); + if (rowData['rating_key'] in media_info_child_table) { + // if a child table was already created + $(this.node()).find('i.fa.fa-plus-circle').toggleClass('fa-plus-circle').toggleClass('fa-minus-circle'); + this.child(childTableFormatMedia(rowData)).show(); + createChildTableMedia(this, rowData) + } + }); + + if (get_file_sizes) { + $('#get_file_sizes_message').show(); + $('#refresh-media-info-table').prop('disabled', true); + $.ajax({ + url: 'get_media_info_file_sizes', + async: true, + data: { section_id: section_id }, + complete: function (xhr, status) { + response = JSON.parse(xhr.responseText) + if (response.success == true) { + $('#get_file_sizes_message').hide(); + $('#refresh-media-info-table').prop('disabled', false); + media_info_table.draw(); + } + } + }); + get_file_sizes = false; + } + + $("#media_info_table_info").append(''); + }, + "preDrawCallback": function(settings) { + var msg = " Fetching rows..."; + showMsg(msg, false, false, 0) + }, + "rowCallback": function (row, rowData, rowIndex) { + if (rowData['rating_key'] in media_info_child_table) { + // if a child table was already created + $(row).addClass('shown') + media_info_table.row(row).child(childTableFormatMedia(rowData)).show(); + } + } +} + +// Parent table expand detailed media info +$('#media_info_table').on('click', '> tbody > tr > td.expand-media-info a', function () { + var tr = $(this).closest('tr'); + var row = media_info_table.row(tr); + var rowData = row.data(); + + $(this).find('i.fa').toggleClass('fa-plus-circle').toggleClass('fa-minus-circle'); + + if (row.child.isShown()) { + $('div.slider', row.child()).slideUp(function () { + row.child.hide(); + tr.removeClass('shown'); + delete media_info_child_table[rowData['rating_key']]; + }); + } else { + tr.addClass('shown'); + row.child(childTableFormatMedia(rowData)).show(); + createChildTableMedia(row, rowData); + } +}); + +// Initialize 
the detailed media info child table options using the parent table options +function childTableOptionsMedia(rowData) { + switch (rowData['media_type']) { + case 'show': + section_type = 'season'; + break; + case 'season': + section_type = 'episode'; + break; + case 'artist': + section_type = 'album'; + break; + case 'album': + section_type = 'track'; + break; + case 'photo': + section_type = 'picture'; + break; + } + + media_info_table_options = media_info_table_options; + // Remove settings that are not necessary + media_info_table_options.searching = false; + media_info_table_options.lengthChange = false; + media_info_table_options.info = false; + media_info_table_options.pageLength = 10; + media_info_table_options.bStateSave = false; + media_info_table_options.ajax = { + url: 'get_library_media_info', + type: 'post', + data: function (d) { + return { + json_data: JSON.stringify(d), + section_id: rowData['section_id'], + section_type: section_type, + rating_key: rowData['rating_key'], + refresh: refresh_child_tables + }; + } + } + media_info_table_options.fnDrawCallback = function (settings) { + $('#ajaxMsg').fadeOut(); + + // Create the tooltips. + $('.expand-media-info-tooltip').tooltip({ container: 'body' }); + $('.media-type-tooltip').tooltip(); + $('.thumb-tooltip').popover({ + html: true, + container: 'body', + trigger: 'hover', + placement: 'right', + template: '', + content: function () { + return '
    '; + } + }); + + if (rowData['rating_key'] in media_info_child_table) { + media_info_child_table[rowData['rating_key']].rows().every(function () { + var childrowData = this.data(); + if (childrowData['rating_key'] in media_info_child_table) { + // if a child table was already created + $(this.node()).find('i.fa.fa-plus-circle').toggleClass('fa-plus-circle').toggleClass('fa-minus-circle'); + this.child(childTableFormatMedia(childrowData)).show(); + createChildTableMedia(this, childrowData) + } + }); + } + + if (get_file_sizes) { + $('#refresh-media-info-table').prop('disabled', true); + $.ajax({ + url: 'get_media_info_file_sizes', + async: true, + data: { + section_id: section_id, + rating_key: rowData['rating_key'] + }, + complete: function (xhr, status) { + response = JSON.parse(xhr.responseText) + if (response.success == true) { + $('#refresh-media-info-table').prop('disabled', false); + media_info_child_table[rowData['rating_key']].draw(); + } + } + }); + get_file_sizes = false; + } + + $(this).closest('div.slider').slideDown(); + } + media_info_table_options.fnRowCallback = function (row, rowData, rowIndex) { + if (rowData['rating_key'] in media_info_child_table) { + // if a child table was already created + $(row).addClass('shown') + media_info_table.row(row).child(childTableFormatMedia(rowData)).show(); + } + } + + return media_info_table_options; +} + +// Format the detailed media info child table +function childTableFormatMedia(rowData) { + return '
    ' + + '
    Delete
    ' + + '' + + '' + + '' + + '' + + '' + + '' + + '' + + '' + + '' + + '' + + '' + + '' + + '' + + '' + + '' + + '' + + '' + + '' + + '
    Added AtTitleContainerBitrateVideo CodecVideo ResolutionVideo FramerateAudio CodecAudio ChannelsFile SizeLast WatchedTotal Plays
    ' + + '
    '; +} + +// Create the detailed media info child table +media_info_child_table = {}; +function createChildTableMedia(row, rowData) { + media_info_table_options = childTableOptionsMedia(rowData); + // initialize the child table + media_info_child_table[rowData['rating_key']] = $('#media_info_child-' + rowData['rating_key']).DataTable(media_info_table_options); + + // Set child table column visibility to match parent table + var visibility = media_info_table.columns().visible(); + for (var i = 0; i < visibility.length; i++) { + if (!(visibility[i])) { media_info_child_table[rowData['rating_key']].column(i).visible(visibility[i]); } + } + media_info_table.on('column-visibility', function (e, settings, colIdx, visibility) { + if (row.child.isShown()) { + media_info_child_table[rowData['rating_key']].column(colIdx).visible(visibility); + } + }); + + // Child table expand detailed media info + $('table[id^=media_info_child-' + rowData['rating_key'] + ']').on('click', '> tbody > tr > td.expand-media-info a', function () { + var table_id = $(this).closest('table').data('id'); + var tr = $(this).closest('tr'); + var row = media_info_child_table[table_id].row(tr); + var rowData = row.data(); + + $(this).find('i.fa').toggleClass('fa-plus-circle').toggleClass('fa-minus-circle'); + + if (row.child.isShown()) { + $('div.slider', row.child()).slideUp(function () { + row.child.hide(); + tr.removeClass('shown'); + delete media_info_child_table[rowData['rating_key']]; + }); + } else { + tr.addClass('shown'); + row.child(childTableFormatMedia(rowData)).show(); + createChildTableMedia(row, rowData); + } + }); +} \ No newline at end of file diff --git a/data/interfaces/default/js/tables/plex_logs.js b/data/interfaces/default/js/tables/plex_logs.js index 50b76199..8a1109ba 100644 --- a/data/interfaces/default/js/tables/plex_logs.js +++ b/data/interfaces/default/js/tables/plex_logs.js @@ -4,7 +4,7 @@ var plex_log_table_options = { "serverSide": false, "pagingType": "bootstrap", 
"order": [ 0, 'desc'], - "pageLength": 10, + "pageLength": 50, "stateSave": true, "language": { "search":"Search: ", diff --git a/data/interfaces/default/js/tables/sync_table.js b/data/interfaces/default/js/tables/sync_table.js index f4b85407..94f4e289 100644 --- a/data/interfaces/default/js/tables/sync_table.js +++ b/data/interfaces/default/js/tables/sync_table.js @@ -50,7 +50,7 @@ sync_table_options = { "createdCell": function (td, cellData, rowData, row, col) { if (cellData !== '') { if (rowData['metadata_type'] !== '') { - $(td).html('' + cellData + ''); + $(td).html('' + cellData + ''); } else { $(td).html(cellData); } diff --git a/data/interfaces/default/js/tables/user_ips.js b/data/interfaces/default/js/tables/user_ips.js index 506fe43a..46cbee3a 100644 --- a/data/interfaces/default/js/tables/user_ips.js +++ b/data/interfaces/default/js/tables/user_ips.js @@ -88,18 +88,18 @@ user_ip_table_options = { var thumb_popover = '' if (rowData['media_type'] === 'movie') { media_type = ''; - thumb_popover = '' + cellData + '' - $(td).html('
    ' + media_type + ' ' + thumb_popover + '
    '); + thumb_popover = '' + cellData + '' + $(td).html('
    ' + media_type + ' ' + thumb_popover + '
    '); } else if (rowData['media_type'] === 'episode') { media_type = ''; - thumb_popover = '' + cellData + '' - $(td).html('
    ' + media_type + ' ' + thumb_popover + '
    '); + thumb_popover = '' + cellData + '' + $(td).html('
    ' + media_type + ' ' + thumb_popover + '
    '); } else if (rowData['media_type'] === 'track') { media_type = ''; - thumb_popover = '' + cellData + '' - $(td).html('
    ' + media_type + ' ' + thumb_popover + '
    '); + thumb_popover = '' + cellData + '' + $(td).html('
    ' + media_type + ' ' + thumb_popover + '
    '); } else if (rowData['media_type']) { - $(td).html('' + cellData + ''); + $(td).html('' + cellData + ''); } else { $(td).html('n/a'); } @@ -126,10 +126,12 @@ user_ip_table_options = { $('.watched-tooltip').tooltip(); $('.thumb-tooltip').popover({ html: true, + container: 'body', trigger: 'hover', placement: 'right', + template: '', content: function () { - return '
    '; + return '
    '; } }); diff --git a/data/interfaces/default/js/tables/users.js b/data/interfaces/default/js/tables/users.js index ab03435e..7563868f 100644 --- a/data/interfaces/default/js/tables/users.js +++ b/data/interfaces/default/js/tables/users.js @@ -23,12 +23,12 @@ users_list_table_options = { "targets": [0], "data": null, "createdCell": function (td, cellData, rowData, row, col) { - $(td).html('
     ' + + $(td).html('
    ' + + ' ' + '   ' + ' ' + - ' '); - // Show/hide user currently doesn't work - //''); + ' ' + + '
    '); }, "width": "7%", "className": "edit-control no-wrap hidden", @@ -42,7 +42,7 @@ users_list_table_options = { if (cellData === '') { $(td).html('
    '); } else { - $(td).html('
    '); + $(td).html('
    '); } }, "orderable": false, @@ -54,16 +54,13 @@ users_list_table_options = { "targets": [2], "data": "friendly_name", "createdCell": function (td, cellData, rowData, row, col) { - if (cellData !== '') { - if (rowData['user_id'] > 0) { - $(td).html(''); - } else { - $(td).html(''); - } + if (cellData !== null && cellData !== '') { + $(td).html('
    ' + + '' + cellData + '' + + '' + + '
    '); } else { - $(td).html(cellData); + $(td).html('n/a'); } }, "width": "10%", @@ -72,11 +69,11 @@ users_list_table_options = { { "targets": [3], "data": "last_seen", - "render": function ( data, type, full ) { - if (data) { - return moment(data, "X").fromNow(); + "createdCell": function (td, cellData, rowData, row, col) { + if (cellData !== null && cellData !== '') { + $(td).html(moment(cellData, "X").fromNow()); } else { - return "never"; + $(td).html("never"); } }, "searchable": false, @@ -108,7 +105,7 @@ users_list_table_options = { "targets": [5], "data": "platform", "createdCell": function (td, cellData, rowData, row, col) { - if (cellData !== '') { + if (cellData !== null && cellData !== '') { $(td).html(cellData); } else { $(td).html('n/a'); @@ -121,7 +118,7 @@ users_list_table_options = { "targets": [6], "data":"player", "createdCell": function (td, cellData, rowData, row, col) { - if (cellData) { + if (cellData !== null && cellData !== '') { var transcode_dec = ''; if (rowData['video_decision'] === 'transcode') { transcode_dec = ''; @@ -142,26 +139,26 @@ users_list_table_options = { "targets": [7], "data":"last_watched", "createdCell": function (td, cellData, rowData, row, col) { - if (cellData !== '') { + if (cellData !== null && cellData !== '') { var media_type = ''; var thumb_popover = '' if (rowData['media_type'] === 'movie') { media_type = ''; - thumb_popover = '' + cellData + '' - $(td).html(''); + thumb_popover = '' + cellData + '' + $(td).html(''); } else if (rowData['media_type'] === 'episode') { media_type = ''; - thumb_popover = '' + cellData + '' - $(td).html(''); + thumb_popover = '' + cellData + '' + $(td).html(''); } else if (rowData['media_type'] === 'track') { media_type = ''; - thumb_popover = '' + cellData + '' - $(td).html(''); + thumb_popover = '' + cellData + '' + $(td).html(''); } else if (rowData['media_type']) { - $(td).html('' + cellData + ''); - } else { - $(td).html('n/a'); + $(td).html('' + cellData + ''); } + } else { + 
$(td).html('n/a'); } }, "width": "30%", @@ -190,8 +187,9 @@ users_list_table_options = { html: true, trigger: 'hover', placement: 'right', + template: '', content: function () { - return '
    '; + return '
    '; } }); @@ -206,8 +204,11 @@ users_list_table_options = { showMsg(msg, false, false, 0) }, "rowCallback": function (row, rowData) { + if ($.inArray(rowData['user_id'], users_to_delete) !== -1) { + $(row).find('button.delete-user[data-id="' + rowData['user_id'] + '"]').toggleClass('btn-warning').toggleClass('btn-danger'); + } if ($.inArray(rowData['user_id'], users_to_purge) !== -1) { - $(row).find('button[data-id="' + rowData['user_id'] + '"]').toggleClass('btn-warning').toggleClass('btn-danger'); + $(row).find('button.purge-user[data-id="' + rowData['user_id'] + '"]').toggleClass('btn-warning').toggleClass('btn-danger'); } } } diff --git a/data/interfaces/default/libraries.html b/data/interfaces/default/libraries.html new file mode 100644 index 00000000..78697293 --- /dev/null +++ b/data/interfaces/default/libraries.html @@ -0,0 +1,202 @@ +<%inherit file="base.html"/> + +<%def name="headIncludes()"> + + + + +<%def name="body()"> +
    + % if config['update_section_ids'] == 1: +
    + PlexPy needs to update the Library IDs in your databse. Click the "Refresh libraries" button below to begin the update. +
    + % elif config['update_section_ids'] == -1: +
    + PlexPy is updating library IDs in the database. This could take a few minutes depending on the size of your database. +
    + You may leave this page and come back later. +
    + % endif +
    +
    + All Libraries +
    +
    + % if config['update_section_ids'] == -1: + + % else: + + % endif +   + +
    +
    +
    + + + + + + + + + + + + + + + + + +
    EditLibrary NameLibrary TypeTotal Movies / TV Shows / ArtistsTotal Seasons / AlbumsTotal Episodes / TracksLast AccessedLast WatchedTotal Plays
    + +
    +
    + + + +<%def name="javascriptIncludes()"> + + + + + + + \ No newline at end of file diff --git a/data/interfaces/default/library.html b/data/interfaces/default/library.html new file mode 100644 index 00000000..4f75e7fd --- /dev/null +++ b/data/interfaces/default/library.html @@ -0,0 +1,519 @@ +<%doc> +USAGE DOCUMENTATION :: PLEASE LEAVE THIS AT THE TOP OF THIS FILE + +For Mako templating syntax documentation please visit: http://docs.makotemplates.org/en/latest/ + +Filename: library.html +Version: 0.1 +Variable names: data [list] + +data :: Usable parameters + +section_id Returns the library id of the library. +section_name Returns the name of the library. +section_type Returns the type of the library. +library_thumb Returns the thumbnail for the library. +custom_thumb Returns the custom thumbnail for the library. +library_art Returns the artwork for the library. +count Returns the item count for the library. +parent_count Returns the parent item count for the library. +child_count Returns the child item count for the library. +do_notify Returns bool value for whether to send notifications for the library. +keep_history Returns bool value for whether to keep history for the library. + +DOCUMENTATION :: END + + + +<%inherit file="base.html"/> + +<%def name="headIncludes()"> + + + + + +<%def name="body()"> +% if data: +
    +
    +
    +
    +
    +
    +
    + +
    +
    +
    +
    +
    +
    + +
    +
    + +
    +
    +
    +
    +
    +
    +
    + Global Stats +
    +
    +
    +
    +
    Loading data...
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    + User Stats +
    +
    +
    +
    +
    Loading data...
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    + Recently Watched +
    +
    +
    +
    +
    Loading data...
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    + Recently Added +
    +
    +
    +
    +
    Loading data...
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    + + Watch History for + ${data['section_name']} + + +
    +
    + + + +
    +
    +
    + + + + + + + + + + + + + + + + + + +
    DeleteTimeUserIP AddressPlatformPlayerTitleStartedPausedStoppedDuration
    +
    +
    +
    +
    +
    +
    +
    +
    +
    + % if config['get_file_sizes'] and data['section_id'] in config['get_file_sizes_hold']['section_ids']: +
    + % else: + +
    +
    + + All Media Info for + ${data['section_name']} + + +
    +
    + % if config['get_file_sizes'] and data['section_id'] in config['get_file_sizes_hold']['section_ids']: + + % else: + + % endif + +
    +
    +
    + + + + + + + + + + + + + + + + + + +
    Added AtTitleContainerBitrateVideo CodecVideo ResolutionVideo FramerateAudio CodecAudio ChannelsFile SizeLast WatchedTotal Plays
    +
    +
    +
    +
    +
    + + + +
    +
    +
    +
    +
    +
    +% else: +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    + Error retrieving library information. Please see the logs for more details. +
    +
    +
    +
    +
    +
    +% endif + + +<%def name="javascriptIncludes()"> + + + + +% if data: + + + + + +% endif + \ No newline at end of file diff --git a/data/interfaces/default/library_recently_added.html b/data/interfaces/default/library_recently_added.html new file mode 100644 index 00000000..264bce2b --- /dev/null +++ b/data/interfaces/default/library_recently_added.html @@ -0,0 +1,96 @@ +<%doc> +USAGE DOCUMENTATION :: PLEASE LEAVE THIS AT THE TOP OF THIS FILE + +For Mako templating syntax documentation please visit: http://docs.makotemplates.org/en/latest/ + +Filename: library_recently_added.html +Version: 0.1 +Variable names: data [array] + +data[array_index] :: Usable parameters + +== Global keys == +media_type Returns the type of media. Either 'movie' or 'episode' or 'album'. +rating_key Returns the unique identifier for the media item. +parent_rating_key Returns the unique identifier for the season or artist. +grandparent_rating_key Returns the unique identifier for the show. +title Returns the name of the movie, episode, album. +parent_title Returns the name of the artist. +grandparent_title Returns the name of the show. +media_index Returns the index number of the episode. +parent_media_index Returns the index number of the season. +section_id Returns the library section number of the media item. +library_name Returns the library section name of the media item. +year Returns the release year of the movie, episode, or album. +thumb Returns the location of the item's thumbnail. Use with pms_image_proxy. +parent_thumb Returns the location of the artist's thumbnail. Use with pms_image_proxy. +grandparent_thumb Returns the location of the show's thumbnail. Use with pms_image_proxy. +added_at Returns the time when the media was added to the library. + +DOCUMENTATION :: END + + +% if data: + +% else: +
    Unable to retrieve data from database. +

    +% endif \ No newline at end of file diff --git a/data/interfaces/default/library_stats.html b/data/interfaces/default/library_stats.html index f310ab5c..048d5e6f 100644 --- a/data/interfaces/default/library_stats.html +++ b/data/interfaces/default/library_stats.html @@ -9,23 +9,16 @@ Variable names: data [array] data[array_index] :: Usable parameters -data['type'] Returns the type of the library. Either 'movie', 'show', 'photo', or 'artist'. -data['rows'] Returns an array containing stat data +data Returns an array containing stat data -data[array_index]['rows'] :: Usable parameters +data[array_index] :: Usable parameters -title Returns the title of the library. +section_name Returns the title of the library. +section_type Returns the type of the library. thumb Returns the thumb of the library. -count Returns the number of items in the library. -count_type Returns the sorting type for the library - -== Only if 'type' is 'show' -episode_count Return the number of episodes in the library. -episode_count_type Return the sorting type for the episodes. - -== Only if 'type' is 'artist' -album_count Return the number of episodes in the library. -album_count_type Return the sorting type for the episodes. +count Returns the number of top level items in the library. +parent_count Returns the number of parent items in the library. +child_count Returns the number of child items in the library. DOCUMENTATION :: END @@ -37,37 +30,54 @@ DOCUMENTATION :: END
  • - % if library['type'] != 'photo':

    - ${library['rows']['title']} + ${library['section_name']}

    - % else: -

    ${library['rows']['title']}

    - % endif
    + % if library['section_type'] == 'movie':
    -
    ${library['rows']['count_type']}
    -

    ${library['rows']['count']}

    +
    All Movies
    +

    ${library['count']}

    items

    - % if library['type'] == 'show': -
    -
    ${library['rows']['episode_count_type']}
    -

    ${library['rows']['episode_count']}

    + % elif library['section_type'] == 'show': +
    +
    All Shows
    +

    ${library['count']}

    items

    - % endif - % if library['type'] == 'artist':
    -
    ${library['rows']['album_count_type']}
    -

    ${library['rows']['album_count']}

    +
    All Episodes
    +

    ${library['child_count']}

    +

    items

    +
    + % elif library['section_type'] == 'artist': +
    +
    All Artists
    +

    ${library['count']}

    +

    items

    +
    +
    +
    All Albums
    +

    ${library['parent_count']}

    +

    items

    +
    + % elif library['section_type'] == 'photo': +
    +
    All Albums
    +

    ${library['count']}

    +

    items

    +
    +
    +
    All Photos
    +

    ${library['child_count']}

    items

    % endif
    - % if library['rows']['thumb']: + % if library['thumb']:
    -
    +
    % else:
    @@ -79,6 +89,5 @@ DOCUMENTATION :: END % endfor % else: -
    Unable to retrieve data from server. Please check your settings. -

    +
    No stats to show.

    % endif \ No newline at end of file diff --git a/data/interfaces/default/library_user_stats.html b/data/interfaces/default/library_user_stats.html new file mode 100644 index 00000000..c8bf832a --- /dev/null +++ b/data/interfaces/default/library_user_stats.html @@ -0,0 +1,42 @@ +<%doc> +USAGE DOCUMENTATION :: PLEASE LEAVE THIS AT THE TOP OF THIS FILE + +For Mako templating syntax documentation please visit: http://docs.makotemplates.org/en/latest/ + +Filename: library_user_stats.html +Version: 0.1 +Variable names: data [array] + +data[array_index] :: Usable parameters + +== Global keys == +user Returns the name of the user. +user_id Returns the user id of the user. +thumb Returns the avatar of the user. +total_plays Returns the play count for the user. + +DOCUMENTATION :: END + + +% if data: +% for a in data: + +% endfor +% else: +
    No stats to show.

    +% endif \ No newline at end of file diff --git a/data/interfaces/default/logs.html b/data/interfaces/default/logs.html index bb992c17..ecb1d17f 100644 --- a/data/interfaces/default/logs.html +++ b/data/interfaces/default/logs.html @@ -18,7 +18,7 @@ from plexpy import helpers
    - Logs + Logs
    diff --git a/data/interfaces/default/recently_added.html b/data/interfaces/default/recently_added.html index f961801a..bba1601b 100644 --- a/data/interfaces/default/recently_added.html +++ b/data/interfaces/default/recently_added.html @@ -10,15 +10,22 @@ Variable names: data [array] data[array_index] :: Usable parameters == Global keys == +media_type Returns the type of media. Either 'movie' or 'episode' or 'album'. rating_key Returns the unique identifier for the media item. -media_type Returns the media type of media. Either 'movie' or 'season' or 'album'. +parent_rating_key Returns the unique identifier for the season or artist. +grandparent_rating_key Returns the unique identifier for the show. +title Returns the name of the movie, episode, album. +parent_title Returns the name of the artist. +grandparent_title Returns the name of the show. +media_index Returns the index number of the episode. +parent_media_index Returns the index number of the season. +section_id Returns the library section number of the media item. +library_name Returns the library section name of the media item. +year Returns the release year of the movie, episode, or album. thumb Returns the location of the item's thumbnail. Use with pms_image_proxy. +parent_thumb Returns the location of the artist's thumbnail. Use with pms_image_proxy. +grandparent_thumb Returns the location of the show's thumbnail. Use with pms_image_proxy. added_at Returns the time when the media was added to the library. -title Returns the name of the movie or season. -parent_title Returns the name of the TV Show a season belongs too. - -== Only if 'media_type' is 'movie' == -year Returns the movie release year. DOCUMENTATION :: END @@ -30,7 +37,7 @@ DOCUMENTATION :: END
  • % if item['media_type'] == 'season' or item['media_type'] == 'movie': - +
    @@ -53,7 +60,7 @@ DOCUMENTATION :: END
    % elif item['media_type'] == 'album': - +
    diff --git a/data/interfaces/default/settings.html b/data/interfaces/default/settings.html index 37c59b5b..e7f0f4bd 100644 --- a/data/interfaces/default/settings.html +++ b/data/interfaces/default/settings.html @@ -98,25 +98,78 @@ available_notification_agents = sorted(notifiers.available_notification_agents()
    - + +

    + Select the cards to show in the watch statistics on the home page. Select none to disable.
    + Drag the items below to reorder your homepage content. +

    - +
      +
    • +
      + +
    • +
    • +
      + +
    • +
    • +
      + +
    • +
    • +
      + +
    • +
    • +
      + +
    • +
    • +
      + +
    • +
    • +
      + +
    • +
    • +
      + +
    • +
    • +
      + +
    • +
    • +
      + +
    • +
    +
    -

    Select the cards to show in the watch statistics on the home page. Select none to disable.

    @@ -151,14 +204,17 @@ available_notification_agents = sorted(notifiers.available_notification_agents()
    +

    + Select the cards to show in the library statistics on the home page. Select none to disable.
    + Drag the items below to reorder your homepage content. +

    - +
      +
    +
    -

    Select the cards to show in the library statistics on the home page. Select none to disable.

    @@ -348,7 +404,7 @@ available_notification_agents = sorted(notifiers.available_notification_agents()

    Friends List

    - +
    @@ -359,9 +415,29 @@ available_notification_agents = sorted(notifiers.available_notification_agents()
    -

    Refresh the user list when PlexPy starts.

    +

    Refresh the users list when PlexPy starts.

    +
    + +
    +

    Libraries List

    +
    +
    + +
    +
    + +
    + +
    +

    The interval (in hours) PlexPy will request an updated libraries list from your Plex Media Server. 1 minimum, 24 maximum.

    +
    +
    + +

    Refresh the libraries list when PlexPy starts.

    @@ -377,6 +453,13 @@ available_notification_agents = sorted(notifiers.available_notification_agents()

    If you have media indexing enabled on your server, use these on the activity pane.

    +
    + +

    Enable if you want PlexPy to calculate the total file size for TV Shows/Seasons and Artists/Albums on the media info tables.
    + This is currently experimental.

    +

    PlexWatch Import Tool

    @@ -402,9 +485,10 @@ available_notification_agents = sorted(notifiers.available_notification_agents()
    -

    Instead of polling the server at regular intervals let the server tell us when something happens. This is currently experimental. Encrypted websocket is not currently supported.

    +

    Instead of polling the server at regular intervals let the server tell us when something happens.
    + This is currently experimental. Encrypted websocket is not currently supported.

    -
  • -

    @@ -782,7 +864,7 @@ available_notification_agents = sorted(notifiers.available_notification_agents() % for agent in available_notification_agents:
  • - % if agent['on_play'] or agent['on_stop'] or agent['on_pause'] or agent['on_resume'] or agent['on_buffer'] or agent['on_watched'] or agent['on_created'] or agent['on_extdown'] or agent['on_intdown']: + % if any(k[:2] == 'on' and v == 1 for k, v in agent.iteritems()): % else: @@ -1149,6 +1231,10 @@ available_notification_agents = sorted(notifiers.available_notification_agents() {title} The full title of the item being played. + + {library_name} + The library title of the item being played. + {show_name} The title of the TV series being played. @@ -1313,6 +1399,7 @@ available_notification_agents = sorted(notifiers.available_notification_agents() <%def name="javascriptIncludes()"> + +% endif + diff --git a/data/interfaces/default/user.html b/data/interfaces/default/user.html index 8c84a3b6..f5183b95 100644 --- a/data/interfaces/default/user.html +++ b/data/interfaces/default/user.html @@ -13,10 +13,12 @@ user_id Returns the user id of the user. username Returns the user's username. friendly_name Returns the friendly name of the user. email Returns the user's email address. -thumb Returns the thumbnail for the user. +user_thumb Returns the thumbnail for the user. is_home_user Returns bool value for whether the user is part of a Plex Home. is_allow_sync Returns bool value for whether the user has sync rights. is_restricted Returns bool value for whether the user account is restricted. +do_notify Returns bool value for whether to send notifications for the user. +keep_history Returns bool value for whether to keep history for the user. DOCUMENTATION :: END @@ -33,223 +35,261 @@ from plexpy import helpers -% if user != None: <%def name="body()"> +% if data:
    -
    -
    - -
    -
    -
    -
    +
    -
    -
    - Global Stats -
    -
    -
    -
    Loading data...
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    - Player Stats -
    -
    -
    -
    -
    Loading data...
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    - Recently Watched -
    -
    -
    -
    -
    Loading data...
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    - IP Addresses for +
    -
    - - - - - - - - - - - -
    Last SeenIP AddressLast PlatformLast PlayerLast WatchedPlay Count
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    - Watch History for - ${data['friendly_name']} - + +
    +
    +
    +
    +
    +
    +
    + Global Stats +
    +
    +
    +
    +
    Loading data...
    +
    +
    +
    +
    +
    -
    - - - +
    +
    +
    +
    +
    + Player Stats +
    +
    +
    +
    +
    Loading data...
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    + Recently Watched +
    +
    +
    +
    +
    Loading data...
    +
    +
    +
    +
    +
    -
    - - - - - - - - - - - - - - - - - - - -
    DeleteTimeUserIP AddressPlatformPlayerTitleStartedPausedStoppedDuration
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    - Synced Items for - ${data['friendly_name']} - -
    - -
    -
    - - -
    +% else: +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    + Error retrieving user information. Please see the logs for more details. +
    +
    +
    +
    +
    +
    +% endif <%def name="javascriptIncludes()"> @@ -257,6 +297,7 @@ from plexpy import helpers +% if data: @@ -264,10 +305,10 @@ from plexpy import helpers - -% else: -
    -
    -
    -
    -

    Error retrieving user information. Please see the logs for more details.

    -
    -
    -
    -% endif \ No newline at end of file +% endif + \ No newline at end of file diff --git a/data/interfaces/default/user_player_stats.html b/data/interfaces/default/user_player_stats.html index 7aab53b6..6520cfec 100644 --- a/data/interfaces/default/user_player_stats.html +++ b/data/interfaces/default/user_player_stats.html @@ -18,7 +18,7 @@ total_plays Returns the play count for the player. DOCUMENTATION :: END -% if data != None: +% if data: % for a in data:
      @@ -39,6 +39,5 @@ DOCUMENTATION :: END % endfor % else: -
      Unable to retrieve data from database. Please check your settings. -

      +
      No stats to show.

      % endif \ No newline at end of file diff --git a/data/interfaces/default/user_recently_watched.html b/data/interfaces/default/user_recently_watched.html index 2e432203..d13652b9 100644 --- a/data/interfaces/default/user_recently_watched.html +++ b/data/interfaces/default/user_recently_watched.html @@ -12,29 +12,27 @@ data[array_index] :: Usable parameters == Global keys == rating_key Returns the unique identifier for the media item. row_id Returns the unique row id for the media item in the database. -type Returns the type of media. Either 'movie' or 'episode'. +media_type Returns the type of media. Either 'movie' or 'episode' or 'album'. thumb Returns the location of the item's thumbnail. Use with pms_image_proxy. time Returns the last watched time of the media. -title Returns the name of the movie or episode. - -== Only if 'type' is 'episode == -parent_title Returns the name of the TV Show a season belongs too. -parent_index Returns the season number. -index Returns the episode number. - -== Only if 'type' is 'movie' == -year Returns the movie release year. +user Returns the name of the user that watched the item. +title Returns the name of the movie, episode, album. +parent_title Returns the name of the artist. +grandparent_title Returns the name of the show. +media_index Returns the index number of the episode. +parent_media_index Returns the index number of the season. +year Returns the release year of the movie, episode, or album. DOCUMENTATION :: END -% if data != None: +% if data: % else: -
      +
      No stats to show.

      % endif \ No newline at end of file diff --git a/data/interfaces/default/user_watch_time_stats.html b/data/interfaces/default/user_watch_time_stats.html index fdaba5f6..ba2c86ec 100644 --- a/data/interfaces/default/user_watch_time_stats.html +++ b/data/interfaces/default/user_watch_time_stats.html @@ -16,7 +16,7 @@ total_plays Returns the play count for the watch stat period.. DOCUMENTATION :: END -% if data != None: +% if data:
        % for a in data:
        @@ -43,6 +43,6 @@ DOCUMENTATION :: END % endfor
      % else: -
      Unable to retrieve data from database. Please check your settings. +
      Unable to retrieve data from database.

      % endif \ No newline at end of file diff --git a/data/interfaces/default/users.html b/data/interfaces/default/users.html index c97c46e8..fb2600f3 100644 --- a/data/interfaces/default/users.html +++ b/data/interfaces/default/users.html @@ -77,7 +77,9 @@ url: 'get_user_list', type: 'POST', data: function ( d ) { - return { 'json_data': JSON.stringify( d ) }; + return { + json_data: JSON.stringify(d) + }; } } @@ -177,10 +179,13 @@ cache: false, async: true, success: function(data) { - showMsg(' User list refresh started...',false,true,2000,false) + showMsg(' Users list refresh started...', false, true, 2000, false); }, - error: function(jqXHR, textStatus, errorThrown) { - showMsg(' Unable to refresh user list.',false,true,2000,true) + complete: function (data) { + showMsg(' Users list refreshed.', false, true, 2000, false); + }, + error: function (jqXHR, textStatus, errorThrown) { + showMsg(' Unable to refresh users list.', false, true, 2000, true); } }); }); diff --git a/data/interfaces/default/welcome.html b/data/interfaces/default/welcome.html index 10654801..0b19eca1 100644 --- a/data/interfaces/default/welcome.html +++ b/data/interfaces/default/welcome.html @@ -168,8 +168,11 @@ from plexpy import common
      + + +
      @@ -369,41 +372,43 @@ from plexpy import common var pms_verified = false; var authenticated = false; - $("#verify-plex-server").click(function() { - var pms_ip = $("#pms_ip").val() - var pms_port = $("#pms_port").val() - var pms_identifier = $("#pms_identifier").val() - var pms_ssl = $("#pms_ssl").val() - var pms_is_remote = $("#pms_is_remote").val() - if (($("#pms_ip").val() !== '') || ($("#pms_port").val() !== '')) { - $("#pms-verify-status").html(' Validating server...'); - $('#pms-verify-status').fadeIn('fast'); - $.ajax({ - url: 'get_server_id', - data : { hostname: pms_ip, port: pms_port, identifier: pms_identifier, ssl: pms_ssl, remote: pms_is_remote }, - cache: true, - async: true, - timeout: 5000, - error: function(jqXHR, textStatus, errorThrown) { - $("#pms-verify-status").html(' This is not a Plex Server!'); - $('#pms-verify-status').fadeIn('fast'); - }, - success: function (xml) { - if ( $(xml).find('MediaContainer').attr('machineIdentifier') ) { - $("#pms_identifier").val($(xml).find('MediaContainer').attr('machineIdentifier')); - $("#pms-verify-status").html(' Server found!'); - $('#pms-verify-status').fadeIn('fast'); - pms_verified = true; - $("#pms_valid").val("valid"); - } else { + $("#verify-plex-server").click(function () { + if (!(pms_verified)) { + var pms_ip = $("#pms_ip").val().trim(); + var pms_port = $("#pms_port").val().trim(); + var pms_identifier = $("#pms_identifier").val(); + var pms_ssl = $("#pms_ssl").val(); + var pms_is_remote = $("#pms_is_remote").val(); + if ((pms_ip !== '') || (pms_port !== '')) { + $("#pms-verify-status").html(' Validating server...'); + $('#pms-verify-status').fadeIn('fast'); + $.ajax({ + url: 'get_server_id', + data: { hostname: pms_ip, port: pms_port, identifier: pms_identifier, ssl: pms_ssl, remote: pms_is_remote }, + cache: true, + async: true, + timeout: 5000, + error: function (jqXHR, textStatus, errorThrown) { $("#pms-verify-status").html(' This is not a Plex Server!'); 
$('#pms-verify-status').fadeIn('fast'); + }, + success: function (xml) { + if ($(xml).find('MediaContainer').attr('machineIdentifier')) { + $("#pms_identifier").val($(xml).find('MediaContainer').attr('machineIdentifier')); + $("#pms-verify-status").html(' Server found!'); + $('#pms-verify-status').fadeIn('fast'); + pms_verified = true; + $("#pms_valid").val("valid"); + } else { + $("#pms-verify-status").html(' This is not a Plex Server!'); + $('#pms-verify-status').fadeIn('fast'); + } } - } - }); - } else { - $("#pms-verify-status").html(' Please enter both fields.'); - $('#pms-verify-status').fadeIn('fast'); + }); + } else { + $("#pms-verify-status").html(' Please enter both fields.'); + $('#pms-verify-status').fadeIn('fast'); + } } }); @@ -423,20 +428,23 @@ from plexpy import common $("#pms-authenticate").click(function() { $("#pms-token-status").html(' Fetching token...'); $('#pms-token-status').fadeIn('fast'); - if (($("#pms_username").val() !== '') || ($("#pms_password").val() !== '')) { + var pms_username = $("#pms_username").val().trim(); + var pms_password = $("#pms_password").val().trim(); + if ((pms_username !== '') || (pms_password !== '')) { $.ajax({ type: "post", url: "https://plex.tv/users/sign_in.xml", dataType: 'xml', async: true, - headers: {'Content-Type': 'application/xml; charset=utf-8', - 'X-Plex-Device-Name': 'PlexPy', - 'X-Plex-Product': 'PlexPy', - 'X-Plex-Version': '${common.VERSION_NUMBER}', - 'X-Plex-Platform': '${common.PLATFORM}', - 'X-Plex-Platform-Version': '${common.PLATFORM_VERSION}', - 'X-Plex-Client-Identifier': '${config['pms_uuid']}', - 'Authorization': 'Basic ' + btoa($("#pms_username").val() + ':' + $("#pms_password").val()) + headers: { + 'Content-Type': 'application/xml; charset=utf-8', + 'X-Plex-Device-Name': 'PlexPy', + 'X-Plex-Product': 'PlexPy', + 'X-Plex-Version': '${common.VERSION_NUMBER}', + 'X-Plex-Platform': '${common.PLATFORM}', + 'X-Plex-Platform-Version': '${common.PLATFORM_VERSION}', + 
'X-Plex-Client-Identifier': '${config["pms_uuid"]}', + 'Authorization': 'Basic ' + btoa(pms_username + ':' + pms_password) }, error: function(jqXHR, textStatus, errorThrown) { $("#pms-token-status").html(' Authentation failed!'); diff --git a/plexpy/__init__.py b/plexpy/__init__.py index bbd234da..2c55e72c 100644 --- a/plexpy/__init__.py +++ b/plexpy/__init__.py @@ -175,6 +175,10 @@ def initialize(config_file): if CONFIG.PMS_TOKEN and CONFIG.REFRESH_USERS_ON_STARTUP: plextv.refresh_users() + # Refresh the libraries list on startup + if CONFIG.PMS_TOKEN and CONFIG.REFRESH_LIBRARIES_ON_STARTUP: + pmsconnect.refresh_libraries() + # Store the original umask UMASK = os.umask(0) os.umask(UMASK) @@ -256,8 +260,6 @@ def initialize_scheduler(): Start the scheduled background tasks. Re-schedule if interval settings changed. """ - - with SCHED_LOCK: # Check if scheduler should be started @@ -280,7 +282,7 @@ def initialize_scheduler(): else: seconds = 0 - if CONFIG.PMS_IP and CONFIG.PMS_TOKEN: + if CONFIG.PMS_IP and CONFIG.PMS_TOKEN and CONFIG.UPDATE_SECTION_IDS != -1: schedule_job(plextv.get_real_pms_url, 'Refresh Plex Server URLs', hours=12, minutes=0, seconds=0) schedule_job(pmsconnect.get_server_friendly_name, 'Refresh Plex Server Name', @@ -311,8 +313,14 @@ def initialize_scheduler(): else: hours = 0 + if CONFIG.PMS_TOKEN: - schedule_job(plextv.refresh_users, 'Refresh users list', hours=hours, minutes=0, seconds=0) + schedule_job(plextv.refresh_users, 'Refresh users list', + hours=hours, minutes=0, seconds=0) + + if CONFIG.PMS_IP and CONFIG.PMS_TOKEN: + schedule_job(pmsconnect.refresh_libraries, 'Refresh libraries list', + hours=hours, minutes=0, seconds=0) # Start scheduler if start_jobs and len(SCHED.get_jobs()): @@ -369,7 +377,7 @@ def dbcheck(): # sessions table :: This is a temp table that logs currently active sessions c_db.execute( 'CREATE TABLE IF NOT EXISTS sessions (id INTEGER PRIMARY KEY AUTOINCREMENT, ' - 'session_key INTEGER, rating_key INTEGER, media_type 
TEXT, started INTEGER, ' + 'session_key INTEGER, rating_key INTEGER, section_id INTEGER, media_type TEXT, started INTEGER, ' 'paused_counter INTEGER DEFAULT 0, state TEXT, user_id INTEGER, user TEXT, friendly_name TEXT, ' 'ip_address TEXT, machine_id TEXT, player TEXT, platform TEXT, title TEXT, parent_title TEXT, ' 'grandparent_title TEXT, parent_rating_key INTEGER, grandparent_rating_key INTEGER, ' @@ -405,11 +413,10 @@ def dbcheck(): 'CREATE TABLE IF NOT EXISTS session_history_metadata (id INTEGER PRIMARY KEY, ' 'rating_key INTEGER, parent_rating_key INTEGER, grandparent_rating_key INTEGER, ' 'title TEXT, parent_title TEXT, grandparent_title TEXT, full_title TEXT, media_index INTEGER, ' - 'parent_media_index INTEGER, thumb TEXT, parent_thumb TEXT, grandparent_thumb TEXT, art TEXT, media_type TEXT, ' - 'year INTEGER, originally_available_at TEXT, added_at INTEGER, updated_at INTEGER, last_viewed_at INTEGER, ' - 'content_rating TEXT, summary TEXT, tagline TEXT, rating TEXT, duration INTEGER DEFAULT 0, guid TEXT, ' - 'directors TEXT, writers TEXT, actors TEXT, genres TEXT, studio TEXT)' - '' + 'parent_media_index INTEGER, section_id INTEGER, thumb TEXT, parent_thumb TEXT, grandparent_thumb TEXT, ' + 'art TEXT, media_type TEXT, year INTEGER, originally_available_at TEXT, added_at INTEGER, updated_at INTEGER, ' + 'last_viewed_at INTEGER, content_rating TEXT, summary TEXT, tagline TEXT, rating TEXT, ' + 'duration INTEGER DEFAULT 0, guid TEXT, directors TEXT, writers TEXT, actors TEXT, genres TEXT, studio TEXT)' ) # users table :: This table keeps record of the friends list @@ -421,6 +428,23 @@ def dbcheck(): 'keep_history INTEGER DEFAULT 1, deleted_user INTEGER DEFAULT 0)' ) + # notify_log table :: This is a table which logs notifications sent + c_db.execute( + 'CREATE TABLE IF NOT EXISTS notify_log (id INTEGER PRIMARY KEY AUTOINCREMENT, ' + 'session_key INTEGER, rating_key INTEGER, user_id INTEGER, user TEXT, ' + 'agent_id INTEGER, agent_name TEXT, on_play INTEGER, 
on_stop INTEGER, on_watched INTEGER, ' + 'on_pause INTEGER, on_resume INTEGER, on_buffer INTEGER, on_created INTEGER)' + ) + + # library_sections table :: This table keeps record of the servers library sections + c_db.execute( + 'CREATE TABLE IF NOT EXISTS library_sections (id INTEGER PRIMARY KEY AUTOINCREMENT, ' + 'server_id TEXT, section_id INTEGER UNIQUE, section_name TEXT, section_type TEXT, ' + 'thumb TEXT, custom_thumb_url TEXT, art TEXT, count INTEGER, parent_count INTEGER, child_count INTEGER, ' + 'do_notify INTEGER DEFAULT 1, do_notify_created INTEGER DEFAULT 1, keep_history INTEGER DEFAULT 1, ' + 'deleted_section INTEGER DEFAULT 0)' + ) + # Upgrade sessions table from earlier versions try: c_db.execute('SELECT started from sessions') @@ -547,6 +571,59 @@ def dbcheck(): 'ALTER TABLE sessions ADD COLUMN transcode_height INTEGER' ) + # Upgrade sessions table from earlier versions + try: + c_db.execute('SELECT buffer_count from sessions') + except sqlite3.OperationalError: + logger.debug(u"Altering database. Updating database table sessions.") + c_db.execute( + 'ALTER TABLE sessions ADD COLUMN buffer_count INTEGER DEFAULT 0' + ) + c_db.execute( + 'ALTER TABLE sessions ADD COLUMN buffer_last_triggered INTEGER' + ) + + # Upgrade sessions table from earlier versions + try: + c_db.execute('SELECT last_paused from sessions') + except sqlite3.OperationalError: + logger.debug(u"Altering database. Updating database table sessions.") + c_db.execute( + 'ALTER TABLE sessions ADD COLUMN last_paused INTEGER' + ) + + # Upgrade sessions table from earlier versions + try: + c_db.execute('SELECT section_id from sessions') + except sqlite3.OperationalError: + logger.debug(u"Altering database. 
Updating database table sessions.") + c_db.execute( + 'ALTER TABLE sessions ADD COLUMN section_id INTEGER' + ) + + # Upgrade session_history table from earlier versions + try: + c_db.execute('SELECT reference_id from session_history') + except sqlite3.OperationalError: + logger.debug(u"Altering database. Updating database table session_history.") + c_db.execute( + 'ALTER TABLE session_history ADD COLUMN reference_id INTEGER DEFAULT 0' + ) + # Set reference_id to the first row where (user_id = previous row, rating_key != previous row) and user_id = user_id + c_db.execute( + 'UPDATE session_history ' \ + 'SET reference_id = (SELECT (CASE \ + WHEN (SELECT MIN(id) FROM session_history WHERE id > ( \ + SELECT MAX(id) FROM session_history \ + WHERE (user_id = t1.user_id AND rating_key <> t1.rating_key AND id < t1.id)) AND user_id = t1.user_id) IS NULL \ + THEN (SELECT MIN(id) FROM session_history WHERE (user_id = t1.user_id)) \ + ELSE (SELECT MIN(id) FROM session_history WHERE id > ( \ + SELECT MAX(id) FROM session_history \ + WHERE (user_id = t1.user_id AND rating_key <> t1.rating_key AND id < t1.id)) AND user_id = t1.user_id) END) ' \ + 'FROM session_history AS t1 ' \ + 'WHERE t1.id = session_history.id) ' + ) + # Upgrade session_history_metadata table from earlier versions try: c_db.execute('SELECT full_title from session_history_metadata') @@ -565,13 +642,14 @@ def dbcheck(): 'ALTER TABLE session_history_metadata ADD COLUMN tagline TEXT' ) - # notify_log table :: This is a table which logs notifications sent - c_db.execute( - 'CREATE TABLE IF NOT EXISTS notify_log (id INTEGER PRIMARY KEY AUTOINCREMENT, ' - 'session_key INTEGER, rating_key INTEGER, user_id INTEGER, user TEXT, ' - 'agent_id INTEGER, agent_name TEXT, on_play INTEGER, on_stop INTEGER, on_watched INTEGER, ' - 'on_pause INTEGER, on_resume INTEGER, on_buffer INTEGER, on_created INTEGER)' - ) + # Upgrade session_history_metadata table from earlier versions + try: + c_db.execute('SELECT section_id from 
session_history_metadata') + except sqlite3.OperationalError: + logger.debug(u"Altering database. Updating database table session_history_metadata.") + c_db.execute( + 'ALTER TABLE session_history_metadata ADD COLUMN section_id INTEGER' + ) # Upgrade users table from earlier versions try: @@ -591,6 +669,24 @@ def dbcheck(): 'ALTER TABLE users ADD COLUMN keep_history INTEGER DEFAULT 1' ) + # Upgrade users table from earlier versions + try: + c_db.execute('SELECT custom_avatar_url from users') + except sqlite3.OperationalError: + logger.debug(u"Altering database. Updating database table users.") + c_db.execute( + 'ALTER TABLE users ADD COLUMN custom_avatar_url TEXT' + ) + + # Upgrade users table from earlier versions + try: + c_db.execute('SELECT deleted_user from users') + except sqlite3.OperationalError: + logger.debug(u"Altering database. Updating database table users.") + c_db.execute( + 'ALTER TABLE users ADD COLUMN deleted_user INTEGER DEFAULT 0' + ) + # Upgrade notify_log table from earlier versions try: c_db.execute('SELECT on_pause from notify_log') @@ -615,74 +711,12 @@ def dbcheck(): 'ALTER TABLE notify_log ADD COLUMN on_created INTEGER' ) - # Upgrade sessions table from earlier versions - try: - c_db.execute('SELECT buffer_count from sessions') - except sqlite3.OperationalError: - logger.debug(u"Altering database. Updating database table sessions.") - c_db.execute( - 'ALTER TABLE sessions ADD COLUMN buffer_count INTEGER DEFAULT 0' - ) - c_db.execute( - 'ALTER TABLE sessions ADD COLUMN buffer_last_triggered INTEGER' - ) - - # Upgrade users table from earlier versions - try: - c_db.execute('SELECT custom_avatar_url from users') - except sqlite3.OperationalError: - logger.debug(u"Altering database. 
Updating database table users.") - c_db.execute( - 'ALTER TABLE users ADD COLUMN custom_avatar_url TEXT' - ) - - # Upgrade sessions table from earlier versions - try: - c_db.execute('SELECT last_paused from sessions') - except sqlite3.OperationalError: - logger.debug(u"Altering database. Updating database table sessions.") - c_db.execute( - 'ALTER TABLE sessions ADD COLUMN last_paused INTEGER' - ) - # Add "Local" user to database as default unauthenticated user. result = c_db.execute('SELECT id FROM users WHERE username = "Local"') if not result.fetchone(): logger.debug(u'User "Local" does not exist. Adding user.') c_db.execute('INSERT INTO users (user_id, username) VALUES (0, "Local")') - # Upgrade session_history table from earlier versions - try: - c_db.execute('SELECT reference_id from session_history') - except sqlite3.OperationalError: - logger.debug(u"Altering database. Updating database table session_history.") - c_db.execute( - 'ALTER TABLE session_history ADD COLUMN reference_id INTEGER DEFAULT 0' - ) - # Set reference_id to the first row where (user_id = previous row, rating_key != previous row) and user_id = user_id - c_db.execute( - 'UPDATE session_history ' \ - 'SET reference_id = (SELECT (CASE \ - WHEN (SELECT MIN(id) FROM session_history WHERE id > ( \ - SELECT MAX(id) FROM session_history \ - WHERE (user_id = t1.user_id AND rating_key <> t1.rating_key AND id < t1.id)) AND user_id = t1.user_id) IS NULL \ - THEN (SELECT MIN(id) FROM session_history WHERE (user_id = t1.user_id)) \ - ELSE (SELECT MIN(id) FROM session_history WHERE id > ( \ - SELECT MAX(id) FROM session_history \ - WHERE (user_id = t1.user_id AND rating_key <> t1.rating_key AND id < t1.id)) AND user_id = t1.user_id) END) ' \ - 'FROM session_history AS t1 ' \ - 'WHERE t1.id = session_history.id) ' - ) - - # Upgrade users table from earlier versions - try: - c_db.execute('SELECT deleted_user from users') - except sqlite3.OperationalError: - logger.debug(u"Altering database. 
Updating database table users.") - c_db.execute( - 'ALTER TABLE users ADD COLUMN deleted_user INTEGER DEFAULT 0' - ) - conn_db.commit() c_db.close() diff --git a/plexpy/activity_handler.py b/plexpy/activity_handler.py index a9a5b822..956cee73 100644 --- a/plexpy/activity_handler.py +++ b/plexpy/activity_handler.py @@ -16,7 +16,7 @@ import time import plexpy -from plexpy import logger, pmsconnect, activity_processor, threading, notification_handler +from plexpy import logger, pmsconnect, activity_processor, threading, notification_handler, helpers class ActivityHandler(object): @@ -165,8 +165,6 @@ class ActivityHandler(object): # This function receives events from our websocket connection def process(self): if self.is_valid_session(): - from plexpy import helpers - ap = activity_processor.ActivityProcessor() db_session = ap.get_session_by_key(session_key=self.get_session_key()) diff --git a/plexpy/activity_pinger.py b/plexpy/activity_pinger.py index ee0479d3..84474577 100644 --- a/plexpy/activity_pinger.py +++ b/plexpy/activity_pinger.py @@ -13,7 +13,7 @@ # You should have received a copy of the GNU General Public License # along with PlexPy. If not, see . 
-from plexpy import logger, pmsconnect, plextv, notification_handler, database, helpers, activity_processor +from plexpy import logger, pmsconnect, plextv, notification_handler, database, helpers, activity_processor, libraries import threading import plexpy @@ -46,16 +46,7 @@ def check_active_sessions(ws_request=False): media_container = session_list['sessions'] # Check our temp table for what we must do with the new streams - db_streams = monitor_db.select('SELECT started, session_key, rating_key, media_type, title, parent_title, ' - 'grandparent_title, user_id, user, friendly_name, ip_address, player, ' - 'platform, machine_id, parent_rating_key, grandparent_rating_key, state, ' - 'view_offset, duration, video_decision, audio_decision, width, height, ' - 'container, video_codec, audio_codec, bitrate, video_resolution, ' - 'video_framerate, aspect_ratio, audio_channels, transcode_protocol, ' - 'transcode_container, transcode_video_codec, transcode_audio_codec, ' - 'transcode_audio_channels, transcode_width, transcode_height, ' - 'paused_counter, last_paused ' - 'FROM sessions') + db_streams = monitor_db.select('SELECT * FROM sessions') for stream in db_streams: if any(d['session_key'] == str(stream['session_key']) and d['rating_key'] == str(stream['rating_key']) for d in media_container): @@ -182,7 +173,7 @@ def check_active_sessions(ws_request=False): if int_ping_count == 3: # Fire off notifications threading.Thread(target=notification_handler.notify_timeline, - kwargs=dict(notify_action='intdown')).start() + kwargs=dict(notify_action='intdown')).start() def check_recently_added(): @@ -196,10 +187,16 @@ def check_recently_added(): pms_connect = pmsconnect.PmsConnect() recently_added_list = pms_connect.get_recently_added_details(count='10') + library_data = libraries.Libraries() if recently_added_list: recently_added = recently_added_list['recently_added'] for item in recently_added: + library_details = library_data.get_details(section_id=item['section_id']) + + 
if not library_details['do_notify_created']: + continue + metadata = [] if 0 < time_threshold - int(item['added_at']) <= time_interval: @@ -220,8 +217,12 @@ def check_recently_added(): % str(item['rating_key'])) if metadata: + if not plexpy.CONFIG.NOTIFY_RECENTLY_ADDED_GRANDPARENT: for item in metadata: + + library_details = library_data.get_details(section_id=item['section_id']) + if 0 < time_threshold - int(item['added_at']) <= time_interval: logger.debug(u"PlexPy Monitor :: Library item %s has been added to Plex." % str(item['rating_key'])) # Fire off notifications diff --git a/plexpy/activity_processor.py b/plexpy/activity_processor.py index 0a68f4d0..ebef871c 100644 --- a/plexpy/activity_processor.py +++ b/plexpy/activity_processor.py @@ -29,6 +29,7 @@ class ActivityProcessor(object): def write_session(self, session=None, notify=True): if session: values = {'session_key': session['session_key'], + 'section_id': session['section_id'], 'rating_key': session['rating_key'], 'media_type': session['media_type'], 'state': session['state'], @@ -97,10 +98,13 @@ class ActivityProcessor(object): self.db.upsert('sessions', ip_address, keys) def write_session_history(self, session=None, import_metadata=None, is_import=False, import_ignore_interval=0): - from plexpy import users + from plexpy import users, libraries user_data = users.Users() - user_details = user_data.get_user_friendly_name(user=session['user']) + user_details = user_data.get_details(user_id=session['user_id']) + + library_data = libraries.Libraries() + library_details = library_data.get_details(section_id=session['section_id']) if session: logging_enabled = False @@ -155,7 +159,10 @@ class ActivityProcessor(object): if not user_details['keep_history'] and not is_import: logging_enabled = False - logger.debug(u"PlexPy ActivityProcessor :: History logging for user '%s' is disabled." % session['user']) + logger.debug(u"PlexPy ActivityProcessor :: History logging for user '%s' is disabled." 
% user_details['username']) + elif not library_details['keep_history'] and not is_import: + logging_enabled = False + logger.debug(u"PlexPy ActivityProcessor :: History logging for library '%s' is disabled." % library_details['section_name']) if logging_enabled: # logger.debug(u"PlexPy ActivityProcessor :: Attempting to write to session_history table...") @@ -250,19 +257,20 @@ class ActivityProcessor(object): # logger.debug(u"PlexPy ActivityProcessor :: Attempting to write to session_history_metadata table...") query = 'INSERT INTO session_history_metadata (id, rating_key, parent_rating_key, ' \ 'grandparent_rating_key, title, parent_title, grandparent_title, full_title, media_index, ' \ - 'parent_media_index, thumb, parent_thumb, grandparent_thumb, art, media_type, year, ' \ - 'originally_available_at, added_at, updated_at, last_viewed_at, content_rating, summary, ' \ - 'tagline, rating, duration, guid, directors, writers, actors, genres, studio) VALUES ' \ + 'parent_media_index, section_id, thumb, parent_thumb, grandparent_thumb, art, media_type, ' \ + 'year, originally_available_at, added_at, updated_at, last_viewed_at, content_rating, ' \ + 'summary, tagline, rating, duration, guid, directors, writers, actors, genres, studio) VALUES ' \ '(last_insert_rowid(), ' \ - '?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)' + '?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)' args = [session['rating_key'], session['parent_rating_key'], session['grandparent_rating_key'], session['title'], session['parent_title'], session['grandparent_title'], full_title, - metadata['index'], metadata['parent_index'], metadata['thumb'], metadata['parent_thumb'], - metadata['grandparent_thumb'], metadata['art'], session['media_type'], metadata['year'], - metadata['originally_available_at'], metadata['added_at'], metadata['updated_at'], + metadata['media_index'], metadata['parent_media_index'], 
metadata['section_id'], metadata['thumb'], + metadata['parent_thumb'], metadata['grandparent_thumb'], metadata['art'], session['media_type'], + metadata['year'], metadata['originally_available_at'], metadata['added_at'], metadata['updated_at'], metadata['last_viewed_at'], metadata['content_rating'], metadata['summary'], metadata['tagline'], - metadata['rating'], metadata['duration'], metadata['guid'], directors, writers, actors, genres, metadata['studio']] + metadata['rating'], metadata['duration'], metadata['guid'], directors, writers, actors, genres, + metadata['studio']] # logger.debug(u"PlexPy ActivityProcessor :: Writing session_history_metadata transaction...") self.db.action(query=query, args=args) @@ -330,15 +338,7 @@ class ActivityProcessor(object): def get_session_by_key(self, session_key=None): if str(session_key).isdigit(): - result = self.db.select('SELECT started, session_key, rating_key, media_type, title, parent_title, ' - 'grandparent_title, user_id, user, friendly_name, ip_address, player, ' - 'platform, machine_id, parent_rating_key, grandparent_rating_key, state, ' - 'view_offset, duration, video_decision, audio_decision, width, height, ' - 'container, video_codec, audio_codec, bitrate, video_resolution, ' - 'video_framerate, aspect_ratio, audio_channels, transcode_protocol, ' - 'transcode_container, transcode_video_codec, transcode_audio_codec, ' - 'transcode_audio_channels, transcode_width, transcode_height, ' - 'paused_counter, last_paused ' + result = self.db.select('SELECT * ' 'FROM sessions WHERE session_key = ? 
LIMIT 1', args=[session_key]) for session in result: if session: diff --git a/plexpy/api.py b/plexpy/api.py index d2d0d432..6f5c2882 100644 --- a/plexpy/api.py +++ b/plexpy/api.py @@ -323,7 +323,7 @@ class Api(object): custom_where = [['strftime("%Y-%m-%d", datetime(date, "unixepoch", "localtime"))', start_date]] data_factory = datafactory.DataFactory() - history = data_factory.get_history(kwargs=kwargs, custom_where=custom_where) + history = data_factory.get_datatables_history(kwargs=kwargs, custom_where=custom_where) self.data = history return self.data diff --git a/plexpy/config.py b/plexpy/config.py index f5afd041..1564658b 100644 --- a/plexpy/config.py +++ b/plexpy/config.py @@ -97,6 +97,8 @@ _CONFIG_DEFINITIONS = { 'FACEBOOK_ON_INTUP': (int, 'Facebook', 0), 'FIRST_RUN_COMPLETE': (int, 'General', 0), 'FREEZE_DB': (int, 'General', 0), + 'GET_FILE_SIZES': (int, 'General', 0), + 'GET_FILE_SIZES_HOLD': (dict, 'General', {'section_ids': [], 'rating_keys': []}), 'GIT_BRANCH': (str, 'General', 'master'), 'GIT_PATH': (str, 'General', ''), 'GIT_USER': (str, 'General', 'drzoidberg33'), @@ -118,12 +120,12 @@ _CONFIG_DEFINITIONS = { 'GROWL_ON_INTDOWN': (int, 'Growl', 0), 'GROWL_ON_EXTUP': (int, 'Growl', 0), 'GROWL_ON_INTUP': (int, 'Growl', 0), - 'HOME_LIBRARY_CARDS': (str, 'General', 'library_statistics_first'), + 'HOME_LIBRARY_CARDS': (list, 'General', ['first_run']), 'HOME_STATS_LENGTH': (int, 'General', 30), 'HOME_STATS_TYPE': (int, 'General', 0), 'HOME_STATS_COUNT': (int, 'General', 5), - 'HOME_STATS_CARDS': (str, 'General', 'watch_statistics, top_tv, popular_tv, top_movies, popular_movies, ' \ - 'top_music, popular_music, last_watched, top_users, top_platforms, most_concurrent'), + 'HOME_STATS_CARDS': (list, 'General', ['top_tv', 'popular_tv', 'top_movies', 'popular_movies', 'top_music', \ + 'popular_music', 'last_watched', 'top_users', 'top_platforms', 'most_concurrent']), 'HTTPS_CERT': (str, 'General', ''), 'HTTPS_KEY': (str, 'General', ''), 'HTTP_HOST': (str, 
'General', '0.0.0.0'), @@ -293,6 +295,8 @@ _CONFIG_DEFINITIONS = { 'PUSHOVER_ON_INTDOWN': (int, 'Pushover', 0), 'PUSHOVER_ON_EXTUP': (int, 'Pushover', 0), 'PUSHOVER_ON_INTUP': (int, 'Pushover', 0), + 'REFRESH_LIBRARIES_INTERVAL': (int, 'Monitoring', 12), + 'REFRESH_LIBRARIES_ON_STARTUP': (int, 'Monitoring', 1), 'REFRESH_USERS_INTERVAL': (int, 'Monitoring', 12), 'REFRESH_USERS_ON_STARTUP': (int, 'Monitoring', 1), 'SLACK_ENABLED': (int, 'Slack', 0), @@ -370,6 +374,7 @@ _CONFIG_DEFINITIONS = { 'TWITTER_ON_EXTUP': (int, 'Twitter', 0), 'TWITTER_ON_INTUP': (int, 'Twitter', 0), 'UPDATE_DB_INTERVAL': (int, 'General', 24), + 'UPDATE_SECTION_IDS': (int, 'General', 1), 'VERIFY_SSL_CERT': (bool_int, 'Advanced', 1), 'VIDEO_LOGGING_ENABLE': (int, 'Monitoring', 1), 'XBMC_ENABLED': (int, 'XBMC', 0), @@ -505,3 +510,17 @@ class Config(object): self.MOVIE_LOGGING_ENABLE = 0 self.TV_LOGGING_ENABLE = 0 self.CONFIG_VERSION = '1' + if self.CONFIG_VERSION == '1': + # Change home_stats_cards to list + if self.HOME_STATS_CARDS: + home_stats_cards = ''.join(self.HOME_STATS_CARDS).split(', ') + if 'watch_statistics' in home_stats_cards: + home_stats_cards.remove('watch_statistics') + self.HOME_STATS_CARDS = home_stats_cards + # Change home_library_cards to list + if self.HOME_LIBRARY_CARDS: + home_library_cards = ''.join(self.HOME_LIBRARY_CARDS).split(', ') + if 'library_statistics' in home_library_cards: + home_library_cards.remove('library_statistics') + self.HOME_LIBRARY_CARDS = home_library_cards + self.CONFIG_VERSION = '2' \ No newline at end of file diff --git a/plexpy/datafactory.py b/plexpy/datafactory.py index a9fdf29e..3be50b84 100644 --- a/plexpy/datafactory.py +++ b/plexpy/datafactory.py @@ -26,7 +26,7 @@ class DataFactory(object): def __init__(self): pass - def get_history(self, kwargs=None, custom_where=None, grouping=0, watched_percent=85): + def get_datatables_history(self, kwargs=None, custom_where=None, grouping=0, watched_percent=85): data_tables = datatables.DataTables() 
group_by = ['session_history.reference_id'] if grouping else ['session_history.id'] @@ -81,8 +81,8 @@ class DataFactory(object): ['session_history.id', 'session_history_metadata.id'], ['session_history.id', 'session_history_media_info.id']], kwargs=kwargs) - except: - logger.warn("Unable to execute database query for get_history.") + except Exception as e: + logger.warn(u"PlexPy DataFactory :: Unable to execute database query for get_history: %s." % e) return {'recordsFiltered': 0, 'recordsTotal': 0, 'draw': 0, @@ -98,12 +98,12 @@ class DataFactory(object): for item in history: filter_duration += int(item['duration']) - if item["media_type"] == 'episode' and item["parent_thumb"]: - thumb = item["parent_thumb"] - elif item["media_type"] == 'episode': - thumb = item["grandparent_thumb"] + if item['media_type'] == 'episode' and item['parent_thumb']: + thumb = item['parent_thumb'] + elif item['media_type'] == 'episode': + thumb = item['grandparent_thumb'] else: - thumb = item["thumb"] + thumb = item['thumb'] if item['percent_complete'] >= watched_percent: watched_status = 1 @@ -113,37 +113,37 @@ class DataFactory(object): watched_status = 0 # Rename Mystery platform names - platform = common.PLATFORM_NAME_OVERRIDES.get(item["platform"], item["platform"]) + platform = common.PLATFORM_NAME_OVERRIDES.get(item['platform'], item['platform']) - row = {"reference_id": item["reference_id"], - "id": item["id"], - "date": item["date"], - "started": item["started"], - "stopped": item["stopped"], - "duration": item["duration"], - "paused_counter": item["paused_counter"], - "user_id": item["user_id"], - "user": item["user"], - "friendly_name": item["friendly_name"], - "platform": platform, - "player": item['player'], - "ip_address": item["ip_address"], - "media_type": item["media_type"], - "rating_key": item["rating_key"], - "parent_rating_key": item["parent_rating_key"], - "grandparent_rating_key": item["grandparent_rating_key"], - "full_title": item["full_title"], - 
"parent_title": item["parent_title"], - "year": item["year"], - "media_index": item["media_index"], - "parent_media_index": item["parent_media_index"], - "thumb": thumb, - "video_decision": item["video_decision"], - "audio_decision": item["audio_decision"], - "percent_complete": int(round(item['percent_complete'])), - "watched_status": watched_status, - "group_count": item["group_count"], - "group_ids": item["group_ids"] + row = {'reference_id': item['reference_id'], + 'id': item['id'], + 'date': item['date'], + 'started': item['started'], + 'stopped': item['stopped'], + 'duration': item['duration'], + 'paused_counter': item['paused_counter'], + 'user_id': item['user_id'], + 'user': item['user'], + 'friendly_name': item['friendly_name'], + 'platform': platform, + 'player': item['player'], + 'ip_address': item['ip_address'], + 'media_type': item['media_type'], + 'rating_key': item['rating_key'], + 'parent_rating_key': item['parent_rating_key'], + 'grandparent_rating_key': item['grandparent_rating_key'], + 'full_title': item['full_title'], + 'parent_title': item['parent_title'], + 'year': item['year'], + 'media_index': item['media_index'], + 'parent_media_index': item['parent_media_index'], + 'thumb': thumb, + 'video_decision': item['video_decision'], + 'audio_decision': item['audio_decision'], + 'percent_complete': int(round(item['percent_complete'])), + 'watched_status': watched_status, + 'group_count': item['group_count'], + 'group_ids': item['group_ids'] } rows.append(row) @@ -154,11 +154,11 @@ class DataFactory(object): 'draw': query['draw'], 'filter_duration': helpers.human_duration(filter_duration, sig='dhm'), 'total_duration': helpers.human_duration(total_duration, sig='dhm') - } + } return dict - def get_home_stats(self, grouping=0, time_range='30', stats_type=0, stats_count='5', stats_cards='', notify_watched_percent='85'): + def get_home_stats(self, grouping=0, time_range='30', stats_type=0, stats_count='5', stats_cards=[], notify_watched_percent='85'): 
monitor_db = database.MonitorDatabase() group_by = 'session_history.reference_id' if grouping else 'session_history.id' @@ -185,8 +185,8 @@ class DataFactory(object): 'ORDER BY %s DESC ' \ 'LIMIT %s ' % (time_range, group_by, sort_type, stats_count) result = monitor_db.select(query) - except: - logger.warn("Unable to execute database query for get_home_stats: top_tv.") + except Exception as e: + logger.warn(u"PlexPy DataFactory :: Unable to execute database query for get_home_stats: top_tv: %s." % e) return None for item in result: @@ -229,8 +229,8 @@ class DataFactory(object): 'ORDER BY users_watched DESC, %s DESC ' \ 'LIMIT %s ' % (time_range, group_by, sort_type, stats_count) result = monitor_db.select(query) - except: - logger.warn("Unable to execute database query for get_home_stats: popular_tv.") + except Exception as e: + logger.warn(u"PlexPy DataFactory :: Unable to execute database query for get_home_stats: popular_tv: %s." % e) return None for item in result: @@ -270,8 +270,8 @@ class DataFactory(object): 'ORDER BY %s DESC ' \ 'LIMIT %s ' % (time_range, group_by, sort_type, stats_count) result = monitor_db.select(query) - except: - logger.warn("Unable to execute database query for get_home_stats: top_movies.") + except Exception as e: + logger.warn(u"PlexPy DataFactory :: Unable to execute database query for get_home_stats: top_movies: %s." % e) return None for item in result: @@ -313,8 +313,8 @@ class DataFactory(object): 'ORDER BY users_watched DESC, %s DESC ' \ 'LIMIT %s ' % (time_range, group_by, sort_type, stats_count) result = monitor_db.select(query) - except: - logger.warn("Unable to execute database query for get_home_stats: popular_movies.") + except Exception as e: + logger.warn(u"PlexPy DataFactory :: Unable to execute database query for get_home_stats: popular_movies: %s." 
% e) return None for item in result: @@ -354,8 +354,8 @@ class DataFactory(object): 'ORDER BY %s DESC ' \ 'LIMIT %s ' % (time_range, group_by, sort_type, stats_count) result = monitor_db.select(query) - except: - logger.warn("Unable to execute database query for get_home_stats: top_music.") + except Exception as e: + logger.warn(u"PlexPy DataFactory :: Unable to execute database query for get_home_stats: top_music: %s." % e) return None for item in result: @@ -398,8 +398,8 @@ class DataFactory(object): 'ORDER BY users_watched DESC, %s DESC ' \ 'LIMIT %s ' % (time_range, group_by, sort_type, stats_count) result = monitor_db.select(query) - except: - logger.warn("Unable to execute database query for get_home_stats: popular_music.") + except Exception as e: + logger.warn(u"PlexPy DataFactory :: Unable to execute database query for get_home_stats: popular_music: %s." % e) return None for item in result: @@ -424,13 +424,13 @@ class DataFactory(object): elif stat == 'top_users': top_users = [] try: - query = 'SELECT t.user, t.user_id, t.custom_avatar_url as thumb, ' \ + query = 'SELECT t.user, t.user_id, t.user_thumb, t.custom_thumb, ' \ '(CASE WHEN t.friendly_name IS NULL THEN t.username ELSE t.friendly_name END) ' \ ' AS friendly_name, ' \ 'MAX(t.started) AS last_watch, COUNT(t.id) AS total_plays, SUM(t.d) AS total_duration ' \ 'FROM (SELECT *, SUM(CASE WHEN stopped > 0 THEN (stopped - started) - ' \ ' (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) ' \ - ' AS d ' \ + ' AS d, users.thumb AS user_thumb, users.custom_avatar_url AS custom_thumb ' \ ' FROM session_history ' \ ' JOIN session_history_metadata ON session_history_metadata.id = session_history.id ' \ ' LEFT OUTER JOIN users ON session_history.user_id = users.user_id ' \ @@ -441,15 +441,17 @@ class DataFactory(object): 'ORDER BY %s DESC ' \ 'LIMIT %s ' % (time_range, group_by, sort_type, stats_count) result = monitor_db.select(query) - except: - logger.warn("Unable to execute 
database query for get_home_stats: top_users.") + except Exception as e: + logger.warn(u"PlexPy DataFactory :: Unable to execute database query for get_home_stats: top_users: %s." % e) return None for item in result: - if not item['thumb'] or item['thumb'] == '': - user_thumb = common.DEFAULT_USER_THUMB + if item['custom_thumb'] and item['custom_thumb'] != item['user_thumb']: + user_thumb = item['custom_thumb'] + elif item['user_thumb']: + user_thumb = item['user_thumb'] else: - user_thumb = item['thumb'] + user_thumb = common.DEFAULT_USER_THUMB row = {'user': item['user'], 'user_id': item['user_id'], @@ -490,8 +492,8 @@ class DataFactory(object): 'ORDER BY %s DESC ' \ 'LIMIT %s ' % (time_range, group_by, sort_type, stats_count) result = monitor_db.select(query) - except: - logger.warn("Unable to execute database query for get_home_stats: top_platforms.") + except Exception as e: + logger.warn(u"PlexPy DataFactory :: Unable to execute database query for get_home_stats: top_platforms: %s." % e) return None for item in result: @@ -542,8 +544,8 @@ class DataFactory(object): 'ORDER BY last_watch DESC ' \ 'LIMIT %s' % (time_range, group_by, notify_watched_percent, stats_count) result = monitor_db.select(query) - except: - logger.warn("Unable to execute database query for get_home_stats: last_watched.") + except Exception as e: + logger.warn(u"PlexPy DataFactory :: Unable to execute database query for get_home_stats: last_watched: %s." % e) return None for item in result: @@ -645,8 +647,8 @@ class DataFactory(object): result = monitor_db.select(query) if result: most_concurrent.append(calc_most_concurrent(title, result)) - except: - logger.warn("Unable to execute database query for get_home_stats: most_concurrent.") + except Exception as e: + logger.warn(u"PlexPy DataFactory :: Unable to execute database query for get_home_stats: most_concurrent: %s." 
% e) return None home_stats.append({'stat_id': stat, @@ -654,6 +656,35 @@ class DataFactory(object): return home_stats + def get_library_stats(self, library_cards=[]): + monitor_db = database.MonitorDatabase() + + library_stats = [] + + for id in library_cards: + if id.isdigit(): + try: + query = 'SELECT section_id, section_name, section_type, thumb, count, parent_count, child_count ' \ + 'FROM library_sections ' \ + 'WHERE section_id = %s ' % id + result = monitor_db.select(query) + except Exception as e: + logger.warn(u"PlexPy DataFactory :: Unable to execute database query for get_library_stats: %s." % e) + return None + + for item in result: + library = {'section_id': item['section_id'], + 'section_name': item['section_name'], + 'section_type': item['section_type'], + 'thumb': item['thumb'], + 'count': item['count'], + 'parent_count': item['parent_count'], + 'child_count': item['child_count'] + } + library_stats.append(library) + + return library_stats + def get_stream_details(self, row_id=None): monitor_db = database.MonitorDatabase() @@ -696,86 +727,33 @@ class DataFactory(object): return stream_output - def get_recently_watched(self, user=None, user_id=None, limit='10'): - monitor_db = database.MonitorDatabase() - recently_watched = [] - - if not limit.isdigit(): - limit = '10' - - try: - if user_id: - query = 'SELECT session_history.id, session_history.media_type, session_history.rating_key, session_history.parent_rating_key, ' \ - 'title, parent_title, grandparent_title, thumb, parent_thumb, grandparent_thumb, media_index, parent_media_index, ' \ - 'year, started, user ' \ - 'FROM session_history_metadata ' \ - 'JOIN session_history ON session_history_metadata.id = session_history.id ' \ - 'WHERE user_id = ? ' \ - 'GROUP BY (CASE WHEN session_history.media_type = "track" THEN session_history.parent_rating_key ' \ - ' ELSE session_history.rating_key END) ' \ - 'ORDER BY started DESC LIMIT ?' 
- result = monitor_db.select(query, args=[user_id, limit]) - elif user: - query = 'SELECT session_history.id, session_history.media_type, session_history.rating_key, session_history.parent_rating_key, ' \ - 'title, parent_title, grandparent_title, thumb, parent_thumb, grandparent_thumb, media_index, parent_media_index, ' \ - 'year, started, user ' \ - 'FROM session_history_metadata ' \ - 'JOIN session_history ON session_history_metadata.id = session_history.id ' \ - 'WHERE user = ? ' \ - 'GROUP BY (CASE WHEN session_history.media_type = "track" THEN session_history.parent_rating_key ' \ - ' ELSE session_history.rating_key END) ' \ - 'ORDER BY started DESC LIMIT ?' - result = monitor_db.select(query, args=[user, limit]) - else: - query = 'SELECT session_history.id, session_history.media_type, session_history.rating_key, session_history.parent_rating_key, ' \ - 'title, parent_title, grandparent_title, thumb, parent_thumb, grandparent_thumb, media_index, parent_media_index, ' \ - 'year, started, user ' \ - 'FROM session_history_metadata ' \ - 'JOIN session_history ON session_history_metadata.id = session_history.id ' \ - 'GROUP BY (CASE WHEN session_history.media_type = "track" THEN session_history.parent_rating_key ' \ - ' ELSE session_history.rating_key END) ' \ - 'ORDER BY started DESC LIMIT ?' 
- result = monitor_db.select(query, args=[limit]) - except: - logger.warn("Unable to execute database query for get_recently_watched.") - return None - - for row in result: - if row['media_type'] == 'episode' and row['parent_thumb']: - thumb = row['parent_thumb'] - elif row['media_type'] == 'episode': - thumb = row['grandparent_thumb'] - else: - thumb = row['thumb'] - - recent_output = {'row_id': row['id'], - 'type': row['media_type'], - 'rating_key': row['rating_key'], - 'title': row['title'], - 'parent_title': row['parent_title'], - 'grandparent_title': row['grandparent_title'], - 'thumb': thumb, - 'index': row['media_index'], - 'parent_index': row['parent_media_index'], - 'year': row['year'], - 'time': row['started'], - 'user': row['user'] - } - recently_watched.append(recent_output) - - return recently_watched - - def get_metadata_details(self, row_id): + def get_metadata_details(self, rating_key): monitor_db = database.MonitorDatabase() - if row_id: - query = 'SELECT rating_key, parent_rating_key, grandparent_rating_key, title, parent_title, grandparent_title, ' \ - 'full_title, media_index, parent_media_index, thumb, parent_thumb, grandparent_thumb, art, media_type, ' \ - 'year, originally_available_at, added_at, updated_at, last_viewed_at, content_rating, summary, tagline, ' \ - 'rating, duration, guid, directors, writers, actors, genres, studio ' \ + if rating_key: + query = 'SELECT session_history_metadata.rating_key, session_history_metadata.parent_rating_key, ' \ + 'session_history_metadata.grandparent_rating_key, session_history_metadata.title, ' \ + 'session_history_metadata.parent_title, session_history_metadata.grandparent_title, ' \ + 'session_history_metadata.full_title, library_sections.section_name, ' \ + 'session_history_metadata.media_index, session_history_metadata.parent_media_index, ' \ + 'session_history_metadata.section_id, session_history_metadata.thumb, ' \ + 'session_history_metadata.parent_thumb, 
session_history_metadata.grandparent_thumb, ' \ + 'session_history_metadata.art, session_history_metadata.media_type, session_history_metadata.year, ' \ + 'session_history_metadata.originally_available_at, session_history_metadata.added_at, ' \ + 'session_history_metadata.updated_at, session_history_metadata.last_viewed_at, ' \ + 'session_history_metadata.content_rating, session_history_metadata.summary, ' \ + 'session_history_metadata.tagline, session_history_metadata.rating, session_history_metadata.duration, ' \ + 'session_history_metadata.guid, session_history_metadata.directors, session_history_metadata.writers, ' \ + 'session_history_metadata.actors, session_history_metadata.genres, session_history_metadata.studio, ' \ + 'session_history_media_info.container, session_history_media_info.bitrate, ' \ + 'session_history_media_info.video_codec, session_history_media_info.video_resolution, ' \ + 'session_history_media_info.video_framerate, session_history_media_info.audio_codec, ' \ + 'session_history_media_info.audio_channels ' \ 'FROM session_history_metadata ' \ - 'WHERE id = ?' - result = monitor_db.select(query=query, args=[row_id]) + 'JOIN library_sections ON session_history_metadata.section_id = library_sections.section_id ' \ + 'JOIN session_history_media_info ON session_history_metadata.id = session_history_media_info.id ' \ + 'WHERE session_history_metadata.rating_key = ?' 
+ result = monitor_db.select(query=query, args=[rating_key]) else: result = [] @@ -791,9 +769,9 @@ class DataFactory(object): 'parent_rating_key': item['parent_rating_key'], 'grandparent_rating_key': item['grandparent_rating_key'], 'grandparent_title': item['grandparent_title'], - 'parent_index': item['parent_media_index'], + 'parent_media_index': item['parent_media_index'], 'parent_title': item['parent_title'], - 'index': item['media_index'], + 'media_index': item['media_index'], 'studio': item['studio'], 'title': item['title'], 'content_rating': item['content_rating'], @@ -814,88 +792,61 @@ class DataFactory(object): 'writers': writers, 'directors': directors, 'genres': genres, - 'actors': actors + 'actors': actors, + 'library_name': item['section_name'], + 'section_id': item['section_id'], + 'container': item['container'], + 'bitrate': item['bitrate'], + 'video_codec': item['video_codec'], + 'video_resolution': item['video_resolution'], + 'video_framerate': item['video_framerate'], + 'audio_codec': item['audio_codec'], + 'audio_channels': item['audio_channels'] } return metadata - def delete_session_history_rows(self, row_id=None): + def get_total_duration(self, custom_where=None): monitor_db = database.MonitorDatabase() - if row_id.isdigit(): - logger.info(u"PlexPy DataFactory :: Deleting row id %s from the session history database." % row_id) - session_history_del = \ - monitor_db.action('DELETE FROM session_history WHERE id = ?', [row_id]) - session_history_media_info_del = \ - monitor_db.action('DELETE FROM session_history_media_info WHERE id = ?', [row_id]) - session_history_metadata_del = \ - monitor_db.action('DELETE FROM session_history_metadata WHERE id = ?', [row_id]) - - return 'Deleted rows %s.' % row_id + # Split up custom wheres + if custom_where: + where = 'WHERE ' + ' AND '.join([w[0] + ' = "' + w[1] + '"' for w in custom_where]) else: - return 'Unable to delete rows. Input row not valid.' 
+ where = '' + + try: + query = 'SELECT SUM(CASE WHEN stopped > 0 THEN (stopped - started) ELSE 0 END) - ' \ + 'SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) AS total_duration ' \ + 'FROM session_history ' \ + 'JOIN session_history_metadata ON session_history_metadata.id = session_history.id ' \ + '%s ' % where + result = monitor_db.select(query) + except Exception as e: + logger.warn(u"PlexPy DataFactory :: Unable to execute database query for get_total_duration: %s." % e) + return None - def delete_all_user_history(self, user_id=None): + total_duration = 0 + for item in result: + total_duration = item['total_duration'] + + return total_duration + + def get_session_ip(self, session_key=''): monitor_db = database.MonitorDatabase() - if user_id.isdigit(): - logger.info(u"PlexPy DataFactory :: Deleting all history for user id %s from database." % user_id) - session_history_media_info_del = \ - monitor_db.action('DELETE FROM ' - 'session_history_media_info ' - 'WHERE session_history_media_info.id IN (SELECT session_history_media_info.id ' - 'FROM session_history_media_info ' - 'JOIN session_history ON session_history_media_info.id = session_history.id ' - 'WHERE session_history.user_id = ?)', [user_id]) - session_history_metadata_del = \ - monitor_db.action('DELETE FROM ' - 'session_history_metadata ' - 'WHERE session_history_metadata.id IN (SELECT session_history_metadata.id ' - 'FROM session_history_metadata ' - 'JOIN session_history ON session_history_metadata.id = session_history.id ' - 'WHERE session_history.user_id = ?)', [user_id]) - session_history_del = \ - monitor_db.action('DELETE FROM ' - 'session_history ' - 'WHERE session_history.user_id = ?', [user_id]) - - return 'Deleted all items for user_id %s.' % user_id + if session_key: + query = 'SELECT ip_address FROM sessions WHERE session_key = %d' % int(session_key) + result = monitor_db.select(query) else: - return 'Unable to delete items. Input user_id not valid.' 
+ return None - def delete_user(self, user_id=None): - monitor_db = database.MonitorDatabase() + ip_address = 'N/A' - if user_id.isdigit(): - self.delete_all_user_history(user_id) - logger.info(u"PlexPy DataFactory :: Deleting user with id %s from database." % user_id) - monitor_db.action('UPDATE users SET deleted_user = 1 WHERE user_id = ?', [user_id]) - monitor_db.action('UPDATE users SET keep_history = 0 WHERE user_id = ?', [user_id]) - monitor_db.action('UPDATE users SET do_notify = 0 WHERE user_id = ?', [user_id]) + for item in result: + ip_address = item['ip_address'] - return 'Deleted user with id %s.' % user_id - else: - return 'Unable to delete user. Input user_id not valid.' - - def undelete_user(self, user_id=None, username=None): - monitor_db = database.MonitorDatabase() - - if user_id and user_id.isdigit(): - logger.info(u"PlexPy DataFactory :: Re-adding user with id %s to database." % user_id) - monitor_db.action('UPDATE users SET deleted_user = 0 WHERE user_id = ?', [user_id]) - monitor_db.action('UPDATE users SET keep_history = 1 WHERE user_id = ?', [user_id]) - monitor_db.action('UPDATE users SET do_notify = 1 WHERE user_id = ?', [user_id]) - - return 'Re-added user with id %s.' % user_id - elif username: - logger.info(u"PlexPy DataFactory :: Re-adding user with username %s to database." % username) - monitor_db.action('UPDATE users SET deleted_user = 0 WHERE username = ?', [username]) - monitor_db.action('UPDATE users SET keep_history = 1 WHERE username = ?', [username]) - monitor_db.action('UPDATE users SET do_notify = 1 WHERE username = ?', [username]) - - return 'Re-added user with username %s.' % username - else: - return 'Unable to re-add user. Input user_id or username not valid.' 
+ return ip_address def get_search_query(self, rating_key=''): monitor_db = database.MonitorDatabase() @@ -984,8 +935,8 @@ class DataFactory(object): grandparent_rating_key = result[0]['grandparent_rating_key'] - except: - logger.warn("Unable to execute database query for get_rating_keys_list.") + except Exception as e: + logger.warn(u"PlexPy DataFactory :: Unable to execute database query for get_rating_keys_list: %s." % e) return {} query = 'SELECT rating_key, parent_rating_key, grandparent_rating_key, title, parent_title, grandparent_title, ' \ @@ -1028,7 +979,25 @@ class DataFactory(object): return key_list - def update_rating_key(self, old_key_list='', new_key_list='', media_type=''): + def delete_session_history_rows(self, row_id=None): + monitor_db = database.MonitorDatabase() + + if row_id.isdigit(): + logger.info(u"PlexPy DataFactory :: Deleting row id %s from the session history database." % row_id) + session_history_del = \ + monitor_db.action('DELETE FROM session_history WHERE id = ?', [row_id]) + session_history_media_info_del = \ + monitor_db.action('DELETE FROM session_history_media_info WHERE id = ?', [row_id]) + session_history_metadata_del = \ + monitor_db.action('DELETE FROM session_history_metadata WHERE id = ?', [row_id]) + + return 'Deleted rows %s.' % row_id + else: + return 'Unable to delete rows. Input row not valid.' 
+ + def update_metadata(self, old_key_list='', new_key_list='', media_type=''): + from plexpy import pmsconnect + pms_connect = pmsconnect.PmsConnect() monitor_db = database.MonitorDatabase() # function to map rating keys pairs @@ -1036,8 +1005,7 @@ class DataFactory(object): pairs = {} for k, v in old.iteritems(): if k in new: - if v['rating_key'] != new[k]['rating_key']: - pairs.update({v['rating_key']: new[k]['rating_key']}) + pairs.update({v['rating_key']: new[k]['rating_key']}) if 'children' in old[k]: pairs.update(get_pairs(old[k]['children'], new[k]['children'])) @@ -1049,89 +1017,73 @@ class DataFactory(object): mapping = get_pairs(old_key_list, new_key_list) if mapping: - logger.info(u"PlexPy DataFactory :: Updating rating keys in the database.") + logger.info(u"PlexPy DataFactory :: Updating metadata in the database.") for old_key, new_key in mapping.iteritems(): - # check rating_key (3 tables) - monitor_db.action('UPDATE session_history SET rating_key = ? WHERE rating_key = ?', - [new_key, old_key]) - monitor_db.action('UPDATE session_history_media_info SET rating_key = ? WHERE rating_key = ?', - [new_key, old_key]) - monitor_db.action('UPDATE session_history_metadata SET rating_key = ? WHERE rating_key = ?', - [new_key, old_key]) + result = pms_connect.get_metadata_details(new_key) - # check parent_rating_key (2 tables) - monitor_db.action('UPDATE session_history SET parent_rating_key = ? WHERE parent_rating_key = ?', - [new_key, old_key]) - monitor_db.action('UPDATE session_history_metadata SET parent_rating_key = ? WHERE parent_rating_key = ?', - [new_key, old_key]) + if result: + metadata = result['metadata'] + if metadata['media_type'] == 'show' or metadata['media_type'] == 'artist': + # check grandparent_rating_key (2 tables) + monitor_db.action('UPDATE session_history SET grandparent_rating_key = ? WHERE grandparent_rating_key = ?', + [new_key, old_key]) + monitor_db.action('UPDATE session_history_metadata SET grandparent_rating_key = ? 
WHERE grandparent_rating_key = ?', + [new_key, old_key]) + elif metadata['media_type'] == 'season' or metadata['media_type'] == 'album': + # check parent_rating_key (2 tables) + monitor_db.action('UPDATE session_history SET parent_rating_key = ? WHERE parent_rating_key = ?', + [new_key, old_key]) + monitor_db.action('UPDATE session_history_metadata SET parent_rating_key = ? WHERE parent_rating_key = ?', + [new_key, old_key]) + else: + # check rating_key (2 tables) + monitor_db.action('UPDATE session_history SET rating_key = ? WHERE rating_key = ?', + [new_key, old_key]) + monitor_db.action('UPDATE session_history_media_info SET rating_key = ? WHERE rating_key = ?', + [new_key, old_key]) - # check grandparent_rating_key (2 tables) - monitor_db.action('UPDATE session_history SET grandparent_rating_key = ? WHERE grandparent_rating_key = ?', - [new_key, old_key]) - monitor_db.action('UPDATE session_history_metadata SET grandparent_rating_key = ? WHERE grandparent_rating_key = ?', - [new_key, old_key]) + # update session_history_metadata table + self.update_metadata_details(old_key, new_key, metadata) - # check thumb (1 table) - monitor_db.action('UPDATE session_history_metadata SET thumb = replace(thumb, ?, ?) \ - WHERE thumb LIKE "/library/metadata/%s/thumb/%%"' % old_key, - [old_key, new_key]) - - # check parent_thumb (1 table) - monitor_db.action('UPDATE session_history_metadata SET parent_thumb = replace(parent_thumb, ?, ?) \ - WHERE parent_thumb LIKE "/library/metadata/%s/thumb/%%"' % old_key, - [old_key, new_key]) - - # check grandparent_thumb (1 table) - monitor_db.action('UPDATE session_history_metadata SET grandparent_thumb = replace(grandparent_thumb, ?, ?) \ - WHERE grandparent_thumb LIKE "/library/metadata/%s/thumb/%%"' % old_key, - [old_key, new_key]) - - # check art (1 table) - monitor_db.action('UPDATE session_history_metadata SET art = replace(art, ?, ?) 
\ - WHERE art LIKE "/library/metadata/%s/art/%%"' % old_key, - [old_key, new_key]) - - return 'Updated rating key in database.' + return 'Updated metadata in database.' else: - return 'No updated rating key needed in database. No changes were made.' - # for debugging - #return mapping + return 'Unable to update metadata in database. No changes were made.' - def get_session_ip(self, session_key=''): - monitor_db = database.MonitorDatabase() + def update_metadata_details(self, old_rating_key='', new_rating_key='', metadata=None): - if session_key: - query = 'SELECT ip_address FROM sessions WHERE session_key = %d' % int(session_key) - result = monitor_db.select(query) - else: - return None + if metadata: + # Create full_title + if metadata['media_type'] == 'episode' or metadata['media_type'] == 'track': + full_title = '%s - %s' % (metadata['grandparent_title'], metadata['title']) + else: + full_title = metadata['title'] - ip_address = 'N/A' + directors = ";".join(metadata['directors']) + writers = ";".join(metadata['writers']) + actors = ";".join(metadata['actors']) + genres = ";".join(metadata['genres']) - for item in result: - ip_address = item['ip_address'] + #logger.info(u"PlexPy DataFactory :: Updating metadata in the database for rating key: %s." % new_rating_key) + monitor_db = database.MonitorDatabase() - return ip_address + # Update the session_history_metadata table + query = 'UPDATE session_history_metadata SET rating_key = ?, parent_rating_key = ?, ' \ + 'grandparent_rating_key = ?, title = ?, parent_title = ?, grandparent_title = ?, full_title = ?, ' \ + 'media_index = ?, parent_media_index = ?, section_id = ?, thumb = ?, parent_thumb = ?, ' \ + 'grandparent_thumb = ?, art = ?, media_type = ?, year = ?, originally_available_at = ?, ' \ + 'added_at = ?, updated_at = ?, last_viewed_at = ?, content_rating = ?, summary = ?, ' \ + 'tagline = ?, rating = ?, duration = ?, guid = ?, directors = ?, writers = ?, actors = ?, ' \ + 'genres = ?, studio = ? 
' \ + 'WHERE rating_key = ?' - def get_total_duration(self, custom_where=None): - monitor_db = database.MonitorDatabase() + args = [metadata['rating_key'], metadata['parent_rating_key'], metadata['grandparent_rating_key'], + metadata['title'], metadata['parent_title'], metadata['grandparent_title'], full_title, + metadata['media_index'], metadata['parent_media_index'], metadata['section_id'], metadata['thumb'], + metadata['parent_thumb'], metadata['grandparent_thumb'], metadata['art'], metadata['media_type'], + metadata['year'], metadata['originally_available_at'], metadata['added_at'], metadata['updated_at'], + metadata['last_viewed_at'], metadata['content_rating'], metadata['summary'], metadata['tagline'], + metadata['rating'], metadata['duration'], metadata['guid'], directors, writers, actors, genres, + metadata['studio'], + old_rating_key] - # Split up custom wheres - if custom_where: - where = 'WHERE ' + ' AND '.join([w[0] + ' = "' + w[1] + '"' for w in custom_where]) - else: - where = '' - - try: - query = 'SELECT SUM(CASE WHEN stopped > 0 THEN (stopped - started) ELSE 0 END) - ' \ - 'SUM(CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) AS total_duration ' \ - 'FROM session_history %s ' % where - result = monitor_db.select(query) - except: - logger.warn("Unable to execute database query for get_total_duration.") - return None - - for item in result: - total_duration = item['total_duration'] - - return total_duration + monitor_db.action(query=query, args=args) \ No newline at end of file diff --git a/plexpy/datatables.py b/plexpy/datatables.py index 3cf8f0ca..6c0a9b98 100644 --- a/plexpy/datatables.py +++ b/plexpy/datatables.py @@ -124,8 +124,9 @@ class DataTables(object): order += ', ' + order = order.rstrip(', ') if order: - order = 'ORDER BY ' + order.rstrip(', ') + order = 'ORDER BY ' + order # Build where parameters if parameters['search']['value']: diff --git a/plexpy/graphs.py b/plexpy/graphs.py index cb4e72f5..bcede4d5 100644 --- 
a/plexpy/graphs.py +++ b/plexpy/graphs.py @@ -32,10 +32,10 @@ class Graphs(object): try: if y_axis == 'plays': - query = 'SELECT date(started, "unixepoch", "localtime") as date_played, ' \ - 'SUM(case when media_type = "episode" then 1 else 0 end) as tv_count, ' \ - 'SUM(case when media_type = "movie" then 1 else 0 end) as movie_count, ' \ - 'SUM(case when media_type = "track" then 1 else 0 end) as music_count ' \ + query = 'SELECT date(started, "unixepoch", "localtime") AS date_played, ' \ + 'SUM(CASE WHEN media_type = "episode" THEN 1 ELSE 0 END) AS tv_count, ' \ + 'SUM(CASE WHEN media_type = "movie" THEN 1 ELSE 0 END) AS movie_count, ' \ + 'SUM(CASE WHEN media_type = "track" THEN 1 ELSE 0 END) AS music_count ' \ 'FROM session_history ' \ 'WHERE datetime(stopped, "unixepoch", "localtime") >= datetime("now", "-%s days", "localtime") ' \ 'GROUP BY date_played ' \ @@ -43,21 +43,21 @@ class Graphs(object): result = monitor_db.select(query) else: - query = 'SELECT date(started, "unixepoch", "localtime") as date_played, ' \ - 'SUM(case when media_type = "episode" and stopped > 0 then (stopped - started) ' \ - ' - (case when paused_counter is NULL then 0 else paused_counter end) else 0 end) as tv_count, ' \ - 'SUM(case when media_type = "movie" and stopped > 0 then (stopped - started) ' \ - ' - (case when paused_counter is NULL then 0 else paused_counter end) else 0 end) as movie_count, ' \ - 'SUM(case when media_type = "track" and stopped > 0 then (stopped - started) ' \ - ' - (case when paused_counter is NULL then 0 else paused_counter end) else 0 end) as music_count ' \ + query = 'SELECT date(started, "unixepoch", "localtime") AS date_played, ' \ + 'SUM(CASE WHEN media_type = "episode" AND stopped > 0 THEN (stopped - started) ' \ + ' - (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) AS tv_count, ' \ + 'SUM(CASE WHEN media_type = "movie" AND stopped > 0 THEN (stopped - started) ' \ + ' - (CASE WHEN paused_counter IS NULL THEN 0 ELSE 
paused_counter END) ELSE 0 END) AS movie_count, ' \ + 'SUM(CASE WHEN media_type = "track" AND stopped > 0 THEN (stopped - started) ' \ + ' - (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) AS music_count ' \ 'FROM session_history ' \ 'WHERE datetime(stopped, "unixepoch", "localtime") >= datetime("now", "-%s days", "localtime") ' \ 'GROUP BY date_played ' \ 'ORDER BY started ASC' % time_range result = monitor_db.select(query) - except: - logger.warn("Unable to execute database query.") + except Exception as e: + logger.warn(u"PlexPy Graphs :: Unable to execute database query for get_total_plays_per_day: %s." % e) return None # create our date range as some days may not have any data @@ -108,49 +108,51 @@ class Graphs(object): if not time_range.isdigit(): time_range = '30' - if y_axis == 'plays': - query = 'SELECT strftime("%w", datetime(started, "unixepoch", "localtime")) as daynumber, ' \ - 'case cast (strftime("%w", datetime(started, "unixepoch", "localtime")) as integer) ' \ - 'when 0 then "Sunday" ' \ - 'when 1 then "Monday" ' \ - 'when 2 then "Tuesday" ' \ - 'when 3 then "Wednesday" ' \ - 'when 4 then "Thursday" ' \ - 'when 5 then "Friday" ' \ - 'else "Saturday" end as dayofweek, ' \ - 'SUM(case when media_type = "episode" then 1 else 0 end) as tv_count, ' \ - 'SUM(case when media_type = "movie" then 1 else 0 end) as movie_count, ' \ - 'SUM(case when media_type = "track" then 1 else 0 end) as music_count ' \ - 'FROM session_history ' \ - 'WHERE datetime(stopped, "unixepoch", "localtime") >= ' \ - 'datetime("now", "-' + time_range + ' days", "localtime") ' \ - 'GROUP BY dayofweek ' \ - 'ORDER BY daynumber' + try: + if y_axis == 'plays': + query = 'SELECT strftime("%%w", datetime(started, "unixepoch", "localtime")) AS daynumber, ' \ + '(CASE CAST(strftime("%%w", datetime(started, "unixepoch", "localtime")) AS INTEGER) ' \ + 'WHEN 0 THEN "Sunday" ' \ + 'WHEN 1 THEN "Monday" ' \ + 'WHEN 2 THEN "Tuesday" ' \ + 'WHEN 3 THEN "Wednesday" ' 
\ + 'WHEN 4 THEN "Thursday" ' \ + 'WHEN 5 THEN "Friday" ' \ + 'ELSE "Saturday" END) AS dayofweek, ' \ + 'SUM(CASE WHEN media_type = "episode" THEN 1 ELSE 0 END) AS tv_count, ' \ + 'SUM(CASE WHEN media_type = "movie" THEN 1 ELSE 0 END) AS movie_count, ' \ + 'SUM(CASE WHEN media_type = "track" THEN 1 ELSE 0 END) AS music_count ' \ + 'FROM session_history ' \ + 'WHERE datetime(stopped, "unixepoch", "localtime") >= datetime("now", "-%s days", "localtime") ' \ + 'GROUP BY dayofweek ' \ + 'ORDER BY daynumber' % time_range - result = monitor_db.select(query) - else: - query = 'SELECT strftime("%w", datetime(started, "unixepoch", "localtime")) as daynumber, ' \ - 'case cast (strftime("%w", datetime(started, "unixepoch", "localtime")) as integer) ' \ - 'when 0 then "Sunday" ' \ - 'when 1 then "Monday" ' \ - 'when 2 then "Tuesday" ' \ - 'when 3 then "Wednesday" ' \ - 'when 4 then "Thursday" ' \ - 'when 5 then "Friday" ' \ - 'else "Saturday" end as dayofweek, ' \ - 'SUM(case when media_type = "episode" and stopped > 0 then (stopped - started) ' \ - ' - (case when paused_counter is NULL then 0 else paused_counter end) else 0 end) as tv_count, ' \ - 'SUM(case when media_type = "movie" and stopped > 0 then (stopped - started) ' \ - ' - (case when paused_counter is NULL then 0 else paused_counter end) else 0 end) as movie_count, ' \ - 'SUM(case when media_type = "track" and stopped > 0 then (stopped - started) ' \ - ' - (case when paused_counter is NULL then 0 else paused_counter end) else 0 end) as music_count ' \ - 'FROM session_history ' \ - 'WHERE datetime(stopped, "unixepoch", "localtime") >= ' \ - 'datetime("now", "-' + time_range + ' days", "localtime") ' \ - 'GROUP BY dayofweek ' \ - 'ORDER BY daynumber' + result = monitor_db.select(query) + else: + query = 'SELECT strftime("%%w", datetime(started, "unixepoch", "localtime")) AS daynumber, ' \ + '(CASE CAST(strftime("%%w", datetime(started, "unixepoch", "localtime")) AS INTEGER) ' \ + 'WHEN 0 THEN "Sunday" ' \ + 'WHEN 1 
THEN "Monday" ' \ + 'WHEN 2 THEN "Tuesday" ' \ + 'WHEN 3 THEN "Wednesday" ' \ + 'WHEN 4 THEN "Thursday" ' \ + 'WHEN 5 THEN "Friday" ' \ + 'ELSE "Saturday" END) AS dayofweek, ' \ + 'SUM(CASE WHEN media_type = "episode" AND stopped > 0 THEN (stopped - started) ' \ + ' - (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) AS tv_count, ' \ + 'SUM(CASE WHEN media_type = "movie" AND stopped > 0 THEN (stopped - started) ' \ + ' - (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) AS movie_count, ' \ + 'SUM(CASE WHEN media_type = "track" AND stopped > 0 THEN (stopped - started) ' \ + ' - (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) AS music_count ' \ + 'FROM session_history ' \ + 'WHERE datetime(stopped, "unixepoch", "localtime") >= datetime("now", "-%s days", "localtime") ' \ + 'GROUP BY dayofweek ' \ + 'ORDER BY daynumber' % time_range - result = monitor_db.select(query) + result = monitor_db.select(query) + except Exception as e: + logger.warn(u"PlexPy Graphs :: Unable to execute database query for get_total_plays_per_dayofweek: %s." 
% e) + return None days_list = ['Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday'] @@ -197,33 +199,35 @@ class Graphs(object): if not time_range.isdigit(): time_range = '30' - if y_axis == 'plays': - query = 'select strftime("%H", datetime(started, "unixepoch", "localtime")) as hourofday, ' \ - 'SUM(case when media_type = "episode" then 1 else 0 end) as tv_count, ' \ - 'SUM(case when media_type = "movie" then 1 else 0 end) as movie_count, ' \ - 'SUM(case when media_type = "track" then 1 else 0 end) as music_count ' \ - 'FROM session_history ' \ - 'WHERE datetime(stopped, "unixepoch", "localtime") >= ' \ - 'datetime("now", "-' + time_range + ' days", "localtime") ' \ - 'GROUP BY hourofday ' \ - 'ORDER BY hourofday' + try: + if y_axis == 'plays': + query = 'SELECT strftime("%%H", datetime(started, "unixepoch", "localtime")) AS hourofday, ' \ + 'SUM(CASE WHEN media_type = "episode" THEN 1 ELSE 0 END) AS tv_count, ' \ + 'SUM(CASE WHEN media_type = "movie" THEN 1 ELSE 0 END) AS movie_count, ' \ + 'SUM(CASE WHEN media_type = "track" THEN 1 ELSE 0 END) AS music_count ' \ + 'FROM session_history ' \ + 'WHERE datetime(stopped, "unixepoch", "localtime") >= datetime("now", "-%s days", "localtime") ' \ + 'GROUP BY hourofday ' \ + 'ORDER BY hourofday' % time_range - result = monitor_db.select(query) - else: - query = 'select strftime("%H", datetime(started, "unixepoch", "localtime")) as hourofday, ' \ - 'SUM(case when media_type = "episode" and stopped > 0 then (stopped - started) ' \ - ' - (case when paused_counter is NULL then 0 else paused_counter end) else 0 end) as tv_count, ' \ - 'SUM(case when media_type = "movie" and stopped > 0 then (stopped - started) ' \ - ' - (case when paused_counter is NULL then 0 else paused_counter end) else 0 end) as movie_count, ' \ - 'SUM(case when media_type = "track" and stopped > 0 then (stopped - started) ' \ - ' - (case when paused_counter is NULL then 0 else paused_counter end) else 0 end) as music_count ' \ - 
'FROM session_history ' \ - 'WHERE datetime(stopped, "unixepoch", "localtime") >= ' \ - 'datetime("now", "-' + time_range + ' days", "localtime") ' \ - 'GROUP BY hourofday ' \ - 'ORDER BY hourofday' + result = monitor_db.select(query) + else: + query = 'SELECT strftime("%%H", datetime(started, "unixepoch", "localtime")) AS hourofday, ' \ + 'SUM(CASE WHEN media_type = "episode" AND stopped > 0 THEN (stopped - started) ' \ + ' - (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) AS tv_count, ' \ + 'SUM(CASE WHEN media_type = "movie" AND stopped > 0 THEN (stopped - started) ' \ + ' - (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) AS movie_count, ' \ + 'SUM(CASE WHEN media_type = "track" AND stopped > 0 THEN (stopped - started) ' \ + ' - (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) AS music_count ' \ + 'FROM session_history ' \ + 'WHERE datetime(stopped, "unixepoch", "localtime") >= datetime("now", "-%s days", "localtime") ' \ + 'GROUP BY hourofday ' \ + 'ORDER BY hourofday' % time_range - result = monitor_db.select(query) + result = monitor_db.select(query) + except Exception as e: + logger.warn(u"PlexPy Graphs :: Unable to execute database query for get_total_plays_per_hourofday: %s." 
% e) + return None hours_list = ['00','01','02','03','04','05', '06','07','08','09','10','11', @@ -270,31 +274,36 @@ class Graphs(object): import time as time monitor_db = database.MonitorDatabase() - if y_axis == 'plays': - query = 'SELECT strftime("%Y-%m", datetime(started, "unixepoch", "localtime")) as datestring, ' \ - 'SUM(case when media_type = "episode" then 1 else 0 end) as tv_count, ' \ - 'SUM(case when media_type = "movie" then 1 else 0 end) as movie_count, ' \ - 'SUM(case when media_type = "track" then 1 else 0 end) as music_count ' \ - 'FROM session_history ' \ - 'WHERE datetime(started, "unixepoch", "localtime") >= datetime("now", "-12 months", "localtime") ' \ - 'GROUP BY strftime("%Y-%m", datetime(started, "unixepoch", "localtime")) ' \ - 'ORDER BY datestring DESC LIMIT 12' - result = monitor_db.select(query) - else: - query = 'SELECT strftime("%Y-%m", datetime(started, "unixepoch", "localtime")) as datestring, ' \ - 'SUM(case when media_type = "episode" and stopped > 0 then (stopped - started) ' \ - ' - (case when paused_counter is NULL then 0 else paused_counter end) else 0 end) as tv_count, ' \ - 'SUM(case when media_type = "movie" and stopped > 0 then (stopped - started) ' \ - ' - (case when paused_counter is NULL then 0 else paused_counter end) else 0 end) as movie_count, ' \ - 'SUM(case when media_type = "track" and stopped > 0 then (stopped - started) ' \ - ' - (case when paused_counter is NULL then 0 else paused_counter end) else 0 end) as music_count ' \ - 'FROM session_history ' \ - 'WHERE datetime(started, "unixepoch", "localtime") >= datetime("now", "-12 months", "localtime") ' \ - 'GROUP BY strftime("%Y-%m", datetime(started, "unixepoch", "localtime")) ' \ - 'ORDER BY datestring DESC LIMIT 12' + try: + if y_axis == 'plays': + query = 'SELECT strftime("%Y-%m", datetime(started, "unixepoch", "localtime")) AS datestring, ' \ + 'SUM(CASE WHEN media_type = "episode" THEN 1 ELSE 0 END) AS tv_count, ' \ + 'SUM(CASE WHEN media_type = "movie" 
THEN 1 ELSE 0 END) AS movie_count, ' \ + 'SUM(CASE WHEN media_type = "track" THEN 1 ELSE 0 END) AS music_count ' \ + 'FROM session_history ' \ + 'WHERE datetime(started, "unixepoch", "localtime") >= datetime("now", "-12 months", "localtime") ' \ + 'GROUP BY strftime("%Y-%m", datetime(started, "unixepoch", "localtime")) ' \ + 'ORDER BY datestring DESC LIMIT 12' - result = monitor_db.select(query) + result = monitor_db.select(query) + else: + query = 'SELECT strftime("%Y-%m", datetime(started, "unixepoch", "localtime")) AS datestring, ' \ + 'SUM(CASE WHEN media_type = "episode" AND stopped > 0 THEN (stopped - started) ' \ + ' - (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) AS tv_count, ' \ + 'SUM(CASE WHEN media_type = "movie" AND stopped > 0 THEN (stopped - started) ' \ + ' - (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) AS movie_count, ' \ + 'SUM(CASE WHEN media_type = "track" AND stopped > 0 THEN (stopped - started) ' \ + ' - (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) AS music_count ' \ + 'FROM session_history ' \ + 'WHERE datetime(started, "unixepoch", "localtime") >= datetime("now", "-12 months", "localtime") ' \ + 'GROUP BY strftime("%Y-%m", datetime(started, "unixepoch", "localtime")) ' \ + 'ORDER BY datestring DESC LIMIT 12' + + result = monitor_db.select(query) + except Exception as e: + logger.warn(u"PlexPy Graphs :: Unable to execute database query for get_total_plays_per_month: %s." 
% e) + return None # create our date range as some months may not have any data # but we still want to display them @@ -348,38 +357,40 @@ class Graphs(object): if not time_range.isdigit(): time_range = '30' - if y_axis == 'plays': - query = 'SELECT platform, ' \ - 'SUM(case when media_type = "episode" then 1 else 0 end) as tv_count, ' \ - 'SUM(case when media_type = "movie" then 1 else 0 end) as movie_count, ' \ - 'SUM(case when media_type = "track" then 1 else 0 end) as music_count, ' \ - 'COUNT(id) as total_count ' \ - 'FROM session_history ' \ - 'WHERE (datetime(stopped, "unixepoch", "localtime") >= ' \ - 'datetime("now", "-' + time_range + ' days", "localtime")) ' \ - 'GROUP BY platform ' \ - 'ORDER BY total_count DESC ' \ - 'LIMIT 10' + try: + if y_axis == 'plays': + query = 'SELECT platform, ' \ + 'SUM(CASE WHEN media_type = "episode" THEN 1 ELSE 0 END) AS tv_count, ' \ + 'SUM(CASE WHEN media_type = "movie" THEN 1 ELSE 0 END) AS movie_count, ' \ + 'SUM(CASE WHEN media_type = "track" THEN 1 ELSE 0 END) AS music_count, ' \ + 'COUNT(id) AS total_count ' \ + 'FROM session_history ' \ + 'WHERE (datetime(stopped, "unixepoch", "localtime") >= datetime("now", "-%s days", "localtime")) ' \ + 'GROUP BY platform ' \ + 'ORDER BY total_count DESC ' \ + 'LIMIT 10' % time_range - result = monitor_db.select(query) - else: - query = 'SELECT platform, ' \ - 'SUM(case when media_type = "episode" and stopped > 0 then (stopped - started) ' \ - ' - (case when paused_counter is NULL then 0 else paused_counter end) else 0 end) as tv_count, ' \ - 'SUM(case when media_type = "movie" and stopped > 0 then (stopped - started) ' \ - ' - (case when paused_counter is NULL then 0 else paused_counter end) else 0 end) as movie_count, ' \ - 'SUM(case when media_type = "track" and stopped > 0 then (stopped - started) ' \ - ' - (case when paused_counter is NULL then 0 else paused_counter end) else 0 end) as music_count, ' \ - 'SUM(case when stopped > 0 then (stopped - started) ' \ - ' - (case 
when paused_counter is NULL then 0 else paused_counter end) else 0 end) as total_duration ' \ - 'FROM session_history ' \ - 'WHERE (datetime(stopped, "unixepoch", "localtime") >= ' \ - 'datetime("now", "-' + time_range + ' days", "localtime")) ' \ - 'GROUP BY platform ' \ - 'ORDER BY total_duration DESC ' \ - 'LIMIT 10' + result = monitor_db.select(query) + else: + query = 'SELECT platform, ' \ + 'SUM(CASE WHEN media_type = "episode" AND stopped > 0 THEN (stopped - started) ' \ + ' - (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) AS tv_count, ' \ + 'SUM(CASE WHEN media_type = "movie" AND stopped > 0 THEN (stopped - started) ' \ + ' - (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) AS movie_count, ' \ + 'SUM(CASE WHEN media_type = "track" AND stopped > 0 THEN (stopped - started) ' \ + ' - (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) AS music_count, ' \ + 'SUM(CASE WHEN stopped > 0 THEN (stopped - started) ' \ + ' - (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) AS total_duration ' \ + 'FROM session_history ' \ + 'WHERE (datetime(stopped, "unixepoch", "localtime") >= datetime("now", "-%s days", "localtime")) ' \ + 'GROUP BY platform ' \ + 'ORDER BY total_duration DESC ' \ + 'LIMIT 10' % time_range - result = monitor_db.select(query) + result = monitor_db.select(query) + except Exception as e: + logger.warn(u"PlexPy Graphs :: Unable to execute database query for get_total_plays_by_top_10_platforms: %s." 
% e) + return None categories = [] series_1 = [] @@ -409,44 +420,44 @@ class Graphs(object): if not time_range.isdigit(): time_range = '30' - if y_axis == 'plays': - query = 'SELECT ' \ - '(case when users.friendly_name is null then users.username else ' \ - 'users.friendly_name end) as friendly_name,' \ - 'SUM(case when media_type = "episode" then 1 else 0 end) as tv_count, ' \ - 'SUM(case when media_type = "movie" then 1 else 0 end) as movie_count, ' \ - 'SUM(case when media_type = "track" then 1 else 0 end) as music_count, ' \ - 'COUNT(session_history.id) as total_count ' \ - 'FROM session_history ' \ - 'JOIN users on session_history.user_id = users.user_id ' \ - 'WHERE (datetime(session_history.stopped, "unixepoch", "localtime") >= ' \ - 'datetime("now", "-' + time_range + ' days", "localtime")) ' \ - 'GROUP BY session_history.user_id ' \ - 'ORDER BY total_count DESC ' \ - 'LIMIT 10' + try: + if y_axis == 'plays': + query = 'SELECT ' \ + '(CASE WHEN users.friendly_name IS NULL THEN users.username ELSE users.friendly_name END) AS friendly_name,' \ + 'SUM(CASE WHEN media_type = "episode" THEN 1 ELSE 0 END) AS tv_count, ' \ + 'SUM(CASE WHEN media_type = "movie" THEN 1 ELSE 0 END) AS movie_count, ' \ + 'SUM(CASE WHEN media_type = "track" THEN 1 ELSE 0 END) AS music_count, ' \ + 'COUNT(session_history.id) AS total_count ' \ + 'FROM session_history ' \ + 'JOIN users ON session_history.user_id = users.user_id ' \ + 'WHERE (datetime(stopped, "unixepoch", "localtime") >= datetime("now", "-%s days", "localtime")) ' \ + 'GROUP BY session_history.user_id ' \ + 'ORDER BY total_count DESC ' \ + 'LIMIT 10' % time_range - result = monitor_db.select(query) - else: - query = 'SELECT ' \ - '(case when users.friendly_name is null then users.username else ' \ - 'users.friendly_name end) as friendly_name,' \ - 'SUM(case when media_type = "episode" and stopped > 0 then (stopped - started) ' \ - ' - (case when paused_counter is NULL then 0 else paused_counter end) else 0 end) as 
tv_count, ' \ - 'SUM(case when media_type = "movie" and stopped > 0 then (stopped - started) ' \ - ' - (case when paused_counter is NULL then 0 else paused_counter end) else 0 end) as movie_count, ' \ - 'SUM(case when media_type = "track" and stopped > 0 then (stopped - started) ' \ - ' - (case when paused_counter is NULL then 0 else paused_counter end) else 0 end) as music_count, ' \ - 'SUM(case when stopped > 0 then (stopped - started) ' \ - ' - (case when paused_counter is NULL then 0 else paused_counter end) else 0 end) as total_duration ' \ - 'FROM session_history ' \ - 'JOIN users on session_history.user_id = users.user_id ' \ - 'WHERE (datetime(session_history.stopped, "unixepoch", "localtime") >= ' \ - 'datetime("now", "-' + time_range + ' days", "localtime")) ' \ - 'GROUP BY session_history.user_id ' \ - 'ORDER BY total_duration DESC ' \ - 'LIMIT 10' + result = monitor_db.select(query) + else: + query = 'SELECT ' \ + '(CASE WHEN users.friendly_name IS NULL THEN users.username ELSE users.friendly_name END) AS friendly_name,' \ + 'SUM(CASE WHEN media_type = "episode" AND stopped > 0 THEN (stopped - started) ' \ + ' - (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) AS tv_count, ' \ + 'SUM(CASE WHEN media_type = "movie" AND stopped > 0 THEN (stopped - started) ' \ + ' - (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) AS movie_count, ' \ + 'SUM(CASE WHEN media_type = "track" AND stopped > 0 THEN (stopped - started) ' \ + ' - (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) AS music_count, ' \ + 'SUM(CASE WHEN stopped > 0 THEN (stopped - started) ' \ + ' - (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) AS total_duration ' \ + 'FROM session_history ' \ + 'JOIN users ON session_history.user_id = users.user_id ' \ + 'WHERE (datetime(stopped, "unixepoch", "localtime") >= datetime("now", "-%s days", "localtime")) ' \ + 'GROUP BY 
session_history.user_id ' \ + 'ORDER BY total_duration DESC ' \ + 'LIMIT 10' % time_range - result = monitor_db.select(query) + result = monitor_db.select(query) + except Exception as e: + logger.warn(u"PlexPy Graphs :: Unable to execute database query for get_total_plays_by_top_10_users: %s." % e) + return None categories = [] series_1 = [] @@ -478,16 +489,13 @@ class Graphs(object): try: if y_axis == 'plays': - query = 'SELECT date(session_history.started, "unixepoch", "localtime") as date_played, ' \ - 'SUM(case when (session_history_media_info.video_decision = "direct play" ' \ - 'or session_history_media_info.audio_decision = "direct play") ' \ - 'then 1 else 0 end) as dp_count, ' \ - 'SUM(case when (session_history_media_info.video_decision != "transcode" ' \ - 'and session_history_media_info.audio_decision = "copy") ' \ - 'then 1 else 0 end) as ds_count, ' \ - 'SUM(case when (session_history_media_info.video_decision = "transcode" ' \ - 'or session_history_media_info.audio_decision = "transcode") ' \ - 'then 1 else 0 end) as tc_count ' \ + query = 'SELECT date(session_history.started, "unixepoch", "localtime") AS date_played, ' \ + 'SUM(CASE WHEN (session_history_media_info.video_decision = "direct play" ' \ + 'OR session_history_media_info.audio_decision = "direct play") THEN 1 ELSE 0 END) AS dp_count, ' \ + 'SUM(CASE WHEN (session_history_media_info.video_decision != "transcode" ' \ + 'AND session_history_media_info.audio_decision = "copy") THEN 1 ELSE 0 END) AS ds_count, ' \ + 'SUM(CASE WHEN (session_history_media_info.video_decision = "transcode" ' \ + 'OR session_history_media_info.audio_decision = "transcode") THEN 1 ELSE 0 END) AS tc_count ' \ 'FROM session_history ' \ 'JOIN session_history_media_info ON session_history.id = session_history_media_info.id ' \ 'WHERE (datetime(session_history.stopped, "unixepoch", "localtime") >= ' \ @@ -498,19 +506,19 @@ class Graphs(object): result = monitor_db.select(query) else: - query = 'SELECT 
date(session_history.started, "unixepoch", "localtime") as date_played, ' \ - 'SUM(case when (session_history_media_info.video_decision = "direct play" ' \ - 'or session_history_media_info.audio_decision = "direct play") ' \ - 'and session_history.stopped > 0 then (session_history.stopped - session_history.started) ' \ - ' - (case when paused_counter is NULL then 0 else paused_counter end) else 0 end) as dp_count, ' \ - 'SUM(case when (session_history_media_info.video_decision != "transcode" ' \ - 'and session_history_media_info.audio_decision = "copy") ' \ - 'and session_history.stopped > 0 then (session_history.stopped - session_history.started) ' \ - ' - (case when paused_counter is NULL then 0 else paused_counter end) else 0 end) as ds_count, ' \ - 'SUM(case when (session_history_media_info.video_decision = "transcode" ' \ - 'or session_history_media_info.audio_decision = "transcode") ' \ - 'and session_history.stopped > 0 then (session_history.stopped - session_history.started) ' \ - ' - (case when paused_counter is NULL then 0 else paused_counter end) else 0 end) as tc_count ' \ + query = 'SELECT date(session_history.started, "unixepoch", "localtime") AS date_played, ' \ + 'SUM(CASE WHEN (session_history_media_info.video_decision = "direct play" ' \ + 'OR session_history_media_info.audio_decision = "direct play") ' \ + 'AND session_history.stopped > 0 THEN (session_history.stopped - session_history.started) ' \ + ' - (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) AS dp_count, ' \ + 'SUM(CASE WHEN (session_history_media_info.video_decision != "transcode" ' \ + 'AND session_history_media_info.audio_decision = "copy") ' \ + 'AND session_history.stopped > 0 THEN (session_history.stopped - session_history.started) ' \ + ' - (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) AS ds_count, ' \ + 'SUM(CASE WHEN (session_history_media_info.video_decision = "transcode" ' \ + 'OR 
session_history_media_info.audio_decision = "transcode") ' \ + 'AND session_history.stopped > 0 THEN (session_history.stopped - session_history.started) ' \ + ' - (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) AS tc_count ' \ 'FROM session_history ' \ 'JOIN session_history_media_info ON session_history.id = session_history_media_info.id ' \ 'WHERE datetime(session_history.stopped, "unixepoch", "localtime") >= ' \ @@ -520,8 +528,8 @@ class Graphs(object): 'ORDER BY started ASC' % time_range result = monitor_db.select(query) - except: - logger.warn("Unable to execute database query.") + except Exception as e: + logger.warn(u"PlexPy Graphs :: Unable to execute database query for get_total_plays_per_stream_type: %s." % e) return None # create our date range as some days may not have any data @@ -572,54 +580,55 @@ class Graphs(object): if not time_range.isdigit(): time_range = '30' - if y_axis == 'plays': - query = 'SELECT session_history_media_info.video_resolution AS resolution, ' \ - 'SUM(case when (session_history_media_info.video_decision = "direct play" ' \ - 'or session_history_media_info.audio_decision = "direct play") ' \ - 'then 1 else 0 end) as dp_count, ' \ - 'SUM(case when (session_history_media_info.video_decision != "transcode" ' \ - 'and session_history_media_info.audio_decision = "copy") ' \ - 'then 1 else 0 end) as ds_count, ' \ - 'SUM(case when (session_history_media_info.video_decision = "transcode" ' \ - 'or session_history_media_info.audio_decision = "transcode") ' \ - 'then 1 else 0 end) as tc_count, ' \ - 'COUNT(session_history.id) as total_count ' \ - 'FROM session_history ' \ - 'JOIN session_history_media_info on session_history.id = session_history_media_info.id ' \ - 'WHERE (datetime(session_history.stopped, "unixepoch", "localtime") >= ' \ - 'datetime("now", "-' + time_range + ' days", "localtime")) AND ' \ - '(session_history.media_type = "episode" OR session_history.media_type = "movie") ' \ - 'GROUP BY 
resolution ' \ - 'ORDER BY total_count DESC ' \ - 'LIMIT 10' + try: + if y_axis == 'plays': + query = 'SELECT session_history_media_info.video_resolution AS resolution, ' \ + 'SUM(CASE WHEN (session_history_media_info.video_decision = "direct play" ' \ + 'OR session_history_media_info.audio_decision = "direct play") THEN 1 ELSE 0 END) AS dp_count, ' \ + 'SUM(CASE WHEN (session_history_media_info.video_decision != "transcode" ' \ + 'AND session_history_media_info.audio_decision = "copy") THEN 1 ELSE 0 END) AS ds_count, ' \ + 'SUM(CASE WHEN (session_history_media_info.video_decision = "transcode" ' \ + 'OR session_history_media_info.audio_decision = "transcode") THEN 1 ELSE 0 END) AS tc_count, ' \ + 'COUNT(session_history.id) AS total_count ' \ + 'FROM session_history ' \ + 'JOIN session_history_media_info ON session_history.id = session_history_media_info.id ' \ + 'WHERE (datetime(session_history.stopped, "unixepoch", "localtime") >= ' \ + 'datetime("now", "-%s days", "localtime")) AND ' \ + '(session_history.media_type = "episode" OR session_history.media_type = "movie") ' \ + 'GROUP BY resolution ' \ + 'ORDER BY total_count DESC ' \ + 'LIMIT 10' % time_range - result = monitor_db.select(query) - else: - query = 'SELECT session_history_media_info.video_resolution AS resolution,' \ - 'SUM(case when (session_history_media_info.video_decision = "direct play" ' \ - 'or session_history_media_info.audio_decision = "direct play") ' \ - 'and session_history.stopped > 0 then (session_history.stopped - session_history.started) ' \ - ' - (case when paused_counter is NULL then 0 else paused_counter end) else 0 end) as dp_count, ' \ - 'SUM(case when (session_history_media_info.video_decision != "transcode" ' \ - 'and session_history_media_info.audio_decision = "copy") ' \ - 'and session_history.stopped > 0 then (session_history.stopped - session_history.started) ' \ - ' - (case when paused_counter is NULL then 0 else paused_counter end) else 0 end) as ds_count, ' \ - 'SUM(case 
when (session_history_media_info.video_decision = "transcode" ' \ - 'or session_history_media_info.audio_decision = "transcode") ' \ - 'and session_history.stopped > 0 then (session_history.stopped - session_history.started) ' \ - ' - (case when paused_counter is NULL then 0 else paused_counter end) else 0 end) as tc_count, ' \ - 'SUM(case when stopped > 0 then (stopped - started) ' \ - ' - (case when paused_counter is NULL then 0 else paused_counter end) else 0 end) as total_duration ' \ - 'FROM session_history ' \ - 'JOIN session_history_media_info on session_history.id = session_history_media_info.id ' \ - 'WHERE (datetime(session_history.stopped, "unixepoch", "localtime") >= ' \ - 'datetime("now", "-' + time_range + ' days", "localtime")) AND ' \ - '(session_history.media_type = "episode" OR session_history.media_type = "movie") ' \ - 'GROUP BY resolution ' \ - 'ORDER BY total_duration DESC ' \ - 'LIMIT 10' + result = monitor_db.select(query) + else: + query = 'SELECT session_history_media_info.video_resolution AS resolution,' \ + 'SUM(CASE WHEN (session_history_media_info.video_decision = "direct play" ' \ + 'OR session_history_media_info.audio_decision = "direct play") ' \ + 'AND session_history.stopped > 0 THEN (session_history.stopped - session_history.started) ' \ + ' - (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) AS dp_count, ' \ + 'SUM(CASE WHEN (session_history_media_info.video_decision != "transcode" ' \ + 'AND session_history_media_info.audio_decision = "copy") ' \ + 'AND session_history.stopped > 0 THEN (session_history.stopped - session_history.started) ' \ + ' - (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) AS ds_count, ' \ + 'SUM(CASE WHEN (session_history_media_info.video_decision = "transcode" ' \ + 'OR session_history_media_info.audio_decision = "transcode") ' \ + 'AND session_history.stopped > 0 THEN (session_history.stopped - session_history.started) ' \ + ' - (CASE WHEN 
paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) AS tc_count, ' \ + 'SUM(CASE WHEN stopped > 0 THEN (stopped - started) ' \ + ' - (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) AS total_duration ' \ + 'FROM session_history ' \ + 'JOIN session_history_media_info ON session_history.id = session_history_media_info.id ' \ + 'WHERE (datetime(session_history.stopped, "unixepoch", "localtime") >= ' \ + 'datetime("now", "-%s days", "localtime")) AND ' \ + '(session_history.media_type = "episode" OR session_history.media_type = "movie") ' \ + 'GROUP BY resolution ' \ + 'ORDER BY total_duration DESC ' \ + 'LIMIT 10' % time_range - result = monitor_db.select(query) + result = monitor_db.select(query) + except Exception as e: + logger.warn(u"PlexPy Graphs :: Unable to execute database query for get_total_plays_by_source_resolution: %s." % e) + return None categories = [] series_1 = [] @@ -649,74 +658,75 @@ class Graphs(object): if not time_range.isdigit(): time_range = '30' - if y_axis == 'plays': - query = 'SELECT ' \ - '(case when session_history_media_info.video_decision = "transcode" then ' \ - '(case ' \ - 'when session_history_media_info.transcode_height <= 360 then "sd" ' \ - 'when session_history_media_info.transcode_height <= 480 then "480" ' \ - 'when session_history_media_info.transcode_height <= 576 then "576" ' \ - 'when session_history_media_info.transcode_height <= 720 then "720" ' \ - 'when session_history_media_info.transcode_height <= 1080 then "1080" ' \ - 'when session_history_media_info.transcode_height <= 1440 then "QHD" ' \ - 'when session_history_media_info.transcode_height <= 2160 then "4K" ' \ - 'else "unknown" end) else session_history_media_info.video_resolution end) as resolution, ' \ - 'SUM(case when (session_history_media_info.video_decision = "direct play" ' \ - 'or session_history_media_info.audio_decision = "direct play") ' \ - 'then 1 else 0 end) as dp_count, ' \ - 'SUM(case when 
(session_history_media_info.video_decision != "transcode" ' \ - 'and session_history_media_info.audio_decision = "copy") ' \ - 'then 1 else 0 end) as ds_count, ' \ - 'SUM(case when (session_history_media_info.video_decision = "transcode" '\ - 'or session_history_media_info.audio_decision = "transcode") ' \ - 'then 1 else 0 end) as tc_count, ' \ - 'COUNT(session_history.id) as total_count ' \ - 'FROM session_history ' \ - 'JOIN session_history_media_info on session_history.id = session_history_media_info.id ' \ - 'WHERE (datetime(session_history.stopped, "unixepoch", "localtime") >= ' \ - 'datetime("now", "-' + time_range + ' days", "localtime")) AND ' \ - '(session_history.media_type = "episode" OR session_history.media_type = "movie") ' \ - 'GROUP BY resolution ' \ - 'ORDER BY total_count DESC ' \ - 'LIMIT 10' + try: + if y_axis == 'plays': + query = 'SELECT ' \ + '(CASE WHEN session_history_media_info.video_decision = "transcode" THEN ' \ + '(CASE ' \ + 'WHEN session_history_media_info.transcode_height <= 360 THEN "sd" ' \ + 'WHEN session_history_media_info.transcode_height <= 480 THEN "480" ' \ + 'WHEN session_history_media_info.transcode_height <= 576 THEN "576" ' \ + 'WHEN session_history_media_info.transcode_height <= 720 THEN "720" ' \ + 'WHEN session_history_media_info.transcode_height <= 1080 THEN "1080" ' \ + 'WHEN session_history_media_info.transcode_height <= 1440 THEN "QHD" ' \ + 'WHEN session_history_media_info.transcode_height <= 2160 THEN "4K" ' \ + 'ELSE "unknown" END) ELSE session_history_media_info.video_resolution END) AS resolution, ' \ + 'SUM(CASE WHEN (session_history_media_info.video_decision = "direct play" ' \ + 'OR session_history_media_info.audio_decision = "direct play") THEN 1 ELSE 0 END) AS dp_count, ' \ + 'SUM(CASE WHEN (session_history_media_info.video_decision != "transcode" ' \ + 'AND session_history_media_info.audio_decision = "copy") THEN 1 ELSE 0 END) AS ds_count, ' \ + 'SUM(CASE WHEN (session_history_media_info.video_decision 
= "transcode" '\ + 'OR session_history_media_info.audio_decision = "transcode") THEN 1 ELSE 0 END) AS tc_count, ' \ + 'COUNT(session_history.id) AS total_count ' \ + 'FROM session_history ' \ + 'JOIN session_history_media_info ON session_history.id = session_history_media_info.id ' \ + 'WHERE (datetime(session_history.stopped, "unixepoch", "localtime") >= ' \ + 'datetime("now", "-%s days", "localtime")) AND ' \ + '(session_history.media_type = "episode" OR session_history.media_type = "movie") ' \ + 'GROUP BY resolution ' \ + 'ORDER BY total_count DESC ' \ + 'LIMIT 10' % time_range - result = monitor_db.select(query) - else: - query = 'SELECT ' \ - '(case when session_history_media_info.video_decision = "transcode" then ' \ - '(case ' \ - 'when session_history_media_info.transcode_height <= 360 then "sd" ' \ - 'when session_history_media_info.transcode_height <= 480 then "480" ' \ - 'when session_history_media_info.transcode_height <= 576 then "576" ' \ - 'when session_history_media_info.transcode_height <= 720 then "720" ' \ - 'when session_history_media_info.transcode_height <= 1080 then "1080" ' \ - 'when session_history_media_info.transcode_height <= 1440 then "QHD" ' \ - 'when session_history_media_info.transcode_height <= 2160 then "4K" ' \ - 'else "unknown" end) else session_history_media_info.video_resolution end) as resolution, ' \ - 'SUM(case when (session_history_media_info.video_decision = "direct play" ' \ - 'or session_history_media_info.audio_decision = "direct play") ' \ - 'and session_history.stopped > 0 then (session_history.stopped - session_history.started) ' \ - ' - (case when paused_counter is NULL then 0 else paused_counter end) else 0 end) as dp_count, ' \ - 'SUM(case when (session_history_media_info.video_decision != "transcode" ' \ - 'and session_history_media_info.audio_decision = "copy") ' \ - 'and session_history.stopped > 0 then (session_history.stopped - session_history.started) ' \ - ' - (case when paused_counter is NULL then 0 else 
paused_counter end) else 0 end) as ds_count, ' \ - 'SUM(case when (session_history_media_info.video_decision = "transcode" ' \ - 'or session_history_media_info.audio_decision = "transcode") ' \ - 'and session_history.stopped > 0 then (session_history.stopped - session_history.started) ' \ - ' - (case when paused_counter is NULL then 0 else paused_counter end) else 0 end) as tc_count, ' \ - 'SUM(case when stopped > 0 then (stopped - started) ' \ - ' - (case when paused_counter is NULL then 0 else paused_counter end) else 0 end) as total_duration ' \ - 'FROM session_history ' \ - 'JOIN session_history_media_info on session_history.id = session_history_media_info.id ' \ - 'WHERE (datetime(session_history.stopped, "unixepoch", "localtime") >= ' \ - 'datetime("now", "-' + time_range + ' days", "localtime")) AND ' \ - '(session_history.media_type = "episode" OR session_history.media_type = "movie") ' \ - 'GROUP BY resolution ' \ - 'ORDER BY total_duration DESC ' \ - 'LIMIT 10' + result = monitor_db.select(query) + else: + query = 'SELECT ' \ + '(CASE WHEN session_history_media_info.video_decision = "transcode" THEN ' \ + '(CASE ' \ + 'WHEN session_history_media_info.transcode_height <= 360 THEN "sd" ' \ + 'WHEN session_history_media_info.transcode_height <= 480 THEN "480" ' \ + 'WHEN session_history_media_info.transcode_height <= 576 THEN "576" ' \ + 'WHEN session_history_media_info.transcode_height <= 720 THEN "720" ' \ + 'WHEN session_history_media_info.transcode_height <= 1080 THEN "1080" ' \ + 'WHEN session_history_media_info.transcode_height <= 1440 THEN "QHD" ' \ + 'WHEN session_history_media_info.transcode_height <= 2160 THEN "4K" ' \ + 'ELSE "unknown" END) ELSE session_history_media_info.video_resolution END) AS resolution, ' \ + 'SUM(CASE WHEN (session_history_media_info.video_decision = "direct play" ' \ + 'OR session_history_media_info.audio_decision = "direct play") ' \ + 'AND session_history.stopped > 0 THEN (session_history.stopped - 
session_history.started) ' \ + ' - (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) AS dp_count, ' \ + 'SUM(CASE WHEN (session_history_media_info.video_decision != "transcode" ' \ + 'AND session_history_media_info.audio_decision = "copy") ' \ + 'AND session_history.stopped > 0 THEN (session_history.stopped - session_history.started) ' \ + ' - (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) AS ds_count, ' \ + 'SUM(CASE WHEN (session_history_media_info.video_decision = "transcode" ' \ + 'OR session_history_media_info.audio_decision = "transcode") ' \ + 'AND session_history.stopped > 0 THEN (session_history.stopped - session_history.started) ' \ + ' - (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) AS tc_count, ' \ + 'SUM(CASE WHEN stopped > 0 THEN (stopped - started) ' \ + ' - (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) AS total_duration ' \ + 'FROM session_history ' \ + 'JOIN session_history_media_info ON session_history.id = session_history_media_info.id ' \ + 'WHERE (datetime(session_history.stopped, "unixepoch", "localtime") >= ' \ + 'datetime("now", "-%s days", "localtime")) AND ' \ + '(session_history.media_type = "episode" OR session_history.media_type = "movie") ' \ + 'GROUP BY resolution ' \ + 'ORDER BY total_duration DESC ' \ + 'LIMIT 10' % time_range - result = monitor_db.select(query) + result = monitor_db.select(query) + except Exception as e: + logger.warn(u"PlexPy Graphs :: Unable to execute database query for get_total_plays_by_stream_resolution: %s." 
% e) + return None categories = [] series_1 = [] @@ -746,55 +756,54 @@ class Graphs(object): if not time_range.isdigit(): time_range = '30' - if y_axis == 'plays': - query = 'SELECT ' \ - 'session_history.platform as platform, ' \ - 'SUM(case when (session_history_media_info.video_decision = "direct play" ' \ - 'or session_history_media_info.audio_decision = "direct play") ' \ - 'then 1 else 0 end) as dp_count, ' \ - 'SUM(case when (session_history_media_info.video_decision != "transcode" ' \ - 'and session_history_media_info.audio_decision = "copy") ' \ - 'then 1 else 0 end) as ds_count, ' \ - 'SUM(case when (session_history_media_info.video_decision = "transcode" ' \ - 'or session_history_media_info.audio_decision = "transcode") ' \ - 'then 1 else 0 end) as tc_count, ' \ - 'COUNT(session_history.id) as total_count ' \ - 'FROM session_history ' \ - 'JOIN session_history_media_info ON session_history.id = session_history_media_info.id ' \ - 'WHERE datetime(session_history.started, "unixepoch", "localtime") >= ' \ - 'datetime("now", "-' + time_range + ' days", "localtime") AND ' \ - '(session_history.media_type = "episode" OR session_history.media_type = "movie" OR session_history.media_type = "track") ' \ - 'GROUP BY platform ' \ - 'ORDER BY total_count DESC LIMIT 10' + try: + if y_axis == 'plays': + query = 'SELECT session_history.platform AS platform, ' \ + 'SUM(CASE WHEN (session_history_media_info.video_decision = "direct play" ' \ + 'OR session_history_media_info.audio_decision = "direct play") THEN 1 ELSE 0 END) AS dp_count, ' \ + 'SUM(CASE WHEN (session_history_media_info.video_decision != "transcode" ' \ + 'AND session_history_media_info.audio_decision = "copy") THEN 1 ELSE 0 END) AS ds_count, ' \ + 'SUM(CASE WHEN (session_history_media_info.video_decision = "transcode" ' \ + 'OR session_history_media_info.audio_decision = "transcode") THEN 1 ELSE 0 END) AS tc_count, ' \ + 'COUNT(session_history.id) AS total_count ' \ + 'FROM session_history ' \ + 'JOIN 
session_history_media_info ON session_history.id = session_history_media_info.id ' \ + 'WHERE datetime(session_history.started, "unixepoch", "localtime") >= ' \ + 'datetime("now", "-%s days", "localtime") AND ' \ + '(session_history.media_type = "episode" OR session_history.media_type = "movie" OR session_history.media_type = "track") ' \ + 'GROUP BY platform ' \ + 'ORDER BY total_count DESC LIMIT 10' % time_range - result = monitor_db.select(query) - else: - query = 'SELECT ' \ - 'session_history.platform as platform, ' \ - 'SUM(case when (session_history_media_info.video_decision = "direct play" ' \ - 'or session_history_media_info.audio_decision = "direct play") ' \ - 'and session_history.stopped > 0 then (session_history.stopped - session_history.started) ' \ - ' - (case when paused_counter is NULL then 0 else paused_counter end) else 0 end) as dp_count, ' \ - 'SUM(case when (session_history_media_info.video_decision != "transcode" ' \ - 'and session_history_media_info.audio_decision = "copy") ' \ - 'and session_history.stopped > 0 then (session_history.stopped - session_history.started) ' \ - ' - (case when paused_counter is NULL then 0 else paused_counter end) else 0 end) as ds_count, ' \ - 'SUM(case when (session_history_media_info.video_decision = "transcode" ' \ - 'and session_history_media_info.audio_decision = "transcode") ' \ - 'and session_history.stopped > 0 then (session_history.stopped - session_history.started) ' \ - ' - (case when paused_counter is NULL then 0 else paused_counter end) else 0 end) as tc_count, ' \ - 'SUM(case when session_history.stopped > 0 ' \ - 'then (session_history.stopped - session_history.started) ' \ - ' - (case when paused_counter is NULL then 0 else paused_counter end) else 0 end) as total_duration ' \ - 'FROM session_history ' \ - 'JOIN session_history_media_info ON session_history.id = session_history_media_info.id ' \ - 'WHERE datetime(session_history.started, "unixepoch", "localtime") >= ' \ - 'datetime("now", "-' + 
time_range + ' days", "localtime") AND ' \ - '(session_history.media_type = "episode" OR session_history.media_type = "movie" OR session_history.media_type = "track") ' \ - 'GROUP BY platform ' \ - 'ORDER BY total_duration DESC LIMIT 10' + result = monitor_db.select(query) + else: + query = 'SELECT session_history.platform AS platform, ' \ + 'SUM(CASE WHEN (session_history_media_info.video_decision = "direct play" ' \ + 'OR session_history_media_info.audio_decision = "direct play") ' \ + 'AND session_history.stopped > 0 THEN (session_history.stopped - session_history.started) ' \ + ' - (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) AS dp_count, ' \ + 'SUM(CASE WHEN (session_history_media_info.video_decision != "transcode" ' \ + 'AND session_history_media_info.audio_decision = "copy") ' \ + 'AND session_history.stopped > 0 THEN (session_history.stopped - session_history.started) ' \ + ' - (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) AS ds_count, ' \ + 'SUM(CASE WHEN (session_history_media_info.video_decision = "transcode" ' \ + 'AND session_history_media_info.audio_decision = "transcode") ' \ + 'AND session_history.stopped > 0 THEN (session_history.stopped - session_history.started) ' \ + ' - (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) AS tc_count, ' \ + 'SUM(CASE WHEN session_history.stopped > 0 ' \ + 'THEN (session_history.stopped - session_history.started) ' \ + ' - (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) AS total_duration ' \ + 'FROM session_history ' \ + 'JOIN session_history_media_info ON session_history.id = session_history_media_info.id ' \ + 'WHERE datetime(session_history.started, "unixepoch", "localtime") >= ' \ + 'datetime("now", "-%s days", "localtime") AND ' \ + '(session_history.media_type = "episode" OR session_history.media_type = "movie" OR session_history.media_type = "track") ' \ + 'GROUP BY platform ' \ + 'ORDER 
BY total_duration DESC LIMIT 10' % time_range - result = monitor_db.select(query) + result = monitor_db.select(query) + except Exception as e: + logger.warn(u"PlexPy Graphs :: Unable to execute database query for get_stream_type_by_top_10_platforms: %s." % e) + return None categories = [] series_1 = [] @@ -825,57 +834,58 @@ class Graphs(object): if not time_range.isdigit(): time_range = '30' - if y_axis == 'plays': - query = 'SELECT ' \ - 'CASE WHEN users.friendly_name is null then users.username else users.friendly_name end as username, ' \ - 'SUM(case when (session_history_media_info.video_decision = "direct play" ' \ - 'or session_history_media_info.audio_decision = "direct play") ' \ - 'then 1 else 0 end) as dp_count, ' \ - 'SUM(case when (session_history_media_info.video_decision != "transcode" ' \ - 'and session_history_media_info.audio_decision = "copy") ' \ - 'then 1 else 0 end) as ds_count, ' \ - 'SUM(case when (session_history_media_info.video_decision = "transcode" ' \ - 'or session_history_media_info.audio_decision = "transcode") ' \ - 'then 1 else 0 end) as tc_count, ' \ - 'COUNT(session_history.id) as total_count ' \ - 'FROM session_history ' \ - 'JOIN users ON session_history.user_id = users.user_id ' \ - 'JOIN session_history_media_info ON session_history.id = session_history_media_info.id ' \ - 'WHERE datetime(session_history.started, "unixepoch", "localtime") >= ' \ - 'datetime("now", "-' + time_range + ' days", "localtime") AND ' \ - '(session_history.media_type = "episode" OR session_history.media_type = "movie" OR session_history.media_type = "track") ' \ - 'GROUP BY username ' \ - 'ORDER BY total_count DESC LIMIT 10' + try: + if y_axis == 'plays': + query = 'SELECT ' \ + '(CASE WHEN users.friendly_name IS NULL THEN users.username ELSE users.friendly_name END) AS username, ' \ + 'SUM(CASE WHEN (session_history_media_info.video_decision = "direct play" ' \ + 'OR session_history_media_info.audio_decision = "direct play") THEN 1 ELSE 0 END) AS 
dp_count, ' \ + 'SUM(CASE WHEN (session_history_media_info.video_decision != "transcode" ' \ + 'AND session_history_media_info.audio_decision = "copy") THEN 1 ELSE 0 END) AS ds_count, ' \ + 'SUM(CASE WHEN (session_history_media_info.video_decision = "transcode" ' \ + 'OR session_history_media_info.audio_decision = "transcode") THEN 1 ELSE 0 END) AS tc_count, ' \ + 'COUNT(session_history.id) AS total_count ' \ + 'FROM session_history ' \ + 'JOIN users ON session_history.user_id = users.user_id ' \ + 'JOIN session_history_media_info ON session_history.id = session_history_media_info.id ' \ + 'WHERE datetime(session_history.started, "unixepoch", "localtime") >= ' \ + 'datetime("now", "-%s days", "localtime") AND ' \ + '(session_history.media_type = "episode" OR session_history.media_type = "movie" OR session_history.media_type = "track") ' \ + 'GROUP BY username ' \ + 'ORDER BY total_count DESC LIMIT 10' % time_range - result = monitor_db.select(query) - else: - query = 'SELECT ' \ - 'CASE WHEN users.friendly_name is null then users.username else users.friendly_name end as username, ' \ - 'SUM(case when (session_history_media_info.video_decision = "direct play" ' \ - 'or session_history_media_info.audio_decision = "direct play") ' \ - 'and session_history.stopped > 0 then (session_history.stopped - session_history.started) ' \ - ' - (case when paused_counter is NULL then 0 else paused_counter end) else 0 end) as dp_count, ' \ - 'SUM(case when (session_history_media_info.video_decision != "transcode" ' \ - 'and session_history_media_info.audio_decision = "copy") ' \ - 'and session_history.stopped > 0 then (session_history.stopped - session_history.started) ' \ - ' - (case when paused_counter is NULL then 0 else paused_counter end) else 0 end) as ds_count, ' \ - 'SUM(case when (session_history_media_info.video_decision = "transcode" ' \ - 'and session_history_media_info.audio_decision = "transcode") ' \ - 'and session_history.stopped > 0 then (session_history.stopped - 
session_history.started) ' \ - ' - (case when paused_counter is NULL then 0 else paused_counter end) else 0 end) as tc_count, ' \ - 'SUM(case when session_history.stopped > 0 ' \ - 'then (session_history.stopped - session_history.started) ' \ - ' - (case when paused_counter is NULL then 0 else paused_counter end) else 0 end) as total_duration ' \ - 'FROM session_history ' \ - 'JOIN users ON session_history.user_id = users.user_id ' \ - 'JOIN session_history_media_info ON session_history.id = session_history_media_info.id ' \ - 'WHERE datetime(session_history.started, "unixepoch", "localtime") >= ' \ - 'datetime("now", "-' + time_range + ' days", "localtime") AND ' \ - '(session_history.media_type = "episode" OR session_history.media_type = "movie" OR session_history.media_type = "track") ' \ - 'GROUP BY username ' \ - 'ORDER BY total_duration DESC LIMIT 10' + result = monitor_db.select(query) + else: + query = 'SELECT ' \ + '(CASE WHEN users.friendly_name IS NULL THEN users.username ELSE users.friendly_name END) AS username, ' \ + 'SUM(CASE WHEN (session_history_media_info.video_decision = "direct play" ' \ + 'OR session_history_media_info.audio_decision = "direct play") ' \ + 'AND session_history.stopped > 0 THEN (session_history.stopped - session_history.started) ' \ + ' - (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) AS dp_count, ' \ + 'SUM(CASE WHEN (session_history_media_info.video_decision != "transcode" ' \ + 'AND session_history_media_info.audio_decision = "copy") ' \ + 'AND session_history.stopped > 0 THEN (session_history.stopped - session_history.started) ' \ + ' - (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) AS ds_count, ' \ + 'SUM(CASE WHEN (session_history_media_info.video_decision = "transcode" ' \ + 'AND session_history_media_info.audio_decision = "transcode") ' \ + 'AND session_history.stopped > 0 THEN (session_history.stopped - session_history.started) ' \ + ' - (CASE WHEN 
paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) AS tc_count, ' \ + 'SUM(CASE WHEN session_history.stopped > 0 ' \ + 'THEN (session_history.stopped - session_history.started) ' \ + ' - (CASE WHEN paused_counter IS NULL THEN 0 ELSE paused_counter END) ELSE 0 END) AS total_duration ' \ + 'FROM session_history ' \ + 'JOIN users ON session_history.user_id = users.user_id ' \ + 'JOIN session_history_media_info ON session_history.id = session_history_media_info.id ' \ + 'WHERE datetime(session_history.started, "unixepoch", "localtime") >= ' \ + 'datetime("now", "-%s days", "localtime") AND ' \ + '(session_history.media_type = "episode" OR session_history.media_type = "movie" OR session_history.media_type = "track") ' \ + 'GROUP BY username ' \ + 'ORDER BY total_duration DESC LIMIT 10' % time_range - result = monitor_db.select(query) + result = monitor_db.select(query) + except Exception as e: + logger.warn(u"PlexPy Graphs :: Unable to execute database query for get_stream_type_by_top_10_users: %s." 
% e) + return None categories = [] series_1 = [] diff --git a/plexpy/helpers.py b/plexpy/helpers.py index 6319f423..fe955dcb 100644 --- a/plexpy/helpers.py +++ b/plexpy/helpers.py @@ -150,7 +150,7 @@ def human_duration(s, sig='dhms'): hd = '' - if str(s).isdigit(): + if str(s).isdigit() and s > 0: d = int(s / 84600) h = int((s % 84600) / 3600) m = int(((s % 84600) % 3600) / 60) @@ -173,6 +173,8 @@ def human_duration(s, sig='dhms'): hd_list.append(str(s) + ' secs') hd = ' '.join(hd_list) + else: + hd = '0' return hd @@ -364,13 +366,18 @@ def create_https_certificates(ssl_cert, ssl_key): return True +def cast_to_int(s): + try: + return int(s) + except ValueError: + return -1 + def cast_to_float(s): try: return float(s) except ValueError: return -1 - def convert_xml_to_json(xml): o = xmltodict.parse(xml) return json.dumps(o) diff --git a/plexpy/libraries.py b/plexpy/libraries.py new file mode 100644 index 00000000..a400390f --- /dev/null +++ b/plexpy/libraries.py @@ -0,0 +1,835 @@ +# This file is part of PlexPy. +# +# PlexPy is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# PlexPy is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with PlexPy. If not, see . 
+ +from plexpy import logger, datatables, common, database, helpers +import plexpy + +def update_section_ids(): + from plexpy import pmsconnect, activity_pinger + + plexpy.CONFIG.UPDATE_SECTION_IDS = -1 + + logger.info(u"PlexPy Libraries :: Updating section_id's in database.") + + logger.debug(u"PlexPy Libraries :: Disabling monitoring while update in progress.") + plexpy.schedule_job(activity_pinger.check_active_sessions, 'Check for active sessions', + hours=0, minutes=0, seconds=0) + plexpy.schedule_job(activity_pinger.check_recently_added, 'Check for recently added items', + hours=0, minutes=0, seconds=0) + plexpy.schedule_job(activity_pinger.check_server_response, 'Check for server response', + hours=0, minutes=0, seconds=0) + + monitor_db = database.MonitorDatabase() + + try: + query = 'SELECT id, rating_key FROM session_history_metadata WHERE section_id IS NULL' + result = monitor_db.select(query=query) + except Exception as e: + logger.warn(u"PlexPy Libraries :: Unable to execute database query for update_section_ids: %s." % e) + + logger.debug(u"PlexPy Libraries :: Unable to update section_id's in database.") + plexpy.CONFIG.__setattr__('UPDATE_SECTION_IDS', 1) + plexpy.CONFIG.write() + + logger.debug(u"PlexPy Libraries :: Re-enabling monitoring.") + plexpy.initialize_scheduler() + return None + + pms_connect = pmsconnect.PmsConnect() + + error_keys = set() + for item in result: + id = item['id'] + rating_key = item['rating_key'] + metadata = pms_connect.get_metadata_details(rating_key=rating_key) + + if metadata: + metadata = metadata['metadata'] + section_keys = {'id': id} + section_values = {'section_id': metadata['section_id']} + monitor_db.upsert('session_history_metadata', key_dict=section_keys, value_dict=section_values) + else: + error_keys.add(rating_key) + + if error_keys: + logger.debug(u"PlexPy Libraries :: Updated all section_id's in database except for rating_keys: %s." 
% + ', '.join(str(key) for key in error_keys)) + else: + logger.debug(u"PlexPy Libraries :: Updated all section_id's in database.") + + plexpy.CONFIG.__setattr__('UPDATE_SECTION_IDS', 0) + plexpy.CONFIG.write() + + logger.debug(u"PlexPy Libraries :: Re-enabling monitoring.") + plexpy.initialize_scheduler() + + return True + +class Libraries(object): + + def __init__(self): + pass + + def get_datatables_list(self, kwargs=None): + data_tables = datatables.DataTables() + + custom_where = ['library_sections.deleted_section', 0] + + columns = ['library_sections.section_id', + 'library_sections.section_name', + 'library_sections.section_type', + 'library_sections.count', + 'library_sections.parent_count', + 'library_sections.child_count', + 'library_sections.thumb AS library_thumb', + 'library_sections.custom_thumb_url AS custom_thumb', + 'library_sections.art', + 'COUNT(session_history.id) AS plays', + 'MAX(session_history.started) AS last_accessed', + 'session_history_metadata.full_title AS last_watched', + 'session_history_metadata.thumb', + 'session_history_metadata.parent_thumb', + 'session_history_metadata.grandparent_thumb', + 'session_history_metadata.media_type', + 'session_history.rating_key', + 'session_history_media_info.video_decision', + 'library_sections.do_notify', + 'library_sections.do_notify_created', + 'library_sections.keep_history' + ] + try: + query = data_tables.ssp_query(table_name='library_sections', + columns=columns, + custom_where=[custom_where], + group_by=['library_sections.section_id'], + join_types=['LEFT OUTER JOIN', + 'LEFT OUTER JOIN', + 'LEFT OUTER JOIN'], + join_tables=['session_history_metadata', + 'session_history', + 'session_history_media_info'], + join_evals=[['session_history_metadata.section_id', 'library_sections.section_id'], + ['session_history_metadata.id', 'session_history.id'], + ['session_history_metadata.id', 'session_history_media_info.id']], + kwargs=kwargs) + except Exception as e: + logger.warn(u"PlexPy Libraries 
:: Unable to execute database query for get_list: %s." % e) + return {'recordsFiltered': 0, + 'recordsTotal': 0, + 'draw': 0, + 'data': 'null', + 'error': 'Unable to execute database query.'} + + result = query['result'] + + rows = [] + for item in result: + if item['media_type'] == 'episode' and item['parent_thumb']: + thumb = item['parent_thumb'] + elif item['media_type'] == 'episode': + thumb = item['grandparent_thumb'] + else: + thumb = item['thumb'] + + if item['custom_thumb'] and item['custom_thumb'] != item['library_thumb']: + library_thumb = item['custom_thumb'] + elif item['library_thumb']: + library_thumb = item['library_thumb'] + else: + library_thumb = common.DEFAULT_COVER_THUMB + + row = {'section_id': item['section_id'], + 'section_name': item['section_name'], + 'section_type': item['section_type'].capitalize(), + 'count': item['count'], + 'parent_count': item['parent_count'], + 'child_count': item['child_count'], + 'library_thumb': library_thumb, + 'library_art': item['art'], + 'plays': item['plays'], + 'last_accessed': item['last_accessed'], + 'last_watched': item['last_watched'], + 'thumb': thumb, + 'media_type': item['media_type'], + 'rating_key': item['rating_key'], + 'video_decision': item['video_decision'], + 'do_notify': helpers.checked(item['do_notify']), + 'do_notify_created': helpers.checked(item['do_notify_created']), + 'keep_history': helpers.checked(item['keep_history']) + } + + rows.append(row) + + dict = {'recordsFiltered': query['filteredCount'], + 'recordsTotal': query['totalCount'], + 'data': rows, + 'draw': query['draw'] + } + + return dict + + def get_datatables_media_info(self, section_id=None, section_type=None, rating_key=None, refresh=False, kwargs=None): + from plexpy import pmsconnect + import json, os + + default_return = {'recordsFiltered': 0, + 'recordsTotal': 0, + 'draw': 0, + 'data': None, + 'error': 'Unable to execute database query.'} + + if section_id and not str(section_id).isdigit(): + logger.warn(u"PlexPy 
Libraries :: Datatable media info called by invalid section_id provided.") + return default_return + elif rating_key and not str(rating_key).isdigit(): + logger.warn(u"PlexPy Libraries :: Datatable media info called by invalid rating_key provided.") + return default_return + + # Get the library details + library_details = self.get_details(section_id=section_id) + if library_details['section_id'] == None: + logger.debug(u"PlexPy Libraries :: Library section_id %s not found." % section_id) + return default_return + + if not section_type: + section_type = library_details['section_type'] + + # Get play counts from the database + monitor_db = database.MonitorDatabase() + + if plexpy.CONFIG.GROUP_HISTORY_TABLES: + count_by = 'reference_id' + else: + count_by = 'id' + + if section_type == 'show' or section_type == 'artist': + group_by = 'grandparent_rating_key' + elif section_type == 'season' or section_type == 'album': + group_by = 'parent_rating_key' + else: + group_by = 'rating_key' + + try: + query = 'SELECT MAX(session_history.started) AS last_watched, COUNT(DISTINCT session_history.%s) AS play_count, ' \ + 'session_history.rating_key, session_history.parent_rating_key, session_history.grandparent_rating_key ' \ + 'FROM session_history ' \ + 'JOIN session_history_metadata ON session_history.id = session_history_metadata.id ' \ + 'WHERE session_history_metadata.section_id = ? ' \ + 'GROUP BY session_history.%s ' % (count_by, group_by) + result = monitor_db.select(query, args=[section_id]) + except Exception as e: + logger.warn(u"PlexPy Libraries :: Unable to execute database query for get_datatables_media_info2: %s." 
% e) + return default_return + + watched_list = {} + for item in result: + watched_list[str(item[group_by])] = {'last_watched': item['last_watched'], + 'play_count': item['play_count']} + + rows = [] + # Import media info cache from json file + if rating_key: + try: + inFilePath = os.path.join(plexpy.CONFIG.CACHE_DIR,'media_info_%s-%s.json' % (section_id, rating_key)) + with open(inFilePath, 'r') as inFile: + rows = json.load(inFile) + library_count = len(rows) + except IOError as e: + #logger.debug(u"PlexPy Libraries :: No JSON file for rating_key %s." % rating_key) + #logger.debug(u"PlexPy Libraries :: Refreshing data and creating new JSON file for rating_key %s." % rating_key) + pass + elif section_id: + try: + inFilePath = os.path.join(plexpy.CONFIG.CACHE_DIR,'media_info_%s.json' % section_id) + with open(inFilePath, 'r') as inFile: + rows = json.load(inFile) + library_count = len(rows) + except IOError as e: + #logger.debug(u"PlexPy Libraries :: No JSON file for library section_id %s." % section_id) + #logger.debug(u"PlexPy Libraries :: Refreshing data and creating new JSON file for section_id %s." 
% section_id) + pass + + # If no cache was imported, get all library children items + cached_items = {d['rating_key']: d['file_size'] for d in rows} + + if refresh or not rows: + pms_connect = pmsconnect.PmsConnect() + + if rating_key: + library_children = pms_connect.get_library_children_details(rating_key=rating_key, + get_media_info=True) + elif section_id: + library_children = pms_connect.get_library_children_details(section_id=section_id, + section_type=section_type, + get_media_info=True) + + if library_children: + library_count = library_children['library_count'] + children_list = library_children['childern_list'] + else: + logger.warn(u"PlexPy Libraries :: Unable to get a list of library items.") + return default_return + + for item in children_list: + cached_file_size = cached_items.get(item['rating_key'], None) + file_size = cached_file_size if cached_file_size else item.get('file_size', '') + + row = {'section_id': library_details['section_id'], + 'section_type': library_details['section_type'], + 'added_at': item['added_at'], + 'media_type': item['media_type'], + 'rating_key': item['rating_key'], + 'parent_rating_key': item['parent_rating_key'], + 'grandparent_rating_key': item['grandparent_rating_key'], + 'title': item['title'], + 'year': item['year'], + 'media_index': item['media_index'], + 'parent_media_index': item['parent_media_index'], + 'thumb': item['thumb'], + 'container': item.get('container', ''), + 'bitrate': item.get('bitrate', ''), + 'video_codec': item.get('video_codec', ''), + 'video_resolution': item.get('video_resolution', ''), + 'video_framerate': item.get('video_framerate', ''), + 'audio_codec': item.get('audio_codec', ''), + 'audio_channels': item.get('audio_channels', ''), + 'file_size': file_size + } + rows.append(row) + + if not rows: + return default_return + + # Cache the media info to a json file + if rating_key: + try: + outFilePath = os.path.join(plexpy.CONFIG.CACHE_DIR,'media_info_%s-%s.json' % (section_id, rating_key)) + 
with open(outFilePath, 'w') as outFile: + json.dump(rows, outFile) + except IOError as e: + logger.debug(u"PlexPy Libraries :: Unable to create cache file for rating_key %s." % rating_key) + elif section_id: + try: + outFilePath = os.path.join(plexpy.CONFIG.CACHE_DIR,'media_info_%s.json' % section_id) + with open(outFilePath, 'w') as outFile: + json.dump(rows, outFile) + except IOError as e: + logger.debug(u"PlexPy Libraries :: Unable to create cache file for section_id %s." % section_id) + + # Update the last_watched and play_count + for item in rows: + watched_item = watched_list.get(item['rating_key'], None) + if watched_item: + item['last_watched'] = watched_item['last_watched'] + item['play_count'] = watched_item['play_count'] + else: + item['last_watched'] = None + item['play_count'] = None + + results = [] + + # Get datatables JSON data + if kwargs.get('json_data'): + json_data = helpers.process_json_kwargs(json_kwargs=kwargs.get('json_data')) + #print json_data + + # Search results + search_value = json_data['search']['value'].lower() + if search_value: + searchable_columns = [d['data'] for d in json_data['columns'] if d['searchable']] + for row in rows: + for k,v in row.iteritems(): + if k in searchable_columns and search_value in v.lower(): + results.append(row) + break + else: + results = rows + + filtered_count = len(results) + + # Sort results + results = sorted(results, key=lambda k: k['title']) + sort_order = json_data['order'] + for order in reversed(sort_order): + sort_key = json_data['columns'][int(order['column'])]['data'] + reverse = True if order['dir'] == 'desc' else False + if rating_key and sort_key == 'title': + results = sorted(results, key=lambda k: helpers.cast_to_int(k['media_index']), reverse=reverse) + elif sort_key == 'file_size' or sort_key == 'bitrate': + results = sorted(results, key=lambda k: helpers.cast_to_int(k[sort_key]), reverse=reverse) + else: + results = sorted(results, key=lambda k: k[sort_key], reverse=reverse) + + 
total_file_size = sum([helpers.cast_to_int(d['file_size']) for d in results]) + + # Paginate results + results = results[json_data['start']:(json_data['start'] + json_data['length'])] + + filtered_file_size = sum([helpers.cast_to_int(d['file_size']) for d in results]) + + dict = {'recordsFiltered': filtered_count, + 'recordsTotal': library_count, + 'data': results, + 'draw': int(json_data['draw']), + 'filtered_file_size': filtered_file_size, + 'total_file_size': total_file_size + } + + return dict + + def get_media_info_file_sizes(self, section_id=None, rating_key=None): + from plexpy import pmsconnect + import json, os + + if section_id and not str(section_id).isdigit(): + logger.warn(u"PlexPy Libraries :: Datatable media info file size called by invalid section_id provided.") + return False + elif rating_key and not str(rating_key).isdigit(): + logger.warn(u"PlexPy Libraries :: Datatable media info file size called by invalid rating_key provided.") + return False + + # Get the library details + library_details = self.get_details(section_id=section_id) + if library_details['section_id'] == None: + logger.debug(u"PlexPy Libraries :: Library section_id %s not found." % section_id) + return False + if library_details['section_type'] == 'photo': + return False + + rows = [] + # Import media info cache from json file + if rating_key: + #logger.debug(u"PlexPy Libraries :: Getting file sizes for rating_key %s." % rating_key) + try: + inFilePath = os.path.join(plexpy.CONFIG.CACHE_DIR,'media_info_%s-%s.json' % (section_id, rating_key)) + with open(inFilePath, 'r') as inFile: + rows = json.load(inFile) + except IOError as e: + #logger.debug(u"PlexPy Libraries :: No JSON file for rating_key %s." % rating_key) + #logger.debug(u"PlexPy Libraries :: Refreshing data and creating new JSON file for rating_key %s." % rating_key) + pass + elif section_id: + logger.debug(u"PlexPy Libraries :: Getting file sizes for section_id %s." 
% section_id) + try: + inFilePath = os.path.join(plexpy.CONFIG.CACHE_DIR,'media_info_%s.json' % section_id) + with open(inFilePath, 'r') as inFile: + rows = json.load(inFile) + except IOError as e: + #logger.debug(u"PlexPy Libraries :: No JSON file for library section_id %s." % section_id) + #logger.debug(u"PlexPy Libraries :: Refreshing data and creating new JSON file for section_id %s." % section_id) + pass + + # Get the total file size for each item + pms_connect = pmsconnect.PmsConnect() + + for item in rows: + if item['rating_key'] and not item['file_size']: + file_size = 0 + + child_metadata = pms_connect.get_metadata_children_details(rating_key=item['rating_key'], + get_children=True, + get_media_info=True) + metadata_list = child_metadata['metadata'] + + for child_metadata in metadata_list: + file_size += helpers.cast_to_int(child_metadata.get('file_size', 0)) + + item['file_size'] = file_size + + # Cache the media info to a json file + if rating_key: + try: + outFilePath = os.path.join(plexpy.CONFIG.CACHE_DIR,'media_info_%s-%s.json' % (section_id, rating_key)) + with open(outFilePath, 'w') as outFile: + json.dump(rows, outFile) + except IOError as e: + logger.debug(u"PlexPy Libraries :: Unable to create cache file with file sizes for rating_key %s." % rating_key) + elif section_id: + try: + outFilePath = os.path.join(plexpy.CONFIG.CACHE_DIR,'media_info_%s.json' % section_id) + with open(outFilePath, 'w') as outFile: + json.dump(rows, outFile) + except IOError as e: + logger.debug(u"PlexPy Libraries :: Unable to create cache file with file sizes for section_id %s." % section_id) + + if rating_key: + #logger.debug(u"PlexPy Libraries :: File sizes updated for rating_key %s." % rating_key) + pass + elif section_id: + logger.debug(u"PlexPy Libraries :: File sizes updated for section_id %s." 
% section_id) + + return True + + def set_config(self, section_id=None, custom_thumb='', do_notify=1, keep_history=1, do_notify_created=1): + if section_id: + monitor_db = database.MonitorDatabase() + + key_dict = {'section_id': section_id} + value_dict = {'custom_thumb_url': custom_thumb, + 'do_notify': do_notify, + 'do_notify_created': do_notify_created, + 'keep_history': keep_history} + try: + monitor_db.upsert('library_sections', value_dict, key_dict) + except: + logger.warn(u"PlexPy Libraries :: Unable to execute database query for set_config: %s." % e) + + def get_details(self, section_id=None): + from plexpy import pmsconnect + + monitor_db = database.MonitorDatabase() + + try: + if section_id: + query = 'SELECT section_id, section_name, section_type, count, parent_count, child_count, ' \ + 'thumb AS library_thumb, custom_thumb_url AS custom_thumb, art, ' \ + 'do_notify, do_notify_created, keep_history ' \ + 'FROM library_sections ' \ + 'WHERE section_id = ? ' + result = monitor_db.select(query, args=[section_id]) + else: + result = [] + except Exception as e: + logger.warn(u"PlexPy Libraries :: Unable to execute database query for get_details: %s." 
% e) + result = [] + + if result: + library_details = {} + for item in result: + if item['custom_thumb'] and item['custom_thumb'] != item['library_thumb']: + library_thumb = item['custom_thumb'] + elif item['library_thumb']: + library_thumb = item['library_thumb'] + else: + library_thumb = common.DEFAULT_COVER_THUMB + + library_details = {'section_id': item['section_id'], + 'section_name': item['section_name'], + 'section_type': item['section_type'], + 'library_thumb': library_thumb, + 'library_art': item['art'], + 'count': item['count'], + 'parent_count': item['parent_count'], + 'child_count': item['child_count'], + 'do_notify': item['do_notify'], + 'do_notify_created': item['do_notify_created'], + 'keep_history': item['keep_history'] + } + return library_details + else: + logger.warn(u"PlexPy Libraries :: Unable to retrieve library from local database. Requesting library list refresh.") + # Let's first refresh the user list to make sure the user isn't newly added and not in the db yet + try: + if section_id: + # Refresh libraries + pmsconnect.refresh_libraries() + query = 'SELECT section_id, section_name, section_type, count, parent_count, child_count, ' \ + 'thumb AS library_thumb, custom_thumb_url AS custom_thumb, art, ' \ + 'do_notify, do_notify_created, keep_history ' \ + 'FROM library_sections ' \ + 'WHERE section_id = ? ' + result = monitor_db.select(query, args=[section_id]) + else: + result = [] + except: + logger.warn(u"PlexPy Libraries :: Unable to execute database query for get_details: %s." 
% e) + result = [] + + if result: + library_details = {} + for item in result: + if item['custom_thumb'] and item['custom_thumb'] != item['library_thumb']: + library_thumb = item['custom_thumb'] + elif item['library_thumb']: + library_thumb = item['library_thumb'] + else: + library_thumb = common.DEFAULT_COVER_THUMB + + library_details = {'section_id': item['section_id'], + 'section_name': item['section_name'], + 'section_type': item['section_type'], + 'library_thumb': library_thumb, + 'library_art': item['art'], + 'count': item['count'], + 'parent_count': item['parent_count'], + 'child_count': item['child_count'], + 'do_notify': item['do_notify'], + 'do_notify_created': item['do_notify_created'], + 'keep_history': item['keep_history'] + } + return library_details + else: + # If there is no library data we must return something + # Use "Local" user to retain compatibility with PlexWatch database value + return {'section_id': None, + 'section_name': 'Local', + 'section_type': '', + 'library_thumb': common.DEFAULT_COVER_THUMB, + 'library_art': '', + 'count': 0, + 'parent_count': 0, + 'child_count': 0, + 'do_notify': 0, + 'do_notify_created': 0, + 'keep_history': 0 + } + + def get_watch_time_stats(self, section_id=None): + monitor_db = database.MonitorDatabase() + + time_queries = [1, 7, 30, 0] + library_watch_time_stats = [] + + for days in time_queries: + try: + if days > 0: + if str(section_id).isdigit(): + query = 'SELECT (SUM(stopped - started) - ' \ + 'SUM(CASE WHEN paused_counter is null THEN 0 ELSE paused_counter END)) as total_time, ' \ + 'COUNT(session_history.id) AS total_plays ' \ + 'FROM session_history ' \ + 'JOIN session_history_metadata ON session_history_metadata.id = session_history.id ' \ + 'WHERE datetime(stopped, "unixepoch", "localtime") >= datetime("now", "-%s days", "localtime") ' \ + 'AND section_id = ?' 
% days + result = monitor_db.select(query, args=[section_id]) + else: + result = [] + else: + if str(section_id).isdigit(): + query = 'SELECT (SUM(stopped - started) - ' \ + 'SUM(CASE WHEN paused_counter is null THEN 0 ELSE paused_counter END)) as total_time, ' \ + 'COUNT(session_history.id) AS total_plays ' \ + 'FROM session_history ' \ + 'JOIN session_history_metadata ON session_history_metadata.id = session_history.id ' \ + 'WHERE section_id = ?' + result = monitor_db.select(query, args=[section_id]) + else: + result = [] + except Exception as e: + logger.warn(u"PlexPy Libraries :: Unable to execute database query for get_watch_time_stats: %s." % e) + result = [] + + for item in result: + if item['total_time']: + total_time = item['total_time'] + total_plays = item['total_plays'] + else: + total_time = 0 + total_plays = 0 + + row = {'query_days': days, + 'total_time': total_time, + 'total_plays': total_plays + } + + library_watch_time_stats.append(row) + + return library_watch_time_stats + + def get_user_stats(self, section_id=None): + monitor_db = database.MonitorDatabase() + + user_stats = [] + + try: + if str(section_id).isdigit(): + query = 'SELECT (CASE WHEN users.friendly_name IS NULL THEN users.username ' \ + 'ELSE users.friendly_name END) AS user, users.user_id, users.thumb, COUNT(user) AS user_count ' \ + 'FROM session_history ' \ + 'JOIN session_history_metadata ON session_history_metadata.id = session_history.id ' \ + 'JOIN users ON users.user_id = session_history.user_id ' \ + 'WHERE section_id = ? ' \ + 'GROUP BY user ' \ + 'ORDER BY user_count DESC' + result = monitor_db.select(query, args=[section_id]) + else: + result = [] + except Exception as e: + logger.warn(u"PlexPy Libraries :: Unable to execute database query for get_user_stats: %s." 
% e) + result = [] + + for item in result: + row = {'user': item['user'], + 'user_id': item['user_id'], + 'thumb': item['thumb'], + 'total_plays': item['user_count'] + } + user_stats.append(row) + + return user_stats + + def get_recently_watched(self, section_id=None, limit='10'): + monitor_db = database.MonitorDatabase() + recently_watched = [] + + if not limit.isdigit(): + limit = '10' + + try: + if str(section_id).isdigit(): + query = 'SELECT session_history.id, session_history.media_type, session_history.rating_key, session_history.parent_rating_key, ' \ + 'title, parent_title, grandparent_title, thumb, parent_thumb, grandparent_thumb, media_index, parent_media_index, ' \ + 'year, started, user ' \ + 'FROM session_history_metadata ' \ + 'JOIN session_history ON session_history_metadata.id = session_history.id ' \ + 'WHERE section_id = ? ' \ + 'GROUP BY (CASE WHEN session_history.media_type = "track" THEN session_history.parent_rating_key ' \ + ' ELSE session_history.rating_key END) ' \ + 'ORDER BY started DESC LIMIT ?' + result = monitor_db.select(query, args=[section_id, limit]) + else: + result = [] + except Exception as e: + logger.warn(u"PlexPy Libraries :: Unable to execute database query for get_recently_watched: %s." 
% e) + result = [] + + for row in result: + if row['media_type'] == 'episode' and row['parent_thumb']: + thumb = row['parent_thumb'] + elif row['media_type'] == 'episode': + thumb = row['grandparent_thumb'] + else: + thumb = row['thumb'] + + recent_output = {'row_id': row['id'], + 'media_type': row['media_type'], + 'rating_key': row['rating_key'], + 'title': row['title'], + 'parent_title': row['parent_title'], + 'grandparent_title': row['grandparent_title'], + 'thumb': thumb, + 'media_index': row['media_index'], + 'parent_media_index': row['parent_media_index'], + 'year': row['year'], + 'time': row['started'], + 'user': row['user'] + } + recently_watched.append(recent_output) + + return recently_watched + + def get_sections(self): + monitor_db = database.MonitorDatabase() + + try: + query = 'SELECT section_id, section_name FROM library_sections WHERE deleted_section = 0' + result = monitor_db.select(query=query) + except Exception as e: + logger.warn(u"PlexPy Libraries :: Unable to execute database query for get_sections: %s." % e) + return None + + libraries = [] + for item in result: + library = {'section_id': item['section_id'], + 'section_name': item['section_name'] + } + libraries.append(library) + + return libraries + + def delete_all_history(self, section_id=None): + monitor_db = database.MonitorDatabase() + + try: + if section_id.isdigit(): + logger.info(u"PlexPy Libraries :: Deleting all history for library id %s from database." 
% section_id) + session_history_media_info_del = \ + monitor_db.action('DELETE FROM ' + 'session_history_media_info ' + 'WHERE session_history_media_info.id IN (SELECT session_history_media_info.id ' + 'FROM session_history_media_info ' + 'JOIN session_history_metadata ON session_history_media_info.id = session_history_metadata.id ' + 'WHERE session_history_metadata.section_id = ?)', [section_id]) + session_history_del = \ + monitor_db.action('DELETE FROM ' + 'session_history ' + 'WHERE session_history.id IN (SELECT session_history.id ' + 'FROM session_history ' + 'JOIN session_history_metadata ON session_history.id = session_history_metadata.id ' + 'WHERE session_history_metadata.section_id = ?)', [section_id]) + session_history_metadata_del = \ + monitor_db.action('DELETE FROM ' + 'session_history_metadata ' + 'WHERE session_history_metadata.section_id = ?', [section_id]) + + return 'Deleted all items for section_id %s.' % section_id + else: + return 'Unable to delete items, section_id not valid.' + except Exception as e: + logger.warn(u"PlexPy Libraries :: Unable to execute database query for delete_all_history: %s." % e) + + def delete(self, section_id=None): + monitor_db = database.MonitorDatabase() + + try: + if section_id.isdigit(): + self.delete_all_history(section_id) + logger.info(u"PlexPy Libraries :: Deleting library with id %s from database." 
% section_id) + monitor_db.action('UPDATE library_sections SET deleted_section = 1 WHERE section_id = ?', [section_id]) + monitor_db.action('UPDATE library_sections SET keep_history = 0 WHERE section_id = ?', [section_id]) + monitor_db.action('UPDATE library_sections SET do_notify = 0 WHERE section_id = ?', [section_id]) + monitor_db.action('UPDATE library_sections SET do_notify_created = 0 WHERE section_id = ?', [section_id]) + + library_cards = plexpy.CONFIG.HOME_LIBRARY_CARDS + if section_id in library_cards: + library_cards.remove(section_id) + plexpy.CONFIG.__setattr__('HOME_LIBRARY_CARDS', library_cards) + plexpy.CONFIG.write() + + return 'Deleted library with id %s.' % section_id + else: + return 'Unable to delete library, section_id not valid.' + except Exception as e: + logger.warn(u"PlexPy Libraries :: Unable to execute database query for delete: %s." % e) + + def undelete(self, section_id=None, section_name=None): + monitor_db = database.MonitorDatabase() + + try: + if section_id and section_id.isdigit(): + logger.info(u"PlexPy Libraries :: Re-adding library with id %s to database." % section_id) + monitor_db.action('UPDATE library_sections SET deleted_section = 0 WHERE section_id = ?', [section_id]) + monitor_db.action('UPDATE library_sections SET keep_history = 1 WHERE section_id = ?', [section_id]) + monitor_db.action('UPDATE library_sections SET do_notify = 1 WHERE section_id = ?', [section_id]) + monitor_db.action('UPDATE library_sections SET do_notify_created = 1 WHERE section_id = ?', [section_id]) + + return 'Re-added library with id %s.' % section_id + elif section_name: + logger.info(u"PlexPy Libraries :: Re-adding library with name %s to database." 
% section_name) + monitor_db.action('UPDATE library_sections SET deleted_section = 0 WHERE section_name = ?', [section_name]) + monitor_db.action('UPDATE library_sections SET keep_history = 1 WHERE section_name = ?', [section_name]) + monitor_db.action('UPDATE library_sections SET do_notify = 1 WHERE section_name = ?', [section_name]) + monitor_db.action('UPDATE library_sections SET do_notify_created = 1 WHERE section_name = ?', [section_name]) + + return 'Re-added library with section_name %s.' % section_name + else: + return 'Unable to re-add library, section_id or section_name not valid.' + except Exception as e: + logger.warn(u"PlexPy Libraries :: Unable to execute database query for undelete: %s." % e) + + def delete_datatable_media_info_cache(self, section_id=None): + import os + + try: + if section_id.isdigit(): + [os.remove(os.path.join(plexpy.CONFIG.CACHE_DIR, f)) for f in os.listdir(plexpy.CONFIG.CACHE_DIR) + if f.startswith('media_info-%s' % section_id) and f.endswith('.json')] + + logger.debug(u"PlexPy Libraries :: Deleted media info table cache for section_id %s." % section_id) + return 'Deleted media info table cache for library with id %s.' % section_id + else: + return 'Unable to delete media info table cache, section_id not valid.' + except Exception as e: + logger.warn(u"PlexPy Libraries :: Unable to delete media info table cache: %s." 
% e) \ No newline at end of file diff --git a/plexpy/notification_handler.py b/plexpy/notification_handler.py index ae722368..173069ea 100644 --- a/plexpy/notification_handler.py +++ b/plexpy/notification_handler.py @@ -16,20 +16,28 @@ import re import time +import re from plexpy import logger, config, notifiers, database, helpers, plextv, pmsconnect import plexpy def notify(stream_data=None, notify_action=None): - from plexpy import users - + from plexpy import users, libraries + if stream_data and notify_action: - # Check if notifications enabled for user + # Check if notifications enabled for user and library user_data = users.Users() - user_details = user_data.get_user_friendly_name(user=stream_data['user']) + user_details = user_data.get_details(user_id=stream_data['user_id']) + + library_data = libraries.Libraries() + library_details = library_data.get_details(section_id=stream_data['section_id']) if not user_details['do_notify']: + # logger.debug(u"PlexPy NotificationHandler :: Notifications for user '%s' is disabled." % user_details['username']) + return + elif not library_details['do_notify']: + # logger.debug(u"PlexPy NotificationHandler :: Notifications for library '%s' is disabled." 
% library_details['section_name']) return if (stream_data['media_type'] == 'movie' and plexpy.CONFIG.MOVIE_NOTIFY_ENABLE) \ @@ -195,10 +203,10 @@ def notify(stream_data=None, notify_action=None): elif stream_data['media_type'] == 'clip': pass else: - #logger.debug(u"PlexPy Notifier :: Notify called with unsupported media type.") + #logger.debug(u"PlexPy NotificationHandler :: Notify called with unsupported media type.") pass else: - logger.debug(u"PlexPy Notifier :: Notify called but incomplete data received.") + logger.debug(u"PlexPy NotificationHandler :: Notify called but incomplete data received.") def notify_timeline(timeline_data=None, notify_action=None): @@ -256,7 +264,7 @@ def notify_timeline(timeline_data=None, notify_action=None): notify_action=notify_action, script_args=notify_strings[2]) else: - logger.debug(u"PlexPy Notifier :: Notify timeline called but incomplete data received.") + logger.debug(u"PlexPy NotificationHandler :: Notify timeline called but incomplete data received.") def get_notify_state(session): @@ -334,11 +342,10 @@ def set_notify_state(session, state, agent_info): monitor_db.upsert(table_name='notify_log', key_dict=keys, value_dict=values) else: - logger.error('PlexPy Notifier :: Unable to set notify state.') + logger.error(u"PlexPy NotificationHandler :: Unable to set notify state.") def build_notify_text(session=None, timeline=None, state=None): - # Get the server name server_name = plexpy.CONFIG.PMS_NAME @@ -350,7 +357,7 @@ def build_notify_text(session=None, timeline=None, state=None): updated_at = server_times[0]['updated_at'] server_uptime = helpers.human_duration(int(time.time() - helpers.cast_to_float(updated_at))) else: - logger.error(u"PlexPy Notifier :: Unable to retrieve server uptime.") + logger.error(u"PlexPy NotificationHandler :: Unable to retrieve server uptime.") server_uptime = 'N/A' # Get metadata feed for item @@ -367,7 +374,7 @@ def build_notify_text(session=None, timeline=None, state=None): if metadata_list: 
metadata = metadata_list['metadata'] else: - logger.error(u"PlexPy Notifier :: Unable to retrieve metadata for rating_key %s" % str(rating_key)) + logger.error(u"PlexPy NotificationHandler :: Unable to retrieve metadata for rating_key %s" % str(rating_key)) return [] # Check for exclusion tags @@ -547,15 +554,16 @@ def build_notify_text(session=None, timeline=None, state=None): 'transcode_audio_codec': transcode_audio_codec, 'transcode_audio_channels': transcode_audio_channels, 'title': full_title, + 'library_name': metadata['library_name'], 'show_name': show_name, 'episode_name': episode_name, 'artist_name': artist_name, 'album_name': album_name, 'track_name': track_name, - 'season_num': metadata['parent_index'].zfill(1), - 'season_num00': metadata['parent_index'].zfill(2), - 'episode_num': metadata['index'].zfill(1), - 'episode_num00': metadata['index'].zfill(2), + 'season_num': metadata['parent_media_index'].zfill(1), + 'season_num00': metadata['parent_media_index'].zfill(2), + 'episode_num': metadata['media_index'].zfill(1), + 'episode_num00': metadata['media_index'].zfill(2), 'year': metadata['year'], 'studio': metadata['studio'], 'content_rating': metadata['content_rating'], @@ -597,16 +605,16 @@ def build_notify_text(session=None, timeline=None, state=None): try: subject_text = unicode(on_start_subject).format(**available_params) except LookupError, e: - logger.error(u"PlexPy Notifier :: Unable to parse field %s in notification subject. Using fallback." % e) + logger.error(u"PlexPy NotificationHandler :: Unable to parse field %s in notification subject. Using fallback." % e) except: - logger.error(u"PlexPy Notifier :: Unable to parse custom notification subject. Using fallback.") + logger.error(u"PlexPy NotificationHandler :: Unable to parse custom notification subject. Using fallback.") try: body_text = unicode(on_start_body).format(**available_params) except LookupError, e: - logger.error(u"PlexPy Notifier :: Unable to parse field %s in notification body. 
Using fallback." % e) + logger.error(u"PlexPy NotificationHandler :: Unable to parse field %s in notification body. Using fallback." % e) except: - logger.error(u"PlexPy Notifier :: Unable to parse custom notification body. Using fallback.") + logger.error(u"PlexPy NotificationHandler :: Unable to parse custom notification body. Using fallback.") return [subject_text, body_text, script_args] else: @@ -621,16 +629,16 @@ def build_notify_text(session=None, timeline=None, state=None): try: subject_text = unicode(on_stop_subject).format(**available_params) except LookupError, e: - logger.error(u"PlexPy Notifier :: Unable to parse field %s in notification subject. Using fallback." % e) + logger.error(u"PlexPy NotificationHandler :: Unable to parse field %s in notification subject. Using fallback." % e) except: - logger.error(u"PlexPy Notifier :: Unable to parse custom notification subject. Using fallback.") + logger.error(u"PlexPy NotificationHandler :: Unable to parse custom notification subject. Using fallback.") try: body_text = unicode(on_stop_body).format(**available_params) except LookupError, e: - logger.error(u"PlexPy Notifier :: Unable to parse field %s in notification body. Using fallback." % e) + logger.error(u"PlexPy NotificationHandler :: Unable to parse field %s in notification body. Using fallback." % e) except: - logger.error(u"PlexPy Notifier :: Unable to parse custom notification body. Using fallback.") + logger.error(u"PlexPy NotificationHandler :: Unable to parse custom notification body. Using fallback.") return [subject_text, body_text, script_args] else: @@ -645,16 +653,16 @@ def build_notify_text(session=None, timeline=None, state=None): try: subject_text = unicode(on_pause_subject).format(**available_params) except LookupError, e: - logger.error(u"PlexPy Notifier :: Unable to parse field %s in notification subject. Using fallback." % e) + logger.error(u"PlexPy NotificationHandler :: Unable to parse field %s in notification subject. 
Using fallback." % e) except: - logger.error(u"PlexPy Notifier :: Unable to parse custom notification subject. Using fallback.") + logger.error(u"PlexPy NotificationHandler :: Unable to parse custom notification subject. Using fallback.") try: body_text = unicode(on_pause_body).format(**available_params) except LookupError, e: - logger.error(u"PlexPy Notifier :: Unable to parse field %s in notification body. Using fallback." % e) + logger.error(u"PlexPy NotificationHandler :: Unable to parse field %s in notification body. Using fallback." % e) except: - logger.error(u"PlexPy Notifier :: Unable to parse custom notification body. Using fallback.") + logger.error(u"PlexPy NotificationHandler :: Unable to parse custom notification body. Using fallback.") return [subject_text, body_text, script_args] else: @@ -669,16 +677,16 @@ def build_notify_text(session=None, timeline=None, state=None): try: subject_text = unicode(on_resume_subject).format(**available_params) except LookupError, e: - logger.error(u"PlexPy Notifier :: Unable to parse field %s in notification subject. Using fallback." % e) + logger.error(u"PlexPy NotificationHandler :: Unable to parse field %s in notification subject. Using fallback." % e) except: - logger.error(u"PlexPy Notifier :: Unable to parse custom notification subject. Using fallback.") + logger.error(u"PlexPy NotificationHandler :: Unable to parse custom notification subject. Using fallback.") try: body_text = unicode(on_resume_body).format(**available_params) except LookupError, e: - logger.error(u"PlexPy Notifier :: Unable to parse field %s in notification body. Using fallback." % e) + logger.error(u"PlexPy NotificationHandler :: Unable to parse field %s in notification body. Using fallback." % e) except: - logger.error(u"PlexPy Notifier :: Unable to parse custom notification body. Using fallback.") + logger.error(u"PlexPy NotificationHandler :: Unable to parse custom notification body. 
Using fallback.") return [subject_text, body_text, script_args] else: @@ -693,16 +701,16 @@ def build_notify_text(session=None, timeline=None, state=None): try: subject_text = unicode(on_buffer_subject).format(**available_params) except LookupError, e: - logger.error(u"PlexPy Notifier :: Unable to parse field %s in notification subject. Using fallback." % e) + logger.error(u"PlexPy NotificationHandler :: Unable to parse field %s in notification subject. Using fallback." % e) except: - logger.error(u"PlexPy Notifier :: Unable to parse custom notification subject. Using fallback.") + logger.error(u"PlexPy NotificationHandler :: Unable to parse custom notification subject. Using fallback.") try: body_text = unicode(on_buffer_body).format(**available_params) except LookupError, e: - logger.error(u"PlexPy Notifier :: Unable to parse field %s in notification body. Using fallback." % e) + logger.error(u"PlexPy NotificationHandler :: Unable to parse field %s in notification body. Using fallback." % e) except: - logger.error(u"PlexPy Notifier :: Unable to parse custom notification body. Using fallback.") + logger.error(u"PlexPy NotificationHandler :: Unable to parse custom notification body. Using fallback.") return [subject_text, body_text, script_args] else: @@ -717,16 +725,16 @@ def build_notify_text(session=None, timeline=None, state=None): try: subject_text = unicode(on_watched_subject).format(**available_params) except LookupError, e: - logger.error(u"PlexPy Notifier :: Unable to parse field %s in notification subject. Using fallback." % e) + logger.error(u"PlexPy NotificationHandler :: Unable to parse field %s in notification subject. Using fallback." % e) except: - logger.error(u"PlexPy Notifier :: Unable to parse custom notification subject. Using fallback.") + logger.error(u"PlexPy NotificationHandler :: Unable to parse custom notification subject. 
Using fallback.") try: body_text = unicode(on_watched_body).format(**available_params) except LookupError, e: - logger.error(u"PlexPy Notifier :: Unable to parse field %s in notification body. Using fallback." % e) + logger.error(u"PlexPy NotificationHandler :: Unable to parse field %s in notification body. Using fallback." % e) except: - logger.error(u"PlexPy Notifier :: Unable to parse custom notification body. Using fallback.") + logger.error(u"PlexPy NotificationHandler :: Unable to parse custom notification body. Using fallback.") return [subject_text, body_text, script_args] else: @@ -739,16 +747,16 @@ def build_notify_text(session=None, timeline=None, state=None): try: subject_text = unicode(on_created_subject).format(**available_params) except LookupError, e: - logger.error(u"PlexPy Notifier :: Unable to parse field %s in notification subject. Using fallback." % e) + logger.error(u"PlexPy NotificationHandler :: Unable to parse field %s in notification subject. Using fallback." % e) except: - logger.error(u"PlexPy Notifier :: Unable to parse custom notification subject. Using fallback.") + logger.error(u"PlexPy NotificationHandler :: Unable to parse custom notification subject. Using fallback.") try: body_text = unicode(on_created_body).format(**available_params) except LookupError, e: - logger.error(u"PlexPy Notifier :: Unable to parse field %s in notification body. Using fallback." % e) + logger.error(u"PlexPy NotificationHandler :: Unable to parse field %s in notification body. Using fallback." % e) except: - logger.error(u"PlexPy Notifier :: Unable to parse custom notification body. Using fallback.") + logger.error(u"PlexPy NotificationHandler :: Unable to parse custom notification body. 
Using fallback.") return [subject_text, body_text, script_args] else: @@ -769,7 +777,7 @@ def build_server_notify_text(state=None): updated_at = server_times[0]['updated_at'] server_uptime = helpers.human_duration(int(time.time() - helpers.cast_to_float(updated_at))) else: - logger.error(u"PlexPy Notifier :: Unable to retrieve server uptime.") + logger.error(u"PlexPy NotificationHandler :: Unable to retrieve server uptime.") server_uptime = 'N/A' on_extdown_subject = plexpy.CONFIG.NOTIFY_ON_EXTDOWN_SUBJECT_TEXT @@ -812,14 +820,14 @@ def build_server_notify_text(state=None): try: subject_text = unicode(on_extdown_subject).format(**available_params) except LookupError, e: - logger.error(u"PlexPy Notifier :: Unable to parse field %s in notification subject. Using fallback." % e) + logger.error(u"PlexPy NotificationHandler :: Unable to parse field %s in notification subject. Using fallback." % e) except: - logger.error(u"PlexPy Notifier :: Unable to parse custom notification subject. Using fallback.") + logger.error(u"PlexPy NotificationHandler :: Unable to parse custom notification subject. Using fallback.") try: body_text = unicode(on_extdown_body).format(**available_params) except LookupError, e: - logger.error(u"PlexPy Notifier :: Unable to parse field %s in notification body. Using fallback." % e) + logger.error(u"PlexPy NotificationHandler :: Unable to parse field %s in notification body. Using fallback." % e) except: logger.error(u"PlexPy Notifier :: Unable to parse custom notification body. Using fallback.") @@ -835,16 +843,16 @@ def build_server_notify_text(state=None): try: subject_text = unicode(on_intdown_subject).format(**available_params) except LookupError, e: - logger.error(u"PlexPy Notifier :: Unable to parse field %s in notification subject. Using fallback." % e) + logger.error(u"PlexPy NotificationHandler :: Unable to parse field %s in notification subject. Using fallback." 
% e) except: - logger.error(u"PlexPy Notifier :: Unable to parse custom notification subject. Using fallback.") + logger.error(u"PlexPy NotificationHandler :: Unable to parse custom notification subject. Using fallback.") try: body_text = unicode(on_intdown_body).format(**available_params) except LookupError, e: - logger.error(u"PlexPy Notifier :: Unable to parse field %s in notification body. Using fallback." % e) + logger.error(u"PlexPy NotificationHandler :: Unable to parse field %s in notification body. Using fallback." % e) except: - logger.error(u"PlexPy Notifier :: Unable to parse custom notification body. Using fallback.") + logger.error(u"PlexPy NotificationHandler :: Unable to parse custom notification body. Using fallback.") return [subject_text, body_text, script_args] else: @@ -857,16 +865,16 @@ def build_server_notify_text(state=None): try: subject_text = unicode(on_extup_subject).format(**available_params) except LookupError, e: - logger.error(u"PlexPy Notifier :: Unable to parse field %s in notification subject. Using fallback." % e) + logger.error(u"PlexPy NotificationHandler :: Unable to parse field %s in notification subject. Using fallback." % e) except: - logger.error(u"PlexPy Notifier :: Unable to parse custom notification subject. Using fallback.") + logger.error(u"PlexPy NotificationHandler :: Unable to parse custom notification subject. Using fallback.") try: body_text = unicode(on_extup_body).format(**available_params) except LookupError, e: - logger.error(u"PlexPy Notifier :: Unable to parse field %s in notification body. Using fallback." % e) + logger.error(u"PlexPy NotificationHandler :: Unable to parse field %s in notification body. Using fallback." % e) except: - logger.error(u"PlexPy Notifier :: Unable to parse custom notification body. Using fallback.") + logger.error(u"PlexPy NotificationHandler :: Unable to parse custom notification body. 
Using fallback.") return [subject_text, body_text, script_args] else: @@ -879,16 +887,16 @@ def build_server_notify_text(state=None): try: subject_text = unicode(on_intup_subject).format(**available_params) except LookupError, e: - logger.error(u"PlexPy Notifier :: Unable to parse field %s in notification subject. Using fallback." % e) + logger.error(u"PlexPy NotificationHandler :: Unable to parse field %s in notification subject. Using fallback." % e) except: - logger.error(u"PlexPy Notifier :: Unable to parse custom notification subject. Using fallback.") + logger.error(u"PlexPy NotificationHandler :: Unable to parse custom notification subject. Using fallback.") try: body_text = unicode(on_intup_body).format(**available_params) except LookupError, e: - logger.error(u"PlexPy Notifier :: Unable to parse field %s in notification body. Using fallback." % e) + logger.error(u"PlexPy NotificationHandler :: Unable to parse field %s in notification body. Using fallback." % e) except: - logger.error(u"PlexPy Notifier :: Unable to parse custom notification body. Using fallback.") + logger.error(u"PlexPy NotificationHandler :: Unable to parse custom notification body. 
def strip_tag(data):
    """Return *data* with all <...> tag-like spans removed (non-greedy match)."""
    # re.sub caches the compiled pattern internally, so a one-shot call is
    # simpler than compiling it on every invocation.
    return re.sub(r'<.*?>', '', data)
logger.info('Sending notification command to XMBC @ ' + host) + logger.info(u"PlexPy Notifier :: Sending notification command to XMBC @ " + host) try: version = self._sendjson(host, 'Application.GetProperties', {'properties': ['version']})['version']['major'] @@ -713,15 +713,17 @@ class XBMC(object): if not request: raise Exception + else: + logger.info(u"PlexPy Notifier :: XBMC notification sent.") except Exception: - logger.error('Error sending notification request to XBMC') + logger.warn(u"PlexPy Notifier :: XBMC notification filed.") def return_config_options(self): config_option = [{'label': 'XBMC Host:Port', 'value': self.hosts, 'name': 'xbmc_host', - 'description': 'Host running XBMC (e.g. http://localhost:8080). Separate multiple hosts with commas.', + 'description': 'Host running XBMC (e.g. http://localhost:8080). Separate multiple hosts with commas (,).', 'input_type': 'text' }, {'label': 'XBMC Username', @@ -763,12 +765,12 @@ class Plex(object): base64string = base64.encodestring('%s:%s' % (username, password)).replace('\n', '') req.add_header("Authorization", "Basic %s" % base64string) - logger.info('Plex url: %s' % url) + # logger.info(u"PlexPy Notifier :: Plex url: %s" % url) try: handle = urllib2.urlopen(req) except Exception as e: - logger.warn('Error opening Plex url: %s' % e) + logger.error(u"PlexPy Notifier :: Error opening Plex url: %s" % e) return response = handle.read().decode(plexpy.SYS_ENCODING) @@ -784,7 +786,7 @@ class Plex(object): time = "3000" # in ms for host in hosts: - logger.info('Sending notification command to Plex Media Server @ ' + host) + logger.info(u"PlexPy Notifier :: Sending notification command to Plex Media Server @ " + host) try: notification = header + "," + message + "," + time notifycommand = {'command': 'ExecBuiltIn', 'parameter': 'Notification(' + notification + ')'} @@ -792,9 +794,11 @@ class Plex(object): if not request: raise Exception + else: + logger.info(u"PlexPy Notifier :: Plex notification sent.") except: 
- logger.warn('Error sending notification request to Plex Media Server') + logger.warn(u"PlexPy Notifier :: Plex notification failed.") def return_config_options(self): config_option = [{'label': 'Plex Client Host:Port', @@ -855,9 +859,10 @@ class NMA(object): response = p.push(title, event, message, priority=nma_priority, batch_mode=batch) if not response[api][u'code'] == u'200': - logger.error(u'Could not send notification to NotifyMyAndroid') + logger.warn(u"PlexPy Notifier :: NotifyMyAndroid notification failed.") return False else: + logger.info(u"PlexPy Notifier :: NotifyMyAndroid notification sent.") return True def return_config_options(self): @@ -918,14 +923,14 @@ class PUSHBULLET(object): # logger.debug(u"PushBullet response body: %r" % response.read()) if request_status == 200: - logger.info(u"PushBullet notifications sent.") - return True + logger.info(u"PlexPy Notifier :: PushBullet notification sent.") + return True elif request_status >= 400 and request_status < 500: - logger.info(u"PushBullet request failed: %s" % response.reason) - return False + logger.warn(u"PlexPy Notifier :: PushBullet notification failed: %s" % response.reason) + return False else: - logger.info(u"PushBullet notification failed serverside.") - return False + logger.warn(u"PlexPy Notifier :: PushBullet notification failed.") + return False def test(self, apikey, deviceid): @@ -952,10 +957,10 @@ class PUSHBULLET(object): devices.update({'': ''}) return devices elif request_status >= 400 and request_status < 500: - logger.info(u"Unable to retrieve Pushbullet devices list: %s" % response.reason) + logger.warn(u"PlexPy Notifier :: Unable to retrieve Pushbullet devices list: %s" % response.reason) return {'': ''} else: - logger.info(u"Unable to retrieve Pushbullet devices list.") + logger.warn(u"PlexPy Notifier :: Unable to retrieve Pushbullet devices list.") return {'': ''} else: @@ -1020,14 +1025,14 @@ class PUSHALOT(object): # logger.debug(u"Pushalot response body: %r" % 
response.read()) if request_status == 200: - logger.info(u"Pushalot notifications sent.") - return True + logger.info(u"PlexPy Notifier :: Pushalot notification sent.") + return True elif request_status == 410: - logger.info(u"Pushalot auth failed: %s" % response.reason) - return False + logger.warn(u"PlexPy Notifier :: Pushalot notification failed: %s" % response.reason) + return False else: - logger.info(u"Pushalot notification failed.") - return False + logger.warn(u"PlexPy Notifier :: Pushalot notification failed.") + return False def return_config_options(self): config_option = [{'label': 'Pushalot API Key', @@ -1077,14 +1082,14 @@ class PUSHOVER(object): # logger.debug(u"Pushover response body: %r" % response.read()) if request_status == 200: - logger.info(u"Pushover notifications sent.") - return True + logger.info(u"PlexPy Notifier :: Pushover notification sent.") + return True elif request_status >= 400 and request_status < 500: - logger.info(u"Pushover request failed: %s" % response.reason) - return False + logger.warn(u"PlexPy Notifier :: Pushover notification failed: %s" % response.reason) + return False else: - logger.info(u"Pushover notification failed.") - return False + logger.warn(u"PlexPy Notifier :: Pushover notification failed.") + return False def updateLibrary(self): # For uniformity reasons not removed @@ -1111,10 +1116,10 @@ class PUSHOVER(object): sounds.update({'': ''}) return sounds elif request_status >= 400 and request_status < 500: - logger.info(u"Unable to retrieve Pushover notification sounds list: %s" % response.reason) + logger.warn(u"PlexPy Notifier :: Unable to retrieve Pushover notification sounds list: %s" % response.reason) return {'': ''} else: - logger.info(u"Unable to retrieve Pushover notification sounds list.") + logger.warn(u"PlexPy Notifier :: Unable to retrieve Pushover notification sounds list.") return {'': ''} else: @@ -1177,12 +1182,12 @@ class TwitterNotifier(object): oauth_consumer = 
oauth.Consumer(key=self.consumer_key, secret=self.consumer_secret) oauth_client = oauth.Client(oauth_consumer) - logger.info('Requesting temp token from Twitter') + logger.info("PlexPy Notifier :: Requesting temp token from Twitter") resp, content = oauth_client.request(self.REQUEST_TOKEN_URL, 'GET') if resp['status'] != '200': - logger.info('Invalid respond from Twitter requesting temp token: %s' % resp['status']) + logger.warn("PlexPy Notifier :: Invalid respond from Twitter requesting temp token: %s" % resp['status']) else: request_token = dict(parse_qsl(content)) @@ -1201,25 +1206,25 @@ class TwitterNotifier(object): token = oauth.Token(request_token['oauth_token'], request_token['oauth_token_secret']) token.set_verifier(key) - logger.info('Generating and signing request for an access token using key ' + key) + # logger.debug(u"Generating and signing request for an access token using key " + key) oauth_consumer = oauth.Consumer(key=self.consumer_key, secret=self.consumer_secret) - # logger.debug('oauth_consumer: ' + str(oauth_consumer)) + # logger.debug(u"oauth_consumer: " + str(oauth_consumer)) oauth_client = oauth.Client(oauth_consumer, token) - # logger.info('oauth_client: ' + str(oauth_client)) + # logger.debug(u"oauth_client: " + str(oauth_client)) resp, content = oauth_client.request(self.ACCESS_TOKEN_URL, method='POST', body='oauth_verifier=%s' % key) - # logger.info('resp, content: ' + str(resp) + ',' + str(content)) + # logger.debug(u"resp, content: " + str(resp) + ',' + str(content)) access_token = dict(parse_qsl(content)) - # logger.info('access_token: ' + str(access_token)) + # logger.debug(u"access_token: " + str(access_token)) - # logger.info('resp[status] = ' + str(resp['status'])) + # logger.debug(u"resp[status] = " + str(resp['status'])) if resp['status'] != '200': - logger.info('The request for a token with did not succeed: ' + str(resp['status']), logger.ERROR) + logger.error(u"PlexPy Notifier :: The request for a Twitter token did not 
succeed: " + str(resp['status']), logger.ERROR) return False else: - logger.info('Your Twitter Access Token key: %s' % access_token['oauth_token']) - logger.info('Access Token secret: %s' % access_token['oauth_token_secret']) + # logger.info(u"PlexPy Notifier :: Your Twitter Access Token key: %s" % access_token['oauth_token']) + # logger.info(u"PlexPy Notifier :: Access Token secret: %s" % access_token['oauth_token_secret']) plexpy.CONFIG.TWITTER_USERNAME = access_token['oauth_token'] plexpy.CONFIG.TWITTER_PASSWORD = access_token['oauth_token_secret'] plexpy.CONFIG.write() @@ -1231,15 +1236,15 @@ class TwitterNotifier(object): access_token_key = plexpy.CONFIG.TWITTER_USERNAME access_token_secret = plexpy.CONFIG.TWITTER_PASSWORD - # logger.info(u"Sending tweet: " + message) + # logger.info(u"PlexPy Notifier :: Sending tweet: " + message) api = twitter.Api(username, password, access_token_key, access_token_secret) try: api.PostUpdate(message) - logger.info(u"Twitter notifications sent.") + logger.info(u"PlexPy Notifier :: Twitter notification sent") except Exception as e: - logger.info(u"Error sending Tweet: %s" % e) + logger.warn(u"PlexPy Notifier :: Twitter notification failed: %s" % e) return False return True @@ -1335,13 +1340,13 @@ class OSX_NOTIFY(object): notification_center = NSUserNotificationCenter.defaultUserNotificationCenter() notification_center.deliverNotification_(notification) - logger.info(u"OSX Notify notifications sent.") + logger.info(u"PlexPy Notifier :: OSX Notify notification sent.") del pool return True except Exception as e: - logger.warn('Error sending OS X Notification: %s' % e) + logger.warn(u"PlexPy Notifier :: OSX notification failed: %s" % e) return False def swizzled_bundleIdentifier(self, original, swizzled): @@ -1382,13 +1387,47 @@ class BOXCAR(object): req = urllib2.Request(self.url) handle = urllib2.urlopen(req, data) handle.close() - logger.info(u"Boxcar2 notifications sent.") + logger.info(u"PlexPy Notifier :: Boxcar2 
notification sent.") return True except urllib2.URLError as e: - logger.warn('Error sending Boxcar2 Notification: %s' % e) + logger.warn(u"PlexPy Notifier :: Boxcar2 notification failed: %s" % e) return False + def get_sounds(self): + sounds = {'': '', + 'beep-crisp': 'Beep (Crisp)', + 'beep-soft': 'Beep (Soft)', + 'bell-modern': 'Bell (Modern)', + 'bell-one-tone': 'Bell (One Tone)', + 'bell-simple': 'Bell (Simple)', + 'bell-triple': 'Bell (Triple)', + 'bird-1': 'Bird (1)', + 'bird-2': 'Bird (2)', + 'boing': 'Boing', + 'cash': 'Cash', + 'clanging': 'Clanging', + 'detonator-charge': 'Detonator Charge', + 'digital-alarm': 'Digital Alarm', + 'done': 'Done', + 'echo': 'Echo', + 'flourish': 'Flourish', + 'harp': 'Harp', + 'light': 'Light', + 'magic-chime':'Magic Chime', + 'magic-coin': 'Magic Coin', + 'no-sound': 'No Sound', + 'notifier-1': 'Notifier (1)', + 'notifier-2': 'Notifier (2)', + 'notifier-3': 'Notifier (3)', + 'orchestral-long': 'Orchestral (Long)', + 'orchestral-short': 'Orchestral (Short)', + 'score': 'Score', + 'success': 'Success', + 'up': 'Up'} + + return sounds + def return_config_options(self): config_option = [{'label': 'Boxcar Access Token', 'value': plexpy.CONFIG.BOXCAR_TOKEN, @@ -1401,36 +1440,7 @@ class BOXCAR(object): 'name': 'boxcar_sound', 'description': 'Set the notification sound. 
Leave blank for the default sound.', 'input_type': 'select', - 'select_options': {'': '', - 'beep-crisp': 'Beep (Crisp)', - 'beep-soft': 'Beep (Soft)', - 'bell-modern': 'Bell (Modern)', - 'bell-one-tone': 'Bell (One Tone)', - 'bell-simple': 'Bell (Simple)', - 'bell-triple': 'Bell (Triple)', - 'bird-1': 'Bird (1)', - 'bird-2': 'Bird (2)', - 'boing': 'Boing', - 'cash': 'Cash', - 'clanging': 'Clanging', - 'detonator-charge': 'Detonator Charge', - 'digital-alarm': 'Digital Alarm', - 'done': 'Done', - 'echo': 'Echo', - 'flourish': 'Flourish', - 'harp': 'Harp', - 'light': 'Light', - 'magic-chime': 'Magic Chime', - 'magic-coin': 'Magic Coin', - 'no-sound': 'No Sound', - 'notifier-1': 'Notifier (1)', - 'notifier-2': 'Notifier (2)', - 'notifier-3': 'Notifier (3)', - 'orchestral-long': 'Orchestral (Long)', - 'orchestral-short': 'Orchestral (Short)', - 'score': 'Score', - 'success': 'Success', - 'up': 'Up'} + 'select_options': self.get_sounds() } ] @@ -1471,11 +1481,11 @@ class Email(object): mailserver.sendmail(plexpy.CONFIG.EMAIL_FROM, recipients, message.as_string()) mailserver.quit() - logger.info(u"Email notifications sent.") + logger.info(u"PlexPy Notifier :: Email notification sent.") return True - except Exception, e: - logger.warn('Error sending Email: %s' % e) + except Exception as e: + logger.warn(u"PlexPy Notifier :: Email notification failed: %s" % e) return False def return_config_options(self): @@ -1558,7 +1568,7 @@ class IFTTT(object): data = {'value1': subject.encode("utf-8"), 'value2': message.encode("utf-8")} - # logger.debug("Ifttt SENDING: %s" % json.dumps(data)) + # logger.debug(u"Ifttt SENDING: %s" % json.dumps(data)) http_handler.request("POST", "/trigger/%s/with/key/%s" % (self.event, self.apikey), @@ -1571,14 +1581,14 @@ class IFTTT(object): # logger.debug(u"Ifttt response body: %r" % response.read()) if request_status == 200: - logger.info(u"Ifttt notifications sent.") - return True + logger.info(u"PlexPy Notifier :: Ifttt notification sent.") + 
return True elif request_status >= 400 and request_status < 500: - logger.info(u"Ifttt request failed: %s" % response.reason) - return False + logger.warn(u"PlexPy Notifier :: Ifttt notification failed: %s" % response.reason) + return False else: - logger.info(u"Ifttt notification failed serverside.") - return False + logger.warn(u"PlexPy Notifier :: Ifttt notification failed.") + return False def test(self): return self.notify('PlexPy', 'Test Message') @@ -1631,14 +1641,14 @@ class TELEGRAM(object): request_status = response.status if request_status == 200: - logger.info(u"Telegram notifications sent.") - return True + logger.info(u"PlexPy Notifier :: Telegram notification sent.") + return True elif request_status >= 400 and request_status < 500: - logger.info(u"Telegram request failed: %s" % response.reason) - return False + logger.warn(u"PlexPy Notifier :: Telegram notification failed: %s" % response.reason) + return False else: - logger.info(u"Telegram notification failed serverside.") - return False + logger.warn(u"PlexPy Notifier :: Telegram notification failed.") + return False def updateLibrary(self): # For uniformity reasons not removed @@ -1708,14 +1718,14 @@ class SLACK(object): request_status = response.status if request_status == 200: - logger.info(u"Slack notifications sent.") - return True + logger.info(u"PlexPy Notifier :: Slack notification sent.") + return True elif request_status >= 400 and request_status < 500: - logger.info(u"Slack request failed: %s" % response.reason) - return False + logger.warn(u"PlexPy Notifier :: Slack notification failed: %s" % response.reason) + return False else: - logger.info(u"Slack notification failed serverside.") - return False + logger.warn(u"PlexPy Notifier :: Slack notification failed.") + return False def updateLibrary(self): #For uniformity reasons not removed @@ -2034,7 +2044,7 @@ class FacebookNotifier(object): self._post_facebook(subject + ': ' + message) def test_notify(self): - return 
self._post_facebook("This is a test notification from PlexPy at " + helpers.now()) + return self._post_facebook(u"PlexPy Notifiers :: This is a test notification from PlexPy at " + helpers.now()) def _get_authorization(self): return facebook.auth_url(app_id=self.app_id, @@ -2042,7 +2052,7 @@ class FacebookNotifier(object): perms=['user_managed_groups','publish_actions']) def _get_credentials(self, code): - logger.info('Requesting access token from Facebook') + logger.info(u"PlexPy Notifiers :: Requesting access token from Facebook") try: # Request user access token @@ -2062,7 +2072,7 @@ class FacebookNotifier(object): plexpy.CONFIG.FACEBOOK_TOKEN = access_token plexpy.CONFIG.write() except Exception as e: - logger.info(u"Error requesting Facebook access token: %s" % e) + logger.info(u"PlexPy Notifiers :: Error requesting Facebook access token: %s" % e) return False return True @@ -2076,21 +2086,23 @@ class FacebookNotifier(object): try: api.put_wall_post(profile_id=group_id, message=message) - logger.info(u"Facebook notifications sent.") + logger.info(u"PlexPy Notifiers :: Facebook notifications sent.") except Exception as e: - logger.info(u"Error sending Facebook post: %s" % e) + logger.info(u"PlexPy Notifiers :: Error sending Facebook post: %s" % e) return False return True else: - logger.info('Error sending Facebook post: No Facebook Group ID provided.') + logger.info(u"PlexPy Notifiers :: Error sending Facebook post: No Facebook Group ID provided.") return False def return_config_options(self): config_option = [{'label': 'Instructions', 'description': 'Facebook notifications are currently experimental!

      \ - Step 1: Visit Facebook Developers to create a new app using advanced setup.
      \ - Step 2: Go to Settings > Advanced and fill in Valid OAuth redirect URIs with your PlexPy URL (i.e. http://localhost:8181).
      \ + Step 1: Visit \ + Facebook Developers to create a new app using advanced setup.
      \ + Step 2: Go to Settings > Advanced and fill in \ + Valid OAuth redirect URIs with your PlexPy URL (i.e. http://localhost:8181).
      \ Step 3: Fill in the App ID and App Secret below.
      \ Step 4: Click the Request Authorization button below.', 'input_type': 'help' diff --git a/plexpy/plextv.py b/plexpy/plextv.py index 2dc92486..cb807785 100644 --- a/plexpy/plextv.py +++ b/plexpy/plextv.py @@ -16,7 +16,7 @@ # You should have received a copy of the GNU General Public License # along with PlexPy. If not, see . -from plexpy import logger, helpers, users, http_handler, database +from plexpy import logger, helpers, http_handler, database, users import xmltodict import json from xml.dom import minidom @@ -26,7 +26,7 @@ import plexpy def refresh_users(): - logger.info("Requesting users list refresh...") + logger.info(u"PlexPy PlexTV :: Requesting users list refresh...") result = PlexTV().get_full_users_list() monitor_db = database.MonitorDatabase() @@ -55,13 +55,13 @@ def refresh_users(): monitor_db.upsert('users', new_value_dict, control_value_dict) - logger.info("Users list refreshed.") + logger.info(u"PlexPy PlexTV :: Users list refreshed.") else: - logger.warn("Unable to refresh users list.") + logger.warn(u"PlexPy PlexTV :: Unable to refresh users list.") def get_real_pms_url(): - logger.info("Requesting URLs for server...") + logger.info(u"PlexPy PlexTV :: Requesting URLs for server...") # Reset any current PMS_URL value plexpy.CONFIG.__setattr__('PMS_URL', '') @@ -85,15 +85,15 @@ def get_real_pms_url(): if plexpy.CONFIG.PMS_IS_REMOTE and item['local'] == '0': plexpy.CONFIG.__setattr__('PMS_URL', item['uri']) plexpy.CONFIG.write() - logger.info("Server URL retrieved.") + logger.info(u"PlexPy PlexTV :: Server URL retrieved.") if not plexpy.CONFIG.PMS_IS_REMOTE and item['local'] == '1': plexpy.CONFIG.__setattr__('PMS_URL', item['uri']) plexpy.CONFIG.write() - logger.info("Server URL retrieved.") + logger.info(u"PlexPy PlexTV :: Server URL retrieved.") else: plexpy.CONFIG.__setattr__('PMS_URL', fallback_url) plexpy.CONFIG.write() - logger.warn("Unable to retrieve server URLs. 
Using user-defined value.") + logger.warn(u"PlexPy PlexTV :: Unable to retrieve server URLs. Using user-defined value.") else: plexpy.CONFIG.__setattr__('PMS_URL', fallback_url) plexpy.CONFIG.write() @@ -139,13 +139,16 @@ class PlexTV(object): plextv_response = self.get_plex_auth(output_format='xml') if plextv_response: - xml_head = plextv_response.getElementsByTagName('user') - if not xml_head: - logger.warn("Error parsing XML for Plex.tv token") + try: + xml_head = plextv_response.getElementsByTagName('user') + if xml_head: + auth_token = xml_head[0].getAttribute('authenticationToken') + else: + logger.warn(u"PlexPy PlexTV :: Could not get Plex authentication token.") + except Exception as e: + logger.warn(u"PlexPy PlexTV :: Unable to parse XML for get_token: %s." % e) return [] - auth_token = xml_head[0].getAttribute('authenticationToken') - return auth_token else: return [] @@ -213,16 +216,16 @@ class PlexTV(object): try: xml_parse = minidom.parseString(own_account) - except Exception, e: - logger.warn("Error parsing XML for Plex account details: %s" % e) + except Exception as e: + logger.warn(u"PlexPy PlexTV :: Unable to parse XML for get_full_users_list own account: %s" % e) return [] except: - logger.warn("Error parsing XML for Plex account details.") + logger.warn(u"PlexPy PlexTV :: Unable to parse XML for get_full_users_list own account.") return [] xml_head = xml_parse.getElementsByTagName('user') if not xml_head: - logger.warn("Error parsing XML for Plex account details.") + logger.warn(u"PlexPy PlexTV :: Unable to parse XML for get_full_users_list.") else: for a in xml_head: own_details = {"user_id": helpers.get_xml_attr(a, 'id'), @@ -238,14 +241,16 @@ class PlexTV(object): try: xml_parse = minidom.parseString(friends_list) - except Exception, e: - logger.warn("Error parsing XML for Plex friends list: %s" % e) + except Exception as e: + logger.warn(u"PlexPy PlexTV :: Unable to parse XML for get_full_users_list friends list: %s" % e) + return [] except: 
- logger.warn("Error parsing XML for Plex friends list.") + logger.warn(u"PlexPy PlexTV :: Unable to parse XML for get_full_users_list friends list.") + return [] xml_head = xml_parse.getElementsByTagName('User') if not xml_head: - logger.warn("Error parsing XML for Plex friends list.") + logger.warn(u"PlexPy PlexTV :: Unable to parse XML for get_full_users_list.") else: for a in xml_head: friend = {"user_id": helpers.get_xml_attr(a, 'id'), @@ -269,17 +274,17 @@ class PlexTV(object): try: xml_parse = minidom.parseString(sync_list) - except Exception, e: - logger.warn("Error parsing XML for Plex sync lists: %s" % e) + except Exception as e: + logger.warn(u"PlexPy PlexTV :: Unable to parse XML for get_synced_items: %s" % e) return [] except: - logger.warn("Error parsing XML for Plex sync lists.") + logger.warn(u"PlexPy PlexTV :: Unable to parse XML for get_synced_items.") return [] xml_head = xml_parse.getElementsByTagName('SyncList') if not xml_head: - logger.warn("Error parsing XML for Plex sync lists.") + logger.warn(u"PlexPy PlexTV :: Unable to parse XML for get_synced_items.") else: for a in xml_head: client_id = helpers.get_xml_attr(a, 'id') @@ -287,8 +292,8 @@ class PlexTV(object): for device in sync_device: device_user_id = helpers.get_xml_attr(device, 'userID') try: - device_username = user_data.get_user_details(user_id=device_user_id)['username'] - device_friendly_name = user_data.get_user_details(user_id=device_user_id)['friendly_name'] + device_username = user_data.get_details(user_id=device_user_id)['username'] + device_friendly_name = user_data.get_details(user_id=device_user_id)['friendly_name'] except: device_username = '' device_friendly_name = '' @@ -375,7 +380,7 @@ class PlexTV(object): if plexpy.CONFIG.PMS_IDENTIFIER: server_id = plexpy.CONFIG.PMS_IDENTIFIER else: - logger.error('PlexPy PlexTV connector :: Unable to retrieve server identity.') + logger.error(u"PlexPy PlexTV :: Unable to retrieve server identity.") return [] plextv_resources = 
self.get_plextv_resources(include_https=include_https) @@ -383,17 +388,17 @@ class PlexTV(object): try: xml_parse = minidom.parseString(plextv_resources) - except Exception, e: - logger.warn("Error parsing XML for Plex resources: %s" % e) + except Exception as e: + logger.warn(u"PlexPy PlexTV :: Unable to parse XML for get_server_urls: %s" % e) return [] except: - logger.warn("Error parsing XML for Plex resources.") + logger.warn(u"PlexPy PlexTV :: Unable to parse XML for get_server_urls.") return [] try: xml_head = xml_parse.getElementsByTagName('Device') - except: - logger.warn("Error parsing XML for Plex resources.") + except Exception as e: + logger.warn(u"PlexPy PlexTV :: Unable to parse XML for get_server_urls: %s." % e) return [] for a in xml_head: @@ -430,49 +435,14 @@ class PlexTV(object): return server_urls - def discover(self): - """ Query plex for all servers online. Returns the ones you own in a selectize format """ - result = self.get_plextv_resources(include_https=True, output_format='raw') - servers = xmltodict.parse(result, process_namespaces=True, attr_prefix='') - clean_servers = [] - - try: - if servers: - # Fix if its only one "device" - if int(servers['MediaContainer']['size']) == 1: - servers['MediaContainer']['Device'] = [servers['MediaContainer']['Device']] - - for server in servers['MediaContainer']['Device']: - # Only grab servers online and own - if server.get('presence', None) == '1' and server.get('owned', None) == '1' and server.get('provides', None) == 'server': - # If someone only has one connection.. 
- if isinstance(server['Connection'], dict): - server['Connection'] = [server['Connection']] - - for s in server['Connection']: - # to avoid circular ref - d = {} - d.update(s) - d.update(server) - d['label'] = d['name'] - d['value'] = d['address'] - del d['Connection'] - clean_servers.append(d) - - except Exception as e: - logger.warn('Failed to get servers from plex %s' % e) - return clean_servers - - return json.dumps(clean_servers, indent=4) - def get_server_times(self): servers = self.get_plextv_server_list(output_format='xml') server_times = [] try: xml_head = servers.getElementsByTagName('Server') - except: - logger.warn("Error parsing XML for Plex servers.") + except Exception as e: + logger.warn(u"PlexPy PlexTV :: Unable to parse XML for get_server_times: %s." % e) return [] for a in xml_head: @@ -482,4 +452,42 @@ class PlexTV(object): }) break - return server_times \ No newline at end of file + return server_times + + def discover(self): + """ Query plex for all servers online. Returns the ones you own in a selectize format """ + servers = self.get_plextv_resources(include_https=True, output_format='xml') + clean_servers = [] + + try: + xml_head = servers.getElementsByTagName('MediaContainer') + except Exception as e: + logger.warn(u"PlexPy PlexTV :: Failed to get servers from plex: %s." 
% e) + return [] + + for a in xml_head: + if a.getAttribute('size'): + if a.getAttribute('size') == '0': + return [] + + if a.getElementsByTagName('Device'): + devices = a.getElementsByTagName('Device') + + for d in devices: + if helpers.get_xml_attr(d, 'presence') == '1' and \ + helpers.get_xml_attr(d, 'owned') == '1' and \ + helpers.get_xml_attr(d, 'provides') == 'server': + connections = d.getElementsByTagName('Connection') + + for c in connections: + server = {'httpsRequired': helpers.get_xml_attr(d, 'httpsRequired'), + 'clientIdentifier': helpers.get_xml_attr(d, 'clientIdentifier'), + 'label': helpers.get_xml_attr(d, 'name'), + 'ip': helpers.get_xml_attr(c, 'address'), + 'port': helpers.get_xml_attr(c, 'port'), + 'local': helpers.get_xml_attr(c, 'local'), + 'value': helpers.get_xml_attr(c, 'address') + } + clean_servers.append(server) + + return clean_servers \ No newline at end of file diff --git a/plexpy/plexwatch_import.py b/plexpy/plexwatch_import.py index b92be702..533b5bdc 100644 --- a/plexpy/plexwatch_import.py +++ b/plexpy/plexwatch_import.py @@ -26,12 +26,12 @@ def extract_plexwatch_xml(xml=None): try: xml_parse = minidom.parseString(clean_xml) except: - logger.warn("Error parsing XML for Plexwatch database.") + logger.warn(u"PlexPy Importer :: Error parsing XML for Plexwatch database.") return None xml_head = xml_parse.getElementsByTagName('opt') if not xml_head: - logger.warn("Error parsing XML for Plexwatch database.") + logger.warn(u"PlexPy Importer :: Error parsing XML for Plexwatch database.") return None for a in xml_head: @@ -41,6 +41,7 @@ def extract_plexwatch_xml(xml=None): grandparent_thumb = helpers.get_xml_attr(a, 'grandparentThumb') grandparent_title = helpers.get_xml_attr(a, 'grandparentTitle') guid = helpers.get_xml_attr(a, 'guid') + section_id = helpers.get_xml_attr(a, 'librarySectionID') media_index = helpers.get_xml_attr(a, 'index') originally_available_at = helpers.get_xml_attr(a, 'originallyAvailableAt') last_viewed_at = 
helpers.get_xml_attr(a, 'lastViewedAt') @@ -156,6 +157,7 @@ def extract_plexwatch_xml(xml=None): 'title': title, 'tagline': tagline, 'guid': guid, + 'section_id': section_id, 'media_index': media_index, 'originally_available_at': originally_available_at, 'last_viewed_at': last_viewed_at, @@ -203,23 +205,23 @@ def validate_database(database=None, table_name=None): try: connection = sqlite3.connect(database, timeout=20) except sqlite3.OperationalError: - logger.error('PlexPy Importer :: Invalid database specified.') + logger.error(u"PlexPy Importer :: Invalid database specified.") return 'Invalid database specified.' except ValueError: - logger.error('PlexPy Importer :: Invalid database specified.') + logger.error(u"PlexPy Importer :: Invalid database specified.") return 'Invalid database specified.' except: - logger.error('PlexPy Importer :: Uncaught exception.') + logger.error(u"PlexPy Importer :: Uncaught exception.") return 'Uncaught exception.' try: connection.execute('SELECT ratingKey from %s' % table_name) connection.close() except sqlite3.OperationalError: - logger.error('PlexPy Importer :: Invalid database specified.') + logger.error(u"PlexPy Importer :: Invalid database specified.") return 'Invalid database specified.' except: - logger.error('PlexPy Importer :: Uncaught exception.') + logger.error(u"PlexPy Importer :: Uncaught exception.") return 'Uncaught exception.' 
return 'success' @@ -230,16 +232,16 @@ def import_from_plexwatch(database=None, table_name=None, import_ignore_interval connection = sqlite3.connect(database, timeout=20) connection.row_factory = sqlite3.Row except sqlite3.OperationalError: - logger.error('PlexPy Importer :: Invalid filename.') + logger.error(u"PlexPy Importer :: Invalid filename.") return None except ValueError: - logger.error('PlexPy Importer :: Invalid filename.') + logger.error(u"PlexPy Importer :: Invalid filename.") return None try: connection.execute('SELECT ratingKey from %s' % table_name) except sqlite3.OperationalError: - logger.error('PlexPy Importer :: Database specified does not contain the required fields.') + logger.error(u"PlexPy Importer :: Database specified does not contain the required fields.") return None logger.debug(u"PlexPy Importer :: PlexWatch data import in progress...") @@ -249,6 +251,8 @@ def import_from_plexwatch(database=None, table_name=None, import_ignore_interval hours=0, minutes=0, seconds=0) plexpy.schedule_job(activity_pinger.check_recently_added, 'Check for recently added items', hours=0, minutes=0, seconds=0) + plexpy.schedule_job(activity_pinger.check_server_response, 'Check for server response', + hours=0, minutes=0, seconds=0) ap = activity_processor.ActivityProcessor() user_data = users.Users() @@ -352,8 +356,8 @@ def import_from_plexwatch(database=None, table_name=None, import_ignore_interval 'title': row['title'], 'parent_title': extracted_xml['parent_title'], 'grandparent_title': row['grandparent_title'], - 'index': extracted_xml['media_index'], - 'parent_index': extracted_xml['parent_media_index'], + 'media_index': extracted_xml['media_index'], + 'parent_media_index': extracted_xml['parent_media_index'], 'thumb': extracted_xml['thumb'], 'parent_thumb': extracted_xml['parent_thumb'], 'grandparent_thumb': extracted_xml['grandparent_thumb'], @@ -370,6 +374,7 @@ def import_from_plexwatch(database=None, table_name=None, import_ignore_interval 'rating': 
extracted_xml['rating'], 'duration': extracted_xml['duration'], 'guid': extracted_xml['guid'], + 'section_id': extracted_xml['section_id'], 'directors': extracted_xml['directors'], 'writers': extracted_xml['writers'], 'actors': extracted_xml['actors'], @@ -409,4 +414,3 @@ def import_users(): logger.debug(u"PlexPy Importer :: Users imported.") except: logger.debug(u"PlexPy Importer :: Failed to import users.") - diff --git a/plexpy/pmsconnect.py b/plexpy/pmsconnect.py index 2fc84a2f..024d6d2e 100644 --- a/plexpy/pmsconnect.py +++ b/plexpy/pmsconnect.py @@ -13,14 +13,14 @@ # You should have received a copy of the GNU General Public License # along with PlexPy. If not, see . -from plexpy import logger, helpers, users, http_handler, common +from plexpy import logger, helpers, users, http_handler, common, database from urlparse import urlparse import plexpy import urllib2 def get_server_friendly_name(): - logger.info("Requesting name from server...") + logger.info(u"PlexPy Pmsconnect :: Requesting name from server...") server_name = PmsConnect().get_server_pref(pref='FriendlyName') # If friendly name is blank @@ -34,9 +34,55 @@ def get_server_friendly_name(): if server_name and server_name != plexpy.CONFIG.PMS_NAME: plexpy.CONFIG.__setattr__('PMS_NAME', server_name) plexpy.CONFIG.write() + logger.info(u"PlexPy Pmsconnect :: Server name retrieved.") return server_name +def refresh_libraries(): + logger.info(u"PlexPy Pmsconnect :: Requesting libraries list refresh...") + library_sections = PmsConnect().get_library_details() + + server_id = plexpy.CONFIG.PMS_IDENTIFIER + + library_keys = [] + + if library_sections: + monitor_db = database.MonitorDatabase() + + for section in library_sections: + section_keys = {'server_id': server_id, + 'section_id': section['section_id']} + section_values = {'server_id': server_id, + 'section_id': section['section_id'], + 'section_name': section['section_name'], + 'section_type': section['section_type'], + 'thumb': section['thumb'], + 
'art': section['art'], + 'count': section['count'], + 'parent_count': section.get('parent_count', None), + 'child_count': section.get('child_count', None), + } + + monitor_db.upsert('library_sections', key_dict=section_keys, value_dict=section_values) + + library_keys.append(section['section_id']) + + + if plexpy.CONFIG.HOME_LIBRARY_CARDS == ['first_run_wizard']: + plexpy.CONFIG.__setattr__('HOME_LIBRARY_CARDS', library_keys) + plexpy.CONFIG.write() + + if plexpy.CONFIG.UPDATE_SECTION_IDS == 1: + from plexpy import libraries + import threading + + threading.Thread(target=libraries.update_section_ids).start() + + logger.info(u"PlexPy Pmsconnect :: Libraries list refreshed.") + else: + logger.warn(u"PlexPy Pmsconnect :: Unable to refresh libraries list.") + + class PmsConnect(object): """ Retrieve data from Plex Server @@ -57,14 +103,14 @@ class PmsConnect(object): port=port, token=plexpy.CONFIG.PMS_TOKEN) - """ - Return current sessions. - - Optional parameters: output_format { dict, json } - - Output: array - """ def get_sessions(self, output_format=''): + """ + Return current sessions. + + Optional parameters: output_format { dict, json } + + Output: array + """ uri = '/status/sessions' request = self.request_handler.make_request(uri=uri, proto=self.protocol, @@ -73,15 +119,15 @@ class PmsConnect(object): return request - """ - Return metadata for request item. - - Parameters required: rating_key { Plex ratingKey } - Optional parameters: output_format { dict, json } - - Output: array - """ def get_metadata(self, rating_key='', output_format=''): + """ + Return metadata for request item. + + Parameters required: rating_key { Plex ratingKey } + Optional parameters: output_format { dict, json } + + Output: array + """ uri = '/library/metadata/' + rating_key request = self.request_handler.make_request(uri=uri, proto=self.protocol, @@ -90,15 +136,15 @@ class PmsConnect(object): return request - """ - Return metadata for children of the request item. 
- - Parameters required: rating_key { Plex ratingKey } - Optional parameters: output_format { dict, json } - - Output: array - """ def get_metadata_children(self, rating_key='', output_format=''): + """ + Return metadata for children of the request item. + + Parameters required: rating_key { Plex ratingKey } + Optional parameters: output_format { dict, json } + + Output: array + """ uri = '/library/metadata/' + rating_key + '/children' request = self.request_handler.make_request(uri=uri, proto=self.protocol, @@ -107,15 +153,15 @@ class PmsConnect(object): return request - """ - Return list of recently added items. - - Parameters required: count { number of results to return } - Optional parameters: output_format { dict, json } - - Output: array - """ def get_recently_added(self, count='0', output_format=''): + """ + Return list of recently added items. + + Parameters required: count { number of results to return } + Optional parameters: output_format { dict, json } + + Output: array + """ uri = '/library/recentlyAdded?X-Plex-Container-Start=0&X-Plex-Container-Size=' + count request = self.request_handler.make_request(uri=uri, proto=self.protocol, @@ -124,15 +170,32 @@ class PmsConnect(object): return request - """ - Return list of children in requested library item. + def get_library_recently_added(self, section_id='', count='0', output_format=''): + """ + Return list of recently added items. 
- Parameters required: rating_key { ratingKey of parent } - Optional parameters: output_format { dict, json } + Parameters required: count { number of results to return } + Optional parameters: output_format { dict, json } + + Output: array + """ + uri = '/library/sections/' + section_id + '/recentlyAdded?X-Plex-Container-Start=0&X-Plex-Container-Size=' + count + request = self.request_handler.make_request(uri=uri, + proto=self.protocol, + request_type='GET', + output_format=output_format) + + return request - Output: array - """ def get_children_list(self, rating_key='', output_format=''): + """ + Return list of children in requested library item. + + Parameters required: rating_key { ratingKey of parent } + Optional parameters: output_format { dict, json } + + Output: array + """ uri = '/library/metadata/' + rating_key + '/children' request = self.request_handler.make_request(uri=uri, proto=self.protocol, @@ -141,14 +204,31 @@ class PmsConnect(object): return request - """ - Return list of local servers. + def get_childrens_list(self, rating_key='', output_format=''): + """ + Return list of children in requested library item. - Optional parameters: output_format { dict, json } + Parameters required: rating_key { ratingKey of parent } + Optional parameters: output_format { dict, json } + + Output: array + """ + uri = '/library/metadata/' + rating_key + '/allLeaves' + request = self.request_handler.make_request(uri=uri, + proto=self.protocol, + request_type='GET', + output_format=output_format) + + return request - Output: array - """ def get_server_list(self, output_format=''): + """ + Return list of local servers. + + Optional parameters: output_format { dict, json } + + Output: array + """ uri = '/servers' request = self.request_handler.make_request(uri=uri, proto=self.protocol, @@ -157,14 +237,14 @@ class PmsConnect(object): return request - """ - Return the local servers preferences. 
- - Optional parameters: output_format { dict, json } - - Output: array - """ def get_server_prefs(self, output_format=''): + """ + Return the local servers preferences. + + Optional parameters: output_format { dict, json } + + Output: array + """ uri = '/:/prefs' request = self.request_handler.make_request(uri=uri, proto=self.protocol, @@ -173,14 +253,14 @@ class PmsConnect(object): return request - """ - Return the local server identity. - - Optional parameters: output_format { dict, json } - - Output: array - """ def get_local_server_identity(self, output_format=''): + """ + Return the local server identity. + + Optional parameters: output_format { dict, json } + + Output: array + """ uri = '/identity' request = self.request_handler.make_request(uri=uri, proto=self.protocol, @@ -189,14 +269,14 @@ class PmsConnect(object): return request - """ - Return list of libraries on server. - - Optional parameters: output_format { dict, json } - - Output: array - """ def get_libraries_list(self, output_format=''): + """ + Return list of libraries on server. + + Optional parameters: output_format { dict, json } + + Output: array + """ uri = '/library/sections' request = self.request_handler.make_request(uri=uri, proto=self.protocol, @@ -205,15 +285,17 @@ class PmsConnect(object): return request - """ - Return list of items in library on server. + def get_library_list(self, section_id='', list_type='all', count='0', sort_type='', output_format=''): + """ + Return list of items in library on server. 
- Optional parameters: output_format { dict, json } + Optional parameters: output_format { dict, json } - Output: array - """ - def get_library_list(self, section_key='', list_type='all', count='0', sort_type='', output_format=''): - uri = '/library/sections/' + section_key + '/' + list_type +'?X-Plex-Container-Start=0&X-Plex-Container-Size=' + count + sort_type + Output: array + """ + count = '&X-Plex-Container-Size=' + count if count else '' + + uri = '/library/sections/' + section_id + '/' + list_type +'?X-Plex-Container-Start=0' + count + sort_type request = self.request_handler.make_request(uri=uri, proto=self.protocol, request_type='GET', @@ -221,15 +303,15 @@ class PmsConnect(object): return request - """ - Return sync item details. - - Parameters required: sync_id { unique sync id for item } - Optional parameters: output_format { dict, json } - - Output: array - """ def get_sync_item(self, sync_id=None, output_format=''): + """ + Return sync item details. + + Parameters required: sync_id { unique sync id for item } + Optional parameters: output_format { dict, json } + + Output: array + """ uri = '/sync/items/' + sync_id request = self.request_handler.make_request(uri=uri, proto=self.protocol, @@ -238,14 +320,14 @@ class PmsConnect(object): return request - """ - Return sync transcode queue. - - Optional parameters: output_format { dict, json } - - Output: array - """ def get_sync_transcode_queue(self, output_format=''): + """ + Return sync transcode queue. + + Optional parameters: output_format { dict, json } + + Output: array + """ uri = '/sync/transcodeQueue' request = self.request_handler.make_request(uri=uri, proto=self.protocol, @@ -254,14 +336,14 @@ class PmsConnect(object): return request - """ - Return search results. - - Optional parameters: output_format { dict, json } - - Output: array - """ def get_search(self, query='', track='', output_format=''): + """ + Return search results. 
+ + Optional parameters: output_format { dict, json } + + Output: array + """ uri = '/search?query=' + urllib2.quote(query.encode('utf8')) + track request = self.request_handler.make_request(uri=uri, proto=self.protocol, @@ -270,14 +352,14 @@ class PmsConnect(object): return request - """ - Return account details. - - Optional parameters: output_format { dict, json } - - Output: array - """ def get_account(self, output_format=''): + """ + Return account details. + + Optional parameters: output_format { dict, json } + + Output: array + """ uri = '/myplex/account' request = self.request_handler.make_request(uri=uri, proto=self.protocol, @@ -286,34 +368,38 @@ class PmsConnect(object): return request - """ - Refresh Plex remote access port mapping. - - Optional parameters: None - - Output: None - """ def put_refresh_reachability(self): + """ + Refresh Plex remote access port mapping. + + Optional parameters: None + + Output: None + """ uri = '/myplex/refreshReachability' request = self.request_handler.make_request(uri=uri, proto=self.protocol, request_type='PUT') return request - """ - Return processed and validated list of recently added items. - Parameters required: count { number of results to return } + def get_recently_added_details(self, section_id='', count='0'): + """ + Return processed and validated list of recently added items. - Output: array - """ - def get_recently_added_details(self, count='0'): - recent = self.get_recently_added(count, output_format='xml') + Parameters required: count { number of results to return } + + Output: array + """ + if section_id: + recent = self.get_library_recently_added(section_id, count, output_format='xml') + else: + recent = self.get_recently_added(count, output_format='xml') try: xml_head = recent.getElementsByTagName('MediaContainer') - except: - logger.warn("Unable to parse XML for get_recently_added.") + except Exception as e: + logger.warn(u"PlexPy Pmsconnect :: Unable to parse XML for get_recently_added: %s." 
% e) return [] recents_list = [] @@ -327,15 +413,21 @@ class PmsConnect(object): if a.getElementsByTagName('Directory'): recents_main = a.getElementsByTagName('Directory') for item in recents_main: - recent_type = helpers.get_xml_attr(item, 'type') - recent_items = {'media_type': recent_type, + recent_items = {'media_type': helpers.get_xml_attr(item, 'type'), 'rating_key': helpers.get_xml_attr(item, 'ratingKey'), 'parent_rating_key': helpers.get_xml_attr(item, 'parentRatingKey'), + 'grandparent_rating_key': helpers.get_xml_attr(item, 'grandparentRatingKey'), 'title': helpers.get_xml_attr(item, 'title'), 'parent_title': helpers.get_xml_attr(item, 'parentTitle'), - 'library_id': helpers.get_xml_attr(item, 'librarySectionID'), - 'library_title': helpers.get_xml_attr(item, 'librarySectionTitle'), + 'grandparent_title': helpers.get_xml_attr(item, 'grandparentTitle'), + 'media_index': helpers.get_xml_attr(item, 'index'), + 'parent_media_index': helpers.get_xml_attr(item, 'parentIndex'), + 'section_id': helpers.get_xml_attr(item, 'librarySectionID'), + 'library_name': helpers.get_xml_attr(item, 'librarySectionTitle'), + 'year': helpers.get_xml_attr(item, 'year'), 'thumb': helpers.get_xml_attr(item, 'thumb'), + 'parent_thumb': helpers.get_xml_attr(item, 'parentThumb'), + 'grandparent_thumb': helpers.get_xml_attr(item, 'grandparentThumb'), 'added_at': helpers.get_xml_attr(item, 'addedAt') } recents_list.append(recent_items) @@ -343,40 +435,42 @@ class PmsConnect(object): if a.getElementsByTagName('Video'): recents_main = a.getElementsByTagName('Video') for item in recents_main: - recent_type = helpers.get_xml_attr(item, 'type') - - if recent_type == 'movie': - recent_items = {'media_type': recent_type, - 'rating_key': helpers.get_xml_attr(item, 'ratingKey'), - 'title': helpers.get_xml_attr(item, 'title'), - 'parent_title': helpers.get_xml_attr(item, 'parentTitle'), - 'library_id': helpers.get_xml_attr(item, 'librarySectionID'), - 'library_title': helpers.get_xml_attr(item, 
'librarySectionTitle'), - 'year': helpers.get_xml_attr(item, 'year'), - 'thumb': helpers.get_xml_attr(item, 'thumb'), - 'added_at': helpers.get_xml_attr(item, 'addedAt') - } - recents_list.append(recent_items) - else: - pass + recent_items = {'media_type': helpers.get_xml_attr(item, 'type'), + 'rating_key': helpers.get_xml_attr(item, 'ratingKey'), + 'parent_rating_key': helpers.get_xml_attr(item, 'parentRatingKey'), + 'grandparent_rating_key': helpers.get_xml_attr(item, 'grandparentRatingKey'), + 'title': helpers.get_xml_attr(item, 'title'), + 'parent_title': helpers.get_xml_attr(item, 'parentTitle'), + 'grandparent_title': helpers.get_xml_attr(item, 'grandparentTitle'), + 'media_index': helpers.get_xml_attr(item, 'index'), + 'parent_media_index': helpers.get_xml_attr(item, 'parentIndex'), + 'section_id': helpers.get_xml_attr(item, 'librarySectionID'), + 'library_name': helpers.get_xml_attr(item, 'librarySectionTitle'), + 'year': helpers.get_xml_attr(item, 'year'), + 'thumb': helpers.get_xml_attr(item, 'thumb'), + 'parent_thumb': helpers.get_xml_attr(item, 'parentThumb'), + 'grandparent_thumb': helpers.get_xml_attr(item, 'grandparentThumb'), + 'added_at': helpers.get_xml_attr(item, 'addedAt') + } + recents_list.append(recent_items) output = {'recently_added': sorted(recents_list, key=lambda k: k['added_at'], reverse=True)} return output - """ - Return processed and validated metadata list for requested item. + def get_metadata_details(self, rating_key='', get_media_info=False): + """ + Return processed and validated metadata list for requested item. 
- Parameters required: rating_key { Plex ratingKey } + Parameters required: rating_key { Plex ratingKey } - Output: array - """ - def get_metadata_details(self, rating_key=''): + Output: array + """ metadata = self.get_metadata(str(rating_key), output_format='xml') try: xml_head = metadata.getElementsByTagName('MediaContainer') - except: - logger.warn("Unable to parse XML for get_metadata.") + except Exception as e: + logger.warn(u"PlexPy Pmsconnect :: Unable to parse XML for get_metadata: %s." % e) return [] metadata_list = [] @@ -397,7 +491,11 @@ class PmsConnect(object): metadata_main = a.getElementsByTagName('Track')[0] metadata_type = helpers.get_xml_attr(metadata_main, 'type') else: - logger.debug(u"Metadata failed") + logger.debug(u"PlexPy Pmsconnect :: Metadata failed") + return None + + section_id = helpers.get_xml_attr(a, 'librarySectionID') + library_name = helpers.get_xml_attr(a, 'librarySectionTitle') genres = [] actors = [] @@ -420,15 +518,54 @@ class PmsConnect(object): for director in metadata_main.getElementsByTagName('Director'): directors.append(helpers.get_xml_attr(director, 'tag')) - if metadata_type == 'show': + if metadata_type == 'movie': metadata = {'media_type': metadata_type, + 'section_id': section_id, + 'library_name': library_name, 'rating_key': helpers.get_xml_attr(metadata_main, 'ratingKey'), - 'grandparent_title': helpers.get_xml_attr(metadata_main, 'grandparentTitle'), - 'parent_index': helpers.get_xml_attr(metadata_main, 'parentIndex'), - 'parent_title': helpers.get_xml_attr(metadata_main, 'parentTitle'), - 'index': helpers.get_xml_attr(metadata_main, 'index'), - 'studio': helpers.get_xml_attr(metadata_main, 'studio'), + 'parent_rating_key': helpers.get_xml_attr(metadata_main, 'parentRatingKey'), + 'grandparent_rating_key': helpers.get_xml_attr(metadata_main, 'grandparentRatingKey'), 'title': helpers.get_xml_attr(metadata_main, 'title'), + 'parent_title': helpers.get_xml_attr(metadata_main, 'parentTitle'), + 'grandparent_title': 
helpers.get_xml_attr(metadata_main, 'grandparentTitle'), + 'media_index': helpers.get_xml_attr(metadata_main, 'index'), + 'parent_media_index': helpers.get_xml_attr(metadata_main, 'parentIndex'), + 'studio': helpers.get_xml_attr(metadata_main, 'studio'), + 'content_rating': helpers.get_xml_attr(metadata_main, 'contentRating'), + 'summary': helpers.get_xml_attr(metadata_main, 'summary'), + 'tagline': helpers.get_xml_attr(metadata_main, 'tagline'), + 'rating': helpers.get_xml_attr(metadata_main, 'rating'), + 'duration': helpers.get_xml_attr(metadata_main, 'duration'), + 'year': helpers.get_xml_attr(metadata_main, 'year'), + 'thumb': helpers.get_xml_attr(metadata_main, 'thumb'), + 'parent_thumb': helpers.get_xml_attr(metadata_main, 'parentThumb'), + 'grandparent_thumb': helpers.get_xml_attr(metadata_main, 'grandparentThumb'), + 'art': helpers.get_xml_attr(metadata_main, 'art'), + 'originally_available_at': helpers.get_xml_attr(metadata_main, 'originallyAvailableAt'), + 'added_at': helpers.get_xml_attr(metadata_main, 'addedAt'), + 'updated_at': helpers.get_xml_attr(metadata_main, 'updatedAt'), + 'last_viewed_at': helpers.get_xml_attr(metadata_main, 'lastViewedAt'), + 'guid': helpers.get_xml_attr(metadata_main, 'guid'), + 'genres': genres, + 'actors': actors, + 'writers': writers, + 'directors': directors + } + metadata_list = {'metadata': metadata} + + elif metadata_type == 'show': + metadata = {'media_type': metadata_type, + 'section_id': section_id, + 'library_name': library_name, + 'rating_key': helpers.get_xml_attr(metadata_main, 'ratingKey'), + 'parent_rating_key': helpers.get_xml_attr(metadata_main, 'parentRatingKey'), + 'grandparent_rating_key': helpers.get_xml_attr(metadata_main, 'grandparentRatingKey'), + 'title': helpers.get_xml_attr(metadata_main, 'title'), + 'parent_title': helpers.get_xml_attr(metadata_main, 'parentTitle'), + 'grandparent_title': helpers.get_xml_attr(metadata_main, 'grandparentTitle'), + 'media_index': helpers.get_xml_attr(metadata_main, 
'index'), + 'parent_media_index': helpers.get_xml_attr(metadata_main, 'parentIndex'), + 'studio': helpers.get_xml_attr(metadata_main, 'studio'), 'content_rating': helpers.get_xml_attr(metadata_main, 'contentRating'), 'summary': helpers.get_xml_attr(metadata_main, 'summary'), 'tagline': helpers.get_xml_attr(metadata_main, 'tagline'), @@ -450,18 +587,22 @@ class PmsConnect(object): 'actors': actors } metadata_list = {'metadata': metadata} + elif metadata_type == 'season': parent_rating_key = helpers.get_xml_attr(metadata_main, 'parentRatingKey') show_details = self.get_metadata_details(parent_rating_key) metadata = {'media_type': metadata_type, + 'section_id': section_id, + 'library_name': library_name, 'rating_key': helpers.get_xml_attr(metadata_main, 'ratingKey'), 'parent_rating_key': helpers.get_xml_attr(metadata_main, 'parentRatingKey'), - 'grandparent_title': helpers.get_xml_attr(metadata_main, 'grandparentTitle'), - 'parent_index': helpers.get_xml_attr(metadata_main, 'parentIndex'), - 'parent_title': helpers.get_xml_attr(metadata_main, 'parentTitle'), - 'index': helpers.get_xml_attr(metadata_main, 'index'), - 'studio': show_details['metadata']['studio'], + 'grandparent_rating_key': helpers.get_xml_attr(metadata_main, 'grandparentRatingKey'), 'title': helpers.get_xml_attr(metadata_main, 'title'), + 'parent_title': helpers.get_xml_attr(metadata_main, 'parentTitle'), + 'grandparent_title': helpers.get_xml_attr(metadata_main, 'grandparentTitle'), + 'media_index': helpers.get_xml_attr(metadata_main, 'index'), + 'parent_media_index': helpers.get_xml_attr(metadata_main, 'parentIndex'), + 'studio': show_details['metadata']['studio'], 'content_rating': show_details['metadata']['content_rating'], 'summary': show_details['metadata']['summary'], 'tagline': helpers.get_xml_attr(metadata_main, 'tagline'), @@ -483,19 +624,22 @@ class PmsConnect(object): 'directors': show_details['metadata']['directors'] } metadata_list = {'metadata': metadata} + elif metadata_type == 
'episode': grandparent_rating_key = helpers.get_xml_attr(metadata_main, 'grandparentRatingKey') show_details = self.get_metadata_details(grandparent_rating_key) metadata = {'media_type': metadata_type, + 'section_id': section_id, + 'library_name': library_name, 'rating_key': helpers.get_xml_attr(metadata_main, 'ratingKey'), 'parent_rating_key': helpers.get_xml_attr(metadata_main, 'parentRatingKey'), 'grandparent_rating_key': helpers.get_xml_attr(metadata_main, 'grandparentRatingKey'), - 'grandparent_title': helpers.get_xml_attr(metadata_main, 'grandparentTitle'), - 'parent_index': helpers.get_xml_attr(metadata_main, 'parentIndex'), - 'parent_title': helpers.get_xml_attr(metadata_main, 'parentTitle'), - 'index': helpers.get_xml_attr(metadata_main, 'index'), - 'studio': show_details['metadata']['studio'], 'title': helpers.get_xml_attr(metadata_main, 'title'), + 'parent_title': helpers.get_xml_attr(metadata_main, 'parentTitle'), + 'grandparent_title': helpers.get_xml_attr(metadata_main, 'grandparentTitle'), + 'media_index': helpers.get_xml_attr(metadata_main, 'index'), + 'parent_media_index': helpers.get_xml_attr(metadata_main, 'parentIndex'), + 'studio': show_details['metadata']['studio'], 'content_rating': helpers.get_xml_attr(metadata_main, 'contentRating'), 'summary': helpers.get_xml_attr(metadata_main, 'summary'), 'tagline': helpers.get_xml_attr(metadata_main, 'tagline'), @@ -517,45 +661,20 @@ class PmsConnect(object): 'directors': directors } metadata_list = {'metadata': metadata} - elif metadata_type == 'movie': - metadata = {'media_type': metadata_type, - 'rating_key': helpers.get_xml_attr(metadata_main, 'ratingKey'), - 'grandparent_title': helpers.get_xml_attr(metadata_main, 'grandparentTitle'), - 'parent_index': helpers.get_xml_attr(metadata_main, 'parentIndex'), - 'parent_title': helpers.get_xml_attr(metadata_main, 'parentTitle'), - 'index': helpers.get_xml_attr(metadata_main, 'index'), - 'studio': helpers.get_xml_attr(metadata_main, 'studio'), - 'title': 
helpers.get_xml_attr(metadata_main, 'title'), - 'content_rating': helpers.get_xml_attr(metadata_main, 'contentRating'), - 'summary': helpers.get_xml_attr(metadata_main, 'summary'), - 'tagline': helpers.get_xml_attr(metadata_main, 'tagline'), - 'rating': helpers.get_xml_attr(metadata_main, 'rating'), - 'duration': helpers.get_xml_attr(metadata_main, 'duration'), - 'year': helpers.get_xml_attr(metadata_main, 'year'), - 'thumb': helpers.get_xml_attr(metadata_main, 'thumb'), - 'parent_thumb': helpers.get_xml_attr(metadata_main, 'parentThumb'), - 'grandparent_thumb': helpers.get_xml_attr(metadata_main, 'grandparentThumb'), - 'art': helpers.get_xml_attr(metadata_main, 'art'), - 'originally_available_at': helpers.get_xml_attr(metadata_main, 'originallyAvailableAt'), - 'added_at': helpers.get_xml_attr(metadata_main, 'addedAt'), - 'updated_at': helpers.get_xml_attr(metadata_main, 'updatedAt'), - 'last_viewed_at': helpers.get_xml_attr(metadata_main, 'lastViewedAt'), - 'guid': helpers.get_xml_attr(metadata_main, 'guid'), - 'genres': genres, - 'actors': actors, - 'writers': writers, - 'directors': directors - } - metadata_list = {'metadata': metadata} + elif metadata_type == 'artist': metadata = {'media_type': metadata_type, + 'section_id': section_id, + 'library_name': library_name, 'rating_key': helpers.get_xml_attr(metadata_main, 'ratingKey'), - 'grandparent_title': helpers.get_xml_attr(metadata_main, 'grandparentTitle'), - 'parent_index': helpers.get_xml_attr(metadata_main, 'parentIndex'), - 'parent_title': helpers.get_xml_attr(metadata_main, 'parentTitle'), - 'index': helpers.get_xml_attr(metadata_main, 'index'), - 'studio': helpers.get_xml_attr(metadata_main, 'studio'), + 'parent_rating_key': helpers.get_xml_attr(metadata_main, 'parentRatingKey'), + 'grandparent_rating_key': helpers.get_xml_attr(metadata_main, 'grandparentRatingKey'), 'title': helpers.get_xml_attr(metadata_main, 'title'), + 'parent_title': helpers.get_xml_attr(metadata_main, 'parentTitle'), + 
'grandparent_title': helpers.get_xml_attr(metadata_main, 'grandparentTitle'), + 'media_index': helpers.get_xml_attr(metadata_main, 'index'), + 'parent_media_index': helpers.get_xml_attr(metadata_main, 'parentIndex'), + 'studio': helpers.get_xml_attr(metadata_main, 'studio'), 'content_rating': helpers.get_xml_attr(metadata_main, 'contentRating'), 'summary': helpers.get_xml_attr(metadata_main, 'summary'), 'tagline': helpers.get_xml_attr(metadata_main, 'tagline'), @@ -577,18 +696,22 @@ class PmsConnect(object): 'actors': actors } metadata_list = {'metadata': metadata} + elif metadata_type == 'album': parent_rating_key = helpers.get_xml_attr(metadata_main, 'parentRatingKey') artist_details = self.get_metadata_details(parent_rating_key) metadata = {'media_type': metadata_type, + 'section_id': section_id, + 'library_name': library_name, 'rating_key': helpers.get_xml_attr(metadata_main, 'ratingKey'), 'parent_rating_key': helpers.get_xml_attr(metadata_main, 'parentRatingKey'), - 'grandparent_title': helpers.get_xml_attr(metadata_main, 'grandparentTitle'), - 'parent_index': helpers.get_xml_attr(metadata_main, 'parentIndex'), - 'parent_title': helpers.get_xml_attr(metadata_main, 'parentTitle'), - 'index': helpers.get_xml_attr(metadata_main, 'index'), - 'studio': helpers.get_xml_attr(metadata_main, 'studio'), + 'grandparent_rating_key': helpers.get_xml_attr(metadata_main, 'grandparentRatingKey'), 'title': helpers.get_xml_attr(metadata_main, 'title'), + 'parent_title': helpers.get_xml_attr(metadata_main, 'parentTitle'), + 'grandparent_title': helpers.get_xml_attr(metadata_main, 'grandparentTitle'), + 'media_index': helpers.get_xml_attr(metadata_main, 'index'), + 'parent_media_index': helpers.get_xml_attr(metadata_main, 'parentIndex'), + 'studio': helpers.get_xml_attr(metadata_main, 'studio'), 'content_rating': helpers.get_xml_attr(metadata_main, 'contentRating'), 'summary': artist_details['metadata']['summary'], 'tagline': helpers.get_xml_attr(metadata_main, 'tagline'), @@ 
-610,19 +733,22 @@ class PmsConnect(object): 'directors': directors } metadata_list = {'metadata': metadata} + elif metadata_type == 'track': parent_rating_key = helpers.get_xml_attr(metadata_main, 'parentRatingKey') album_details = self.get_metadata_details(parent_rating_key) metadata = {'media_type': metadata_type, + 'section_id': section_id, + 'library_name': library_name, 'rating_key': helpers.get_xml_attr(metadata_main, 'ratingKey'), 'parent_rating_key': helpers.get_xml_attr(metadata_main, 'parentRatingKey'), 'grandparent_rating_key': helpers.get_xml_attr(metadata_main, 'grandparentRatingKey'), - 'grandparent_title': helpers.get_xml_attr(metadata_main, 'grandparentTitle'), - 'parent_index': helpers.get_xml_attr(metadata_main, 'parentIndex'), - 'parent_title': helpers.get_xml_attr(metadata_main, 'parentTitle'), - 'index': helpers.get_xml_attr(metadata_main, 'index'), - 'studio': helpers.get_xml_attr(metadata_main, 'studio'), 'title': helpers.get_xml_attr(metadata_main, 'title'), + 'parent_title': helpers.get_xml_attr(metadata_main, 'parentTitle'), + 'grandparent_title': helpers.get_xml_attr(metadata_main, 'grandparentTitle'), + 'media_index': helpers.get_xml_attr(metadata_main, 'index'), + 'parent_media_index': helpers.get_xml_attr(metadata_main, 'parentIndex'), + 'studio': helpers.get_xml_attr(metadata_main, 'studio'), 'content_rating': helpers.get_xml_attr(metadata_main, 'contentRating'), 'summary': helpers.get_xml_attr(metadata_main, 'summary'), 'tagline': helpers.get_xml_attr(metadata_main, 'tagline'), @@ -644,25 +770,93 @@ class PmsConnect(object): 'directors': directors } metadata_list = {'metadata': metadata} + else: return None + if get_media_info: + item_media = metadata_main.getElementsByTagName('Media') + for media in item_media: + media_info = {'container': helpers.get_xml_attr(media, 'container'), + 'bitrate': helpers.get_xml_attr(media, 'bitrate'), + 'video_codec': helpers.get_xml_attr(media, 'videoCodec'), + 'video_resolution': 
helpers.get_xml_attr(media, 'videoResolution'), + 'video_framerate': helpers.get_xml_attr(media, 'videoFrameRate'), + 'audio_codec': helpers.get_xml_attr(media, 'audioCodec'), + 'audio_channels': helpers.get_xml_attr(media, 'audioChannels'), + 'file': helpers.get_xml_attr(media.getElementsByTagName('Part')[0], 'file'), + 'file_size': helpers.get_xml_attr(media.getElementsByTagName('Part')[0], 'size'), + } + metadata.update(media_info) + return metadata_list - """ - Return processed and validated metadata list for all children of requested item. + def get_metadata_children_details(self, rating_key='', get_children=False, get_media_info=False): + """ + Return processed and validated metadata list for all children of requested item. - Parameters required: rating_key { Plex ratingKey } + Parameters required: rating_key { Plex ratingKey } - Output: array - """ - def get_metadata_children_details(self, rating_key=''): + Output: array + """ metadata = self.get_metadata_children(str(rating_key), output_format='xml') try: xml_head = metadata.getElementsByTagName('MediaContainer') - except: - logger.warn("Unable to parse XML for get_metadata_children.") + except Exception as e: + logger.warn(u"PlexPy Pmsconnect :: Unable to parse XML for get_metadata_children: %s." 
% e) + return {'metadata': []} + + metadata_list = [] + + for a in xml_head: + if a.getAttribute('size'): + if a.getAttribute('size') == '0': + metadata_list = {'metadata': []} + return metadata_list + + if a.getElementsByTagName('Video'): + metadata_main = a.getElementsByTagName('Video') + for item in metadata_main: + child_rating_key = helpers.get_xml_attr(item, 'ratingKey') + metadata = self.get_metadata_details(str(child_rating_key), get_media_info) + if metadata: + metadata_list.append(metadata['metadata']) + + elif a.getElementsByTagName('Track'): + metadata_main = a.getElementsByTagName('Track') + for item in metadata_main: + child_rating_key = helpers.get_xml_attr(item, 'ratingKey') + metadata = self.get_metadata_details(str(child_rating_key), get_media_info) + if metadata: + metadata_list.append(metadata['metadata']) + + elif get_children and a.getElementsByTagName('Directory'): + dir_main = a.getElementsByTagName('Directory') + metadata_main = [d for d in dir_main if helpers.get_xml_attr(d, 'ratingKey')] + for item in metadata_main: + child_rating_key = helpers.get_xml_attr(item, 'ratingKey') + metadata = self.get_metadata_children_details(str(child_rating_key), get_children, get_media_info) + if metadata: + metadata_list.extend(metadata['metadata']) + + output = {'metadata': metadata_list} + return output + + def get_library_metadata_details(self, section_id=''): + """ + Return processed and validated metadata list for requested library. + + Parameters required: section_id { Plex library key } + + Output: array + """ + libraries_data = self.get_libraries_list(output_format='xml') + + try: + xml_head = libraries_data.getElementsByTagName('MediaContainer') + except Exception as e: + logger.warn(u"PlexPy Pmsconnect :: Unable to parse XML for get_library_metadata_details: %s." 
% e) return [] metadata_list = [] @@ -673,37 +867,41 @@ class PmsConnect(object): metadata_list = {'metadata': None} return metadata_list - if a.getElementsByTagName('Video'): - metadata_main = a.getElementsByTagName('Video') - for item in metadata_main: - child_rating_key = helpers.get_xml_attr(item, 'ratingKey') - metadata = self.get_metadata_details(str(child_rating_key)) - if metadata: - metadata_list.append(metadata['metadata']) + if a.getElementsByTagName('Directory'): + result_data = a.getElementsByTagName('Directory') + for result in result_data: + key = helpers.get_xml_attr(result, 'key') + if key == section_id: + metadata = {'media_type': 'library', + 'section_id': helpers.get_xml_attr(result, 'key'), + 'library': helpers.get_xml_attr(result, 'type'), + 'title': helpers.get_xml_attr(result, 'title'), + 'art': helpers.get_xml_attr(result, 'art'), + 'thumb': helpers.get_xml_attr(result, 'thumb') + } + if metadata['library'] == 'movie': + metadata['section_type'] = 'movie' + elif metadata['library'] == 'show': + metadata['section_type'] = 'episode' + elif metadata['library'] == 'artist': + metadata['section_type'] = 'track' - elif a.getElementsByTagName('Track'): - metadata_main = a.getElementsByTagName('Track') - for item in metadata_main: - child_rating_key = helpers.get_xml_attr(item, 'ratingKey') - metadata = self.get_metadata_details(str(child_rating_key)) - if metadata: - metadata_list.append(metadata['metadata']) - - output = {'metadata': metadata_list} - return output + metadata_list = {'metadata': metadata} + + return metadata_list - """ - Return processed and validated session list. - - Output: array - """ def get_current_activity(self): + """ + Return processed and validated session list. 
+ + Output: array + """ session_data = self.get_sessions(output_format='xml') try: xml_head = session_data.getElementsByTagName('MediaContainer') - except: - logger.warn("Unable to parse XML for get_sessions.") + except Exception as e: + logger.warn(u"PlexPy Pmsconnect :: Unable to parse XML for get_sessions: %s." % e) return [] session_list = [] @@ -741,15 +939,15 @@ class PmsConnect(object): return output - """ - Return selected data from current sessions. - This function processes and validates session data - - Parameters required: stream_type { track or video } - session { the session dictionary } - Output: dict - """ def get_session_each(self, stream_type='', session=None): + """ + Return selected data from current sessions. + This function processes and validates session data + + Parameters required: stream_type { track or video } + session { the session dictionary } + Output: dict + """ session_output = None user_data = users.Users() @@ -783,7 +981,7 @@ class PmsConnect(object): transcode_container = '' transcode_protocol = '' - user_details = user_data.get_user_details( + user_details = user_data.get_details( user=helpers.get_xml_attr(session.getElementsByTagName('User')[0], 'title')) if helpers.get_xml_attr(session.getElementsByTagName('Player')[0], 'machineIdentifier').endswith('_Track'): @@ -792,6 +990,7 @@ class PmsConnect(object): machine_id = helpers.get_xml_attr(session.getElementsByTagName('Player')[0], 'machineIdentifier') session_output = {'session_key': helpers.get_xml_attr(session, 'sessionKey'), + 'section_id': helpers.get_xml_attr(session, 'librarySectionID'), 'media_index': helpers.get_xml_attr(session, 'index'), 'parent_media_index': helpers.get_xml_attr(session, 'parentIndex'), 'art': helpers.get_xml_attr(session, 'art'), @@ -802,7 +1001,7 @@ class PmsConnect(object): 'user': user_details['username'], 'user_id': user_details['user_id'], 'friendly_name': user_details['friendly_name'], - 'user_thumb': user_details['thumb'], + 'user_thumb': 
user_details['user_thumb'], 'ip_address': helpers.get_xml_attr(session.getElementsByTagName('Player')[0], 'address').split(':')[-1], 'player': helpers.get_xml_attr(session.getElementsByTagName('Player')[0], 'title'), 'platform': helpers.get_xml_attr(session.getElementsByTagName('Player')[0], 'platform'), @@ -903,7 +1102,7 @@ class PmsConnect(object): else: use_indexes = 0 - user_details = user_data.get_user_details( + user_details = user_data.get_details( user=helpers.get_xml_attr(session.getElementsByTagName('User')[0], 'title')) if helpers.get_xml_attr(session.getElementsByTagName('Player')[0], 'machineIdentifier').endswith('_Video'): @@ -913,6 +1112,7 @@ class PmsConnect(object): if helpers.get_xml_attr(session, 'type') == 'episode': session_output = {'session_key': helpers.get_xml_attr(session, 'sessionKey'), + 'section_id': helpers.get_xml_attr(session, 'librarySectionID'), 'media_index': helpers.get_xml_attr(session, 'index'), 'parent_media_index': helpers.get_xml_attr(session, 'parentIndex'), 'art': helpers.get_xml_attr(session, 'art'), @@ -923,7 +1123,7 @@ class PmsConnect(object): 'user': user_details['username'], 'user_id': user_details['user_id'], 'friendly_name': user_details['friendly_name'], - 'user_thumb': user_details['thumb'], + 'user_thumb': user_details['user_thumb'], 'ip_address': helpers.get_xml_attr(session.getElementsByTagName('Player')[0], 'address').split(':')[-1], 'player': helpers.get_xml_attr(session.getElementsByTagName('Player')[0], 'title'), 'platform': helpers.get_xml_attr(session.getElementsByTagName('Player')[0], 'platform'), @@ -970,6 +1170,7 @@ class PmsConnect(object): elif helpers.get_xml_attr(session, 'type') == 'movie': session_output = {'session_key': helpers.get_xml_attr(session, 'sessionKey'), + 'section_id': helpers.get_xml_attr(session, 'librarySectionID'), 'media_index': helpers.get_xml_attr(session, 'index'), 'parent_media_index': helpers.get_xml_attr(session, 'parentIndex'), 'art': helpers.get_xml_attr(session, 
'art'), @@ -980,7 +1181,7 @@ class PmsConnect(object): 'user': user_details['username'], 'user_id': user_details['user_id'], 'friendly_name': user_details['friendly_name'], - 'user_thumb': user_details['thumb'], + 'user_thumb': user_details['user_thumb'], 'ip_address': helpers.get_xml_attr(session.getElementsByTagName('Player')[0], 'address').split(':')[-1], 'player': helpers.get_xml_attr(session.getElementsByTagName('Player')[0], 'title'), 'platform': helpers.get_xml_attr(session.getElementsByTagName('Player')[0], 'platform'), @@ -1027,6 +1228,7 @@ class PmsConnect(object): elif helpers.get_xml_attr(session, 'type') == 'clip': session_output = {'session_key': helpers.get_xml_attr(session, 'sessionKey'), + 'section_id': helpers.get_xml_attr(session, 'librarySectionID'), 'media_index': helpers.get_xml_attr(session, 'index'), 'parent_media_index': helpers.get_xml_attr(session, 'parentIndex'), 'art': helpers.get_xml_attr(session, 'art'), @@ -1037,7 +1239,7 @@ class PmsConnect(object): 'user': user_details['username'], 'user_id': user_details['user_id'], 'friendly_name': user_details['friendly_name'], - 'user_thumb': user_details['thumb'], + 'user_thumb': user_details['user_thumb'], 'ip_address': helpers.get_xml_attr(session.getElementsByTagName('Player')[0], 'address').split(':')[-1], 'player': helpers.get_xml_attr(session.getElementsByTagName('Player')[0], 'title'), 'platform': helpers.get_xml_attr(session.getElementsByTagName('Player')[0], 'platform'), @@ -1108,7 +1310,7 @@ class PmsConnect(object): transcode_container = '' transcode_protocol = '' - user_details = user_data.get_user_details( + user_details = user_data.get_details( user=helpers.get_xml_attr(session.getElementsByTagName('User')[0], 'title')) if helpers.get_xml_attr(session.getElementsByTagName('Player')[0], 'machineIdentifier').endswith('_Photo'): @@ -1117,6 +1319,7 @@ class PmsConnect(object): machine_id = helpers.get_xml_attr(session.getElementsByTagName('Player')[0], 'machineIdentifier') 
session_output = {'session_key': helpers.get_xml_attr(session, 'sessionKey'), + 'section_id': helpers.get_xml_attr(session, 'librarySectionID'), 'media_index': helpers.get_xml_attr(session, 'index'), 'parent_media_index': helpers.get_xml_attr(session, 'parentIndex'), 'art': helpers.get_xml_attr(session, 'art'), @@ -1127,7 +1330,7 @@ class PmsConnect(object): 'user': user_details['username'], 'user_id': user_details['user_id'], 'friendly_name': user_details['friendly_name'], - 'user_thumb': user_details['thumb'], + 'user_thumb': user_details['user_thumb'], 'ip_address': helpers.get_xml_attr(session.getElementsByTagName('Player')[0], 'address').split(':')[-1], 'player': helpers.get_xml_attr(session.getElementsByTagName('Player')[0], 'title'), 'platform': helpers.get_xml_attr(session.getElementsByTagName('Player')[0], 'platform'), @@ -1170,7 +1373,7 @@ class PmsConnect(object): } else: - logger.warn(u"No known stream types found in session list.") + logger.warn(u"PlexPy Pmsconnect :: No known stream types found in session list.") # Rename Mystery platform names session_output['platform'] = common.PLATFORM_NAME_OVERRIDES.get(session_output['platform'], @@ -1178,18 +1381,18 @@ class PmsConnect(object): return session_output - """ - Return processed and validated children list. - - Output: array - """ def get_item_children(self, rating_key=''): + """ + Return processed and validated children list. + + Output: array + """ children_data = self.get_children_list(rating_key, output_format='xml') try: xml_head = children_data.getElementsByTagName('MediaContainer') - except: - logger.warn("Unable to parse XML for get_children_list.") + except Exception as e: + logger.warn(u"PlexPy Pmsconnect :: Unable to parse XML for get_children_list: %s." 
% e) return [] children_list = [] @@ -1197,7 +1400,7 @@ class PmsConnect(object): for a in xml_head: if a.getAttribute('size'): if a.getAttribute('size') == '0': - logger.debug(u"No children data.") + logger.debug(u"PlexPy Pmsconnect :: No children data.") children_list = {'children_count': '0', 'children_list': [] } @@ -1215,7 +1418,7 @@ class PmsConnect(object): if result_data: for result in result_data: children_output = {'rating_key': helpers.get_xml_attr(result, 'ratingKey'), - 'index': helpers.get_xml_attr(result, 'index'), + 'media_index': helpers.get_xml_attr(result, 'index'), 'title': helpers.get_xml_attr(result, 'title'), 'thumb': helpers.get_xml_attr(result, 'thumb'), 'parent_thumb': helpers.get_xml_attr(a, 'thumb'), @@ -1231,18 +1434,18 @@ class PmsConnect(object): return output - """ - Return the list of local servers. - - Output: array - """ def get_servers_info(self): + """ + Return the list of local servers. + + Output: array + """ recent = self.get_server_list(output_format='xml') try: xml_head = recent.getElementsByTagName('Server') - except: - logger.warn("Unable to parse XML for get_server_list.") + except Exception as e: + logger.warn(u"PlexPy Pmsconnect :: Unable to parse XML for get_server_list: %s." % e) return [] server_info = [] @@ -1258,18 +1461,18 @@ class PmsConnect(object): return server_info - """ - Return the local machine identity. - - Output: dict - """ def get_server_identity(self): + """ + Return the local machine identity. + + Output: dict + """ identity = self.get_local_server_identity(output_format='xml') try: xml_head = identity.getElementsByTagName('MediaContainer') - except: - logger.warn("Unable to parse XML for get_local_server_identity.") + except Exception as e: + logger.warn(u"PlexPy Pmsconnect :: Unable to parse XML for get_local_server_identity: %s." % e) return [] server_identity = {} @@ -1280,21 +1483,21 @@ class PmsConnect(object): return server_identity - """ - Return a specified server preference. 
- - Parameters required: pref { name of preference } - - Output: string - """ def get_server_pref(self, pref=None): + """ + Return a specified server preference. + + Parameters required: pref { name of preference } + + Output: string + """ if pref: prefs = self.get_server_prefs(output_format='xml') try: xml_head = prefs.getElementsByTagName('Setting') - except: - logger.warn("Unable to parse XML for get_local_server_name.") + except Exception as e: + logger.warn(u"PlexPy Pmsconnect :: Unable to parse XML for get_local_server_name: %s." % e) return '' pref_value = 'None' @@ -1305,21 +1508,21 @@ class PmsConnect(object): return pref_value else: - logger.debug(u"Server preferences queried but no parameter received.") + logger.debug(u"PlexPy Pmsconnect :: Server preferences queried but no parameter received.") return None - """ - Return processed and validated server libraries list. - - Output: array - """ def get_server_children(self): + """ + Return processed and validated server libraries list. + + Output: array + """ libraries_data = self.get_libraries_list(output_format='xml') try: xml_head = libraries_data.getElementsByTagName('MediaContainer') - except: - logger.warn("Unable to parse XML for get_libraries_list.") + except Exception as e: + logger.warn(u"PlexPy Pmsconnect :: Unable to parse XML for get_libraries_list: %s." 
% e) return [] libraries_list = [] @@ -1327,7 +1530,7 @@ class PmsConnect(object): for a in xml_head: if a.getAttribute('size'): if a.getAttribute('size') == '0': - logger.debug(u"No libraries data.") + logger.debug(u"PlexPy Pmsconnect :: No libraries data.") libraries_list = {'libraries_count': '0', 'libraries_list': [] } @@ -1336,10 +1539,11 @@ class PmsConnect(object): if a.getElementsByTagName('Directory'): result_data = a.getElementsByTagName('Directory') for result in result_data: - libraries_output = {'key': helpers.get_xml_attr(result, 'key'), - 'type': helpers.get_xml_attr(result, 'type'), - 'title': helpers.get_xml_attr(result, 'title'), - 'thumb': helpers.get_xml_attr(result, 'thumb') + libraries_output = {'section_id': helpers.get_xml_attr(result, 'key'), + 'section_type': helpers.get_xml_attr(result, 'type'), + 'section_name': helpers.get_xml_attr(result, 'title'), + 'thumb': helpers.get_xml_attr(result, 'thumb'), + 'art': helpers.get_xml_attr(result, 'art') } libraries_list.append(libraries_output) @@ -1350,70 +1554,129 @@ class PmsConnect(object): return output - """ - Return processed and validated server library items list. + def get_library_children_details(self, section_id='', section_type='', list_type='all', count='', rating_key='', get_media_info=False): + """ + Return processed and validated server library items list. 
- Parameters required: library_type { movie, show, episode, artist } - section_key { unique library key } + Parameters required: section_type { movie, show, episode, artist } + section_id { unique library key } - Output: array - """ - def get_library_children(self, library_type='', section_key='', list_type='all', sort_type = ''): + Output: array + """ - # Currently only grab the library with 1 items so 'size' is not 0 - count = '1' - - if library_type == 'movie': + if section_type == 'movie': sort_type = '&type=1' - elif library_type == 'show': + elif section_type == 'show': sort_type = '&type=2' - elif library_type == 'episode': + elif section_type == 'season': + sort_type = '&type=3' + elif section_type == 'episode': sort_type = '&type=4' - elif library_type == 'album': - list_type = 'albums' + elif section_type == 'artist': + sort_type = '&type=8' + elif section_type == 'album': + sort_type = '&type=9' + elif section_type == 'track': + sort_type = '&type=10' + elif section_type == 'photo': + sort_type = '' + elif section_type == 'photoAlbum': + sort_type = '&type=14' + elif section_type == 'picture': + sort_type = '&type=13' + else: + sort_type = '' - library_data = self.get_library_list(section_key, list_type, count, sort_type, output_format='xml') - - try: - xml_head = library_data.getElementsByTagName('MediaContainer') - except: - logger.warn("Unable to parse XML for get_library_children.") + if str(section_id).isdigit(): + library_data = self.get_library_list(section_id, list_type, count, sort_type, output_format='xml') + elif str(rating_key).isdigit(): + library_data = self.get_children_list(rating_key, output_format='xml') + else: + logger.warn(u"PlexPy Pmsconnect :: get_library_children called by invalid section_id or rating_key provided.") return [] - library_list = [] + try: + xml_head = library_data.getElementsByTagName('MediaContainer') + except Exception as e: + logger.warn(u"PlexPy Pmsconnect :: Unable to parse XML for get_library_children: %s." 
% e) + return [] + + childern_list = [] for a in xml_head: if a.getAttribute('size'): if a.getAttribute('size') == '0': - logger.debug(u"No library data.") - library_list = {'library_count': '0', - 'library_list': [] + logger.debug(u"PlexPy Pmsconnect :: No library data.") + childern_list = {'library_count': '0', + 'childern_list': [] } - return library_list + return childern_list + if rating_key: + library_count = helpers.get_xml_attr(xml_head[0], 'size') + else: + library_count = helpers.get_xml_attr(xml_head[0], 'totalSize') + + # Get show/season info from xml_head + + item_main = [] if a.getElementsByTagName('Directory'): - result_data = a.getElementsByTagName('Directory') - for result in result_data: - library_output = {'key': helpers.get_xml_attr(result, 'key'), - 'type': helpers.get_xml_attr(result, 'type'), - 'title': helpers.get_xml_attr(result, 'title'), - 'thumb': helpers.get_xml_attr(result, 'thumb') + dir_main = a.getElementsByTagName('Directory') + item_main += [d for d in dir_main if helpers.get_xml_attr(d, 'ratingKey')] + if a.getElementsByTagName('Video'): + item_main += a.getElementsByTagName('Video') + if a.getElementsByTagName('Track'): + item_main += a.getElementsByTagName('Track') + if a.getElementsByTagName('Photo'): + item_main += a.getElementsByTagName('Photo') + + for item in item_main: + item_info = {'section_id': helpers.get_xml_attr(a, 'librarySectionID'), + 'media_type': helpers.get_xml_attr(item, 'type'), + 'rating_key': helpers.get_xml_attr(item, 'ratingKey'), + 'parent_rating_key': helpers.get_xml_attr(item, 'parentRatingKey'), + 'grandparent_rating_key': helpers.get_xml_attr(a, 'grandparentRatingKey'), + 'title': helpers.get_xml_attr(item, 'title'), + 'parent_title': helpers.get_xml_attr(a, 'parentTitle'), + 'grandparent_title': helpers.get_xml_attr(a, 'grandparentTitle'), + 'media_index': helpers.get_xml_attr(item, 'index'), + 'parent_media_index': helpers.get_xml_attr(a, 'parentIndex'), + 'year': helpers.get_xml_attr(item, 
'year'), + 'thumb': helpers.get_xml_attr(item, 'thumb'), + 'parent_thumb': helpers.get_xml_attr(a, 'thumb'), + 'grandparent_thumb': helpers.get_xml_attr(a, 'grandparentThumb'), + 'added_at': helpers.get_xml_attr(item, 'addedAt') + } + + if get_media_info: + item_media = item.getElementsByTagName('Media') + for media in item_media: + media_info = {'container': helpers.get_xml_attr(media, 'container'), + 'bitrate': helpers.get_xml_attr(media, 'bitrate'), + 'video_codec': helpers.get_xml_attr(media, 'videoCodec'), + 'video_resolution': helpers.get_xml_attr(media, 'videoResolution'), + 'video_framerate': helpers.get_xml_attr(media, 'videoFrameRate'), + 'audio_codec': helpers.get_xml_attr(media, 'audioCodec'), + 'audio_channels': helpers.get_xml_attr(media, 'audioChannels'), + 'file': helpers.get_xml_attr(media.getElementsByTagName('Part')[0], 'file'), + 'file_size': helpers.get_xml_attr(media.getElementsByTagName('Part')[0], 'size'), } - library_list.append(library_output) + item_info.update(media_info) - output = {'library_count': helpers.get_xml_attr(xml_head[0], 'totalSize'), - 'count_type': helpers.get_xml_attr(xml_head[0], 'title2'), - 'library_list': library_list + childern_list.append(item_info) + + output = {'library_count': library_count, + 'childern_list': childern_list } - + return output - """ - Return processed and validated library statistics. + def get_library_details(self): + """ + Return processed and validated library statistics. 
- Output: array - """ - def get_library_stats(self, library_cards=''): + Output: array + """ server_libraries = self.get_server_children() server_library_stats = [] @@ -1422,49 +1685,60 @@ class PmsConnect(object): libraries_list = server_libraries['libraries_list'] for library in libraries_list: - library_type = library['type'] - section_key = library['key'] - if section_key in library_cards: - library_list = self.get_library_children(library_type, section_key) - else: - continue + section_type = library['section_type'] + section_id = library['section_id'] + children_list = self.get_library_children_details(section_id=section_id, section_type=section_type, count='1') - if library_list['library_count'] != '0': - library_stats = {'title': library['title'], + if children_list['library_count'] != '0': + library_stats = {'section_id': library['section_id'], + 'section_name': library['section_name'], + 'section_type': section_type, 'thumb': library['thumb'], - 'count': library_list['library_count'], - 'count_type': library_list['count_type'] + 'art': library['art'], + 'count': children_list['library_count'] } - if library_type == 'show': - episode_list = self.get_library_children(library_type='episode', section_key=section_key) - episode_stats = {'episode_count': episode_list['library_count'], - 'episode_count_type': 'All Episodes' - } - library_stats.update(episode_stats) + if section_type == 'show': + parent_list = self.get_library_children_details(section_id=section_id, section_type='season', count='1') + parent_stats = {'parent_count': parent_list['library_count']} + library_stats.update(parent_stats) - if library_type == 'artist': - album_list = self.get_library_children(library_type='album', section_key=section_key) - album_stats = {'album_count': album_list['library_count'], - 'album_count_type': 'All Albums' - } - library_stats.update(album_stats) + child_list = self.get_library_children_details(section_id=section_id, section_type='episode', count='1') + 
child_stats = {'child_count': child_list['library_count']} + library_stats.update(child_stats) - server_library_stats.append({'type': library_type, - 'rows': library_stats}) + if section_type == 'artist': + parent_list = self.get_library_children_details(section_id=section_id, section_type='album', count='1') + parent_stats = {'parent_count': parent_list['library_count']} + library_stats.update(parent_stats) + + child_list = self.get_library_children_details(section_id=section_id, section_type='track', count='1') + child_stats = {'child_count': child_list['library_count']} + library_stats.update(child_stats) + + if section_type == 'photo': + parent_list = self.get_library_children_details(section_id=section_id, section_type='photoAlbum', count='1') + parent_stats = {'parent_count': parent_list['library_count']} + library_stats.update(parent_stats) + + child_list = self.get_library_children_details(section_id=section_id, section_type='picture', count='1') + child_stats = {'child_count': child_list['library_count']} + library_stats.update(child_stats) + + server_library_stats.append(library_stats) return server_library_stats - """ - Return image data as array. - Array contains the image content type and image binary - - Parameters required: img { Plex image location } - Optional parameters: width { the image width } - height { the image height } - Output: array - """ def get_image(self, img=None, width=None, height=None): + """ + Return image data as array. 
+ Array contains the image content type and image binary + + Parameters required: img { Plex image location } + Optional parameters: width { the image width } + height { the image height } + Output: array + """ if img: if width.isdigit() and height.isdigit(): uri = '/photo/:/transcode?url=http://127.0.0.1:32400' + img + '&width=' + width + '&height=' + height @@ -1478,15 +1752,15 @@ class PmsConnect(object): return [request, content_type] else: - logger.error("Image proxy queries but no input received.") + logger.error(u"PlexPy Pmsconnect :: Image proxy queries but no input received.") return None - """ - Return processed list of search results. - - Output: array - """ def get_search_results(self, query=''): + """ + Return processed list of search results. + + Output: array + """ search_results = self.get_search(query=query, output_format='xml') search_results_tracks = self.get_search(query=query, track='&type=10', output_format='xml') @@ -1500,8 +1774,8 @@ class PmsConnect(object): xml_head += search_results_tracks.getElementsByTagName('MediaContainer') except: pass - except: - logger.warn("Unable to parse XML for get_search_result_details.") + except Exception as e: + logger.warn(u"PlexPy Pmsconnect :: Unable to parse XML for get_search_result_details: %s." % e) return [] search_results_count = 0 @@ -1519,7 +1793,7 @@ class PmsConnect(object): if a.getAttribute('size'): totalSize += int(a.getAttribute('size')) if totalSize == 0: - logger.debug(u"No search results.") + logger.debug(u"PlexPy Pmsconnect :: No search results.") search_results_list = {'results_count': search_results_count, 'results_list': [] } @@ -1574,12 +1848,12 @@ class PmsConnect(object): return output - """ - Return processed list of grandparent/parent/child rating keys. - - Output: array - """ def get_rating_keys_list(self, rating_key='', media_type=''): + """ + Return processed list of grandparent/parent/child rating keys. 
+ + Output: array + """ if media_type == 'movie': key_list = {0: {'rating_key': int(rating_key)}} @@ -1589,22 +1863,29 @@ class PmsConnect(object): match_type = 'title' else: match_type = 'index' - + + section_id = None + library_name = None + # get grandparent rating key if media_type == 'season' or media_type == 'album': try: metadata = self.get_metadata_details(rating_key=rating_key) rating_key = metadata['metadata']['parent_rating_key'] - except: - logger.warn("Unable to get parent_rating_key for get_rating_keys_list.") + section_id = metadata['metadata']['section_id'] + library_name = metadata['metadata']['library_name'] + except Exception as e: + logger.warn(u"PlexPy Pmsconnect :: Unable to get parent_rating_key for get_rating_keys_list: %s." % e) return {} elif media_type == 'episode' or media_type == 'track': try: metadata = self.get_metadata_details(rating_key=rating_key) rating_key = metadata['metadata']['grandparent_rating_key'] - except: - logger.warn("Unable to get grandparent_rating_key for get_rating_keys_list.") + section_id = metadata['metadata']['section_id'] + library_name = metadata['metadata']['library_name'] + except Exception as e: + logger.warn(u"PlexPy Pmsconnect :: Unable to get grandparent_rating_key for get_rating_keys_list: %s." % e) return {} # get parent_rating_keys @@ -1612,8 +1893,8 @@ class PmsConnect(object): try: xml_head = metadata.getElementsByTagName('MediaContainer') - except: - logger.warn("Unable to parse XML for get_rating_keys_list.") + except Exception as e: + logger.warn(u"PlexPy Pmsconnect :: Unable to parse XML for get_rating_keys_list: %s." % e) return {} for a in xml_head: @@ -1640,8 +1921,8 @@ class PmsConnect(object): try: xml_head = metadata.getElementsByTagName('MediaContainer') - except: - logger.warn("Unable to parse XML for get_rating_keys_list.") + except Exception as e: + logger.warn(u"PlexPy Pmsconnect :: Unable to parse XML for get_rating_keys_list: %s." 
% e) return {} for a in xml_head: @@ -1668,16 +1949,18 @@ class PmsConnect(object): key = int(parent_index) if match_type == 'index' else parent_title parents.update({key: - {'rating_key': int(parent_rating_key), - 'children': children} - }) + {'rating_key': int(parent_rating_key), + 'children': children} + }) key = 0 if match_type == 'index' else title key_list = {key: {'rating_key': int(rating_key), - 'children': parents} - } - + 'children': parents }, + 'section_id': section_id, + 'library_name': library_name + } + return key_list def get_server_response(self): @@ -1687,8 +1970,8 @@ class PmsConnect(object): try: xml_head = account_data.getElementsByTagName('MyPlex') - except: - logger.warn("Unable to parse XML for get_server_response.") + except Exception as e: + logger.warn(u"PlexPy Pmsconnect :: Unable to parse XML for get_server_response: %s." % e) return None server_response = {} @@ -1700,4 +1983,4 @@ class PmsConnect(object): 'public_port': helpers.get_xml_attr(a, 'publicPort') } - return server_response + return server_response \ No newline at end of file diff --git a/plexpy/users.py b/plexpy/users.py index c7adfedd..0ee051b8 100644 --- a/plexpy/users.py +++ b/plexpy/users.py @@ -21,29 +21,28 @@ class Users(object): def __init__(self): pass - def get_user_list(self, kwargs=None): + def get_datatables_list(self, kwargs=None): data_tables = datatables.DataTables() custom_where = ['users.deleted_user', 0] - columns = ['session_history.id', - 'users.user_id as user_id', - 'users.custom_avatar_url as user_thumb', - '(case when users.friendly_name is null then users.username else \ - users.friendly_name end) as friendly_name', - 'MAX(session_history.started) as last_seen', - 'session_history.ip_address as ip_address', - 'COUNT(session_history.id) as plays', - 'session_history.platform as platform', - 'session_history.player as player', - 'session_history_metadata.full_title as last_watched', + columns = ['users.user_id', + 'users.username', + 
'users.friendly_name', + 'users.thumb AS user_thumb', + 'users.custom_avatar_url AS custom_thumb', + 'COUNT(session_history.id) AS plays', + 'MAX(session_history.started) AS last_seen', + 'session_history_metadata.full_title AS last_watched', + 'session_history.ip_address', + 'session_history.platform', + 'session_history.player', 'session_history_metadata.thumb', 'session_history_metadata.parent_thumb', 'session_history_metadata.grandparent_thumb', 'session_history_metadata.media_type', - 'session_history.rating_key as rating_key', + 'session_history.rating_key', 'session_history_media_info.video_decision', - 'users.username as user', 'users.do_notify as do_notify', 'users.keep_history as keep_history' ] @@ -62,8 +61,8 @@ class Users(object): ['session_history.id', 'session_history_metadata.id'], ['session_history.id', 'session_history_media_info.id']], kwargs=kwargs) - except: - logger.warn("Unable to execute database query.") + except Exception as e: + logger.warn(u"PlexPy Users :: Unable to execute database query for get_list: %s." 
% e) return {'recordsFiltered': 0, 'recordsTotal': 0, 'draw': 0, @@ -74,38 +73,44 @@ class Users(object): rows = [] for item in users: - if item["media_type"] == 'episode' and item["parent_thumb"]: - thumb = item["parent_thumb"] - elif item["media_type"] == 'episode': - thumb = item["grandparent_thumb"] + if item['friendly_name']: + friendly_name = item['friendly_name'] else: - thumb = item["thumb"] + friendly_name = item['username'] + + if item['media_type'] == 'episode' and item['parent_thumb']: + thumb = item['parent_thumb'] + elif item['media_type'] == 'episode': + thumb = item['grandparent_thumb'] + else: + thumb = item['thumb'] - if not item['user_thumb'] or item['user_thumb'] == '': - user_thumb = common.DEFAULT_USER_THUMB - else: + if item['custom_thumb'] and item['custom_thumb'] != item['user_thumb']: + user_thumb = item['custom_thumb'] + elif item['user_thumb']: user_thumb = item['user_thumb'] + else: + user_thumb = common.DEFAULT_USER_THUMB # Rename Mystery platform names - platform = common.PLATFORM_NAME_OVERRIDES.get(item["platform"], item["platform"]) + platform = common.PLATFORM_NAME_OVERRIDES.get(item['platform'], item['platform']) - row = {"id": item['id'], - "plays": item['plays'], - "last_seen": item['last_seen'], - "friendly_name": item['friendly_name'], - "ip_address": item['ip_address'], - "platform": platform, - "player": item["player"], - "last_watched": item['last_watched'], - "thumb": thumb, - "media_type": item['media_type'], - "rating_key": item['rating_key'], - "video_decision": item['video_decision'], - "user_thumb": user_thumb, - "user": item["user"], - "user_id": item['user_id'], - "do_notify": helpers.checked(item['do_notify']), - "keep_history": helpers.checked(item['keep_history']) + row = {'user_id': item['user_id'], + 'username': item['username'], + 'friendly_name': friendly_name, + 'user_thumb': user_thumb, + 'plays': item['plays'], + 'last_seen': item['last_seen'], + 'last_watched': item['last_watched'], + 'ip_address': 
item['ip_address'], + 'platform': platform, + 'player': item['player'], + 'thumb': thumb, + 'media_type': item['media_type'], + 'rating_key': item['rating_key'], + 'video_decision': item['video_decision'], + 'do_notify': helpers.checked(item['do_notify']), + 'keep_history': helpers.checked(item['keep_history']) } rows.append(row) @@ -114,39 +119,38 @@ class Users(object): 'recordsTotal': query['totalCount'], 'data': rows, 'draw': query['draw'] - } + } return dict - def get_user_unique_ips(self, kwargs=None, custom_where=None): + def get_datatables_unique_ips(self, user_id=None, kwargs=None): data_tables = datatables.DataTables() - # Change custom_where column name due to ambiguous column name after JOIN - custom_where[0][0] = 'custom_user_id' if custom_where[0][0] == 'user_id' else custom_where[0][0] + custom_where = ['users.user_id', user_id] columns = ['session_history.id', - 'session_history.started as last_seen', - 'session_history.ip_address as ip_address', - 'COUNT(session_history.id) as play_count', - 'session_history.platform as platform', - 'session_history.player as player', - 'session_history_metadata.full_title as last_watched', + 'session_history.started AS last_seen', + 'session_history.ip_address', + 'COUNT(session_history.id) AS play_count', + 'session_history.platform', + 'session_history.player', + 'session_history_metadata.full_title AS last_watched', 'session_history_metadata.thumb', 'session_history_metadata.parent_thumb', 'session_history_metadata.grandparent_thumb', 'session_history_metadata.media_type', - 'session_history.rating_key as rating_key', + 'session_history.rating_key', 'session_history_media_info.video_decision', - 'session_history.user as user', + 'session_history.user', 'session_history.user_id as custom_user_id', - '(case when users.friendly_name is null then users.username else \ - users.friendly_name end) as friendly_name' + '(CASE WHEN users.friendly_name IS NULL THEN users.username ELSE \ + users.friendly_name END) AS 
friendly_name' ] try: query = data_tables.ssp_query(table_name='session_history', columns=columns, - custom_where=custom_where, + custom_where=[custom_where], group_by=['ip_address'], join_types=['JOIN', 'JOIN', @@ -158,8 +162,8 @@ class Users(object): ['session_history.id', 'session_history_metadata.id'], ['session_history.id', 'session_history_media_info.id']], kwargs=kwargs) - except: - logger.warn("Unable to execute database query.") + except Exception as e: + logger.warn(u"PlexPy Users :: Unable to execute database query for get_unique_ips: %s." % e) return {'recordsFiltered': 0, 'recordsTotal': 0, 'draw': 0, @@ -180,18 +184,18 @@ class Users(object): # Rename Mystery platform names platform = common.PLATFORM_NAME_OVERRIDES.get(item["platform"], item["platform"]) - row = {"id": item['id'], - "last_seen": item['last_seen'], - "ip_address": item['ip_address'], - "play_count": item['play_count'], - "platform": platform, - "player": item['player'], - "last_watched": item['last_watched'], - "thumb": thumb, - "media_type": item['media_type'], - "rating_key": item['rating_key'], - "video_decision": item['video_decision'], - "friendly_name": item['friendly_name'] + row = {'id': item['id'], + 'last_seen': item['last_seen'], + 'ip_address': item['ip_address'], + 'play_count': item['play_count'], + 'platform': platform, + 'player': item['player'], + 'last_watched': item['last_watched'], + 'thumb': thumb, + 'media_type': item['media_type'], + 'rating_key': item['rating_key'], + 'video_decision': item['video_decision'], + 'friendly_name': item['friendly_name'] } rows.append(row) @@ -200,290 +204,173 @@ class Users(object): 'recordsTotal': query['totalCount'], 'data': rows, 'draw': query['draw'] - } + } return dict - # TODO: The getter and setter for this needs to become a config getter/setter for more than just friendlyname - def set_user_friendly_name(self, user=None, user_id=None, friendly_name=None, do_notify=0, keep_history=1): - if user_id: - if friendly_name.strip() 
== '': - friendly_name = None - + def set_config(self, user_id=None, friendly_name='', custom_thumb='', do_notify=1, keep_history=1): + if str(user_id).isdigit(): monitor_db = database.MonitorDatabase() - control_value_dict = {"user_id": user_id} - new_value_dict = {"friendly_name": friendly_name, - "do_notify": do_notify, - "keep_history": keep_history} + key_dict = {'user_id': user_id} + value_dict = {'friendly_name': friendly_name, + 'custom_avatar_url': custom_thumb, + 'do_notify': do_notify, + 'keep_history': keep_history} try: - monitor_db.upsert('users', new_value_dict, control_value_dict) - except Exception, e: - logger.debug(u"Uncaught exception %s" % e) - if user: - if friendly_name.strip() == '': - friendly_name = None + monitor_db.upsert('users', value_dict, key_dict) + except Exception as e: + logger.warn(u"PlexPy Users :: Unable to execute database query for set_config: %s." % e) - monitor_db = database.MonitorDatabase() - - control_value_dict = {"username": user} - new_value_dict = {"friendly_name": friendly_name, - "do_notify": do_notify, - "keep_history": keep_history} - try: - monitor_db.upsert('users', new_value_dict, control_value_dict) - except Exception, e: - logger.debug(u"Uncaught exception %s" % e) - - def set_user_profile_url(self, user=None, user_id=None, profile_url=None): - if user_id: - if profile_url.strip() == '': - profile_url = None - - monitor_db = database.MonitorDatabase() - - control_value_dict = {"user_id": user_id} - new_value_dict = {"custom_avatar_url": profile_url} - try: - monitor_db.upsert('users', new_value_dict, control_value_dict) - except Exception, e: - logger.debug(u"Uncaught exception %s" % e) - if user: - if profile_url.strip() == '': - profile_url = None - - monitor_db = database.MonitorDatabase() - - control_value_dict = {"username": user} - new_value_dict = {"custom_avatar_url": profile_url} - try: - monitor_db.upsert('users', new_value_dict, control_value_dict) - except Exception, e: - logger.debug(u"Uncaught 
exception %s" % e) - - def get_user_friendly_name(self, user=None, user_id=None): - if user_id: - monitor_db = database.MonitorDatabase() - query = 'select username, ' \ - '(CASE WHEN friendly_name IS NULL THEN username ELSE friendly_name END) as friendly_name,' \ - 'do_notify, keep_history, custom_avatar_url as thumb ' \ - 'FROM users WHERE user_id = ?' - result = monitor_db.select(query, args=[user_id]) - if result: - user_detail = {'user_id': user_id, - 'user': result[0]['username'], - 'friendly_name': result[0]['friendly_name'], - 'thumb': result[0]['thumb'], - 'do_notify': helpers.checked(result[0]['do_notify']), - 'keep_history': helpers.checked(result[0]['keep_history']) - } - return user_detail - else: - user_detail = {'user_id': user_id, - 'user': '', - 'friendly_name': '', - 'do_notify': '', - 'thumb': '', - 'keep_history': ''} - return user_detail - elif user: - monitor_db = database.MonitorDatabase() - query = 'select user_id, ' \ - '(CASE WHEN friendly_name IS NULL THEN username ELSE friendly_name END) as friendly_name,' \ - 'do_notify, keep_history, custom_avatar_url as thumb ' \ - 'FROM users WHERE username = ?' - result = monitor_db.select(query, args=[user]) - if result: - user_detail = {'user_id': result[0]['user_id'], - 'user': user, - 'friendly_name': result[0]['friendly_name'], - 'thumb': result[0]['thumb'], - 'do_notify': helpers.checked(result[0]['do_notify']), - 'keep_history': helpers.checked(result[0]['keep_history'])} - return user_detail - else: - user_detail = {'user_id': None, - 'user': user, - 'friendly_name': '', - 'do_notify': '', - 'thumb': '', - 'keep_history': ''} - return user_detail - - return None - - def get_user_id(self, user=None): - if user: - try: - monitor_db = database.MonitorDatabase() - query = 'select user_id FROM users WHERE username = ?' 
- result = monitor_db.select_single(query, args=[user]) - if result: - return result['user_id'] - else: - return None - except: - return None - - return None - - def get_user_details(self, user=None, user_id=None): + def get_details(self, user_id=None, user=None): from plexpy import plextv monitor_db = database.MonitorDatabase() - - if user: - query = 'SELECT user_id, username, friendly_name, email, ' \ - 'custom_avatar_url as thumb, is_home_user, is_allow_sync, is_restricted, do_notify ' \ - 'FROM users ' \ - 'WHERE username = ? ' \ - 'UNION ALL ' \ - 'SELECT null, user, null, null, null, null, null, null, null ' \ - 'FROM session_history ' \ - 'WHERE user = ? ' \ - 'GROUP BY user ' \ - 'LIMIT 1' - result = monitor_db.select(query, args=[user, user]) - elif user_id: - query = 'SELECT user_id, username, friendly_name, email, ' \ - 'custom_avatar_url as thumb, is_home_user, is_allow_sync, is_restricted, do_notify ' \ - 'FROM users ' \ - 'WHERE user_id = ? ' \ - 'UNION ALL ' \ - 'SELECT user_id, user, null, null, null, null, null, null, null ' \ - 'FROM session_history ' \ - 'WHERE user_id = ? ' \ - 'GROUP BY user ' \ - 'LIMIT 1' - result = monitor_db.select(query, args=[user_id, user_id]) - else: - result = None + + try: + if str(user_id).isdigit(): + query = 'SELECT user_id, username, friendly_name, thumb AS user_thumb, custom_avatar_url AS custom_thumb, ' \ + 'email, is_home_user, is_allow_sync, is_restricted, do_notify, keep_history ' \ + 'FROM users ' \ + 'WHERE user_id = ? ' + result = monitor_db.select(query, args=[user_id]) + elif user: + query = 'SELECT user_id, username, friendly_name, thumb AS user_thumb, custom_avatar_url AS custom_thumb, ' \ + 'email, is_home_user, is_allow_sync, is_restricted, do_notify, keep_history ' \ + 'FROM users ' \ + 'WHERE username = ? ' + result = monitor_db.select(query, args=[user]) + else: + result = [] + except Exception as e: + logger.warn(u"PlexPy Users :: Unable to execute database query for get_details: %s." 
% e) + result = [] if result: user_details = {} for item in result: - if not item['friendly_name']: - friendly_name = item['username'] - else: + if item['friendly_name']: friendly_name = item['friendly_name'] - if not item['thumb'] or item['thumb'] == '': - user_thumb = common.DEFAULT_USER_THUMB else: - user_thumb = item['thumb'] + friendly_name = item['username'] - user_details = {"user_id": item['user_id'], - "username": item['username'], - "friendly_name": friendly_name, - "email": item['email'], - "thumb": user_thumb, - "is_home_user": item['is_home_user'], - "is_allow_sync": item['is_allow_sync'], - "is_restricted": item['is_restricted'], - "do_notify": item['do_notify'] + if item['custom_thumb'] and item['custom_thumb'] != item['user_thumb']: + user_thumb = item['custom_thumb'] + elif item['user_thumb']: + user_thumb = item['user_thumb'] + else: + user_thumb = common.DEFAULT_USER_THUMB + + user_details = {'user_id': item['user_id'], + 'username': item['username'], + 'friendly_name': friendly_name, + 'user_thumb': user_thumb, + 'email': item['email'], + 'is_home_user': item['is_home_user'], + 'is_allow_sync': item['is_allow_sync'], + 'is_restricted': item['is_restricted'], + 'do_notify': item['do_notify'], + 'keep_history': item['keep_history'] } return user_details else: - logger.warn(u"PlexPy :: Unable to retrieve user from local database. Requesting user list refresh.") + logger.warn(u"PlexPy Users :: Unable to retrieve user from local database. Requesting user list refresh.") # Let's first refresh the user list to make sure the user isn't newly added and not in the db yet - if user: - # Refresh users - plextv.refresh_users() - query = 'SELECT user_id, username, friendly_name, email, ' \ - 'custom_avatar_url as thumb, is_home_user, is_allow_sync, is_restricted, do_notify ' \ - 'FROM users ' \ - 'WHERE username = ? ' \ - 'UNION ALL ' \ - 'SELECT null, user, null, null, null, null, null, null, null ' \ - 'FROM session_history ' \ - 'WHERE user = ? 
' \ - 'GROUP BY user ' \ - 'LIMIT 1' - result = monitor_db.select(query, args=[user, user]) - elif user_id: - # Refresh users - plextv.refresh_users() - query = 'SELECT user_id, username, friendly_name, email, ' \ - 'custom_avatar_url as thumb, is_home_user, is_allow_sync, is_restricted, do_notify ' \ - 'FROM users ' \ - 'WHERE user_id = ? ' \ - 'UNION ALL ' \ - 'SELECT user_id, user, null, null, null, null, null, null, null ' \ - 'FROM session_history ' \ - 'WHERE user_id = ? ' \ - 'GROUP BY user ' \ - 'LIMIT 1' - result = monitor_db.select(query, args=[user_id, user_id]) - else: - result = None + try: + if str(user_id).isdigit(): + # Refresh users + plextv.refresh_users() + query = 'SELECT user_id, username, friendly_name, thumb AS user_thumb, custom_avatar_url AS custom_thumb, ' \ + 'email, is_home_user, is_allow_sync, is_restricted, do_notify, keep_history ' \ + 'FROM users ' \ + 'WHERE user_id = ? ' + result = monitor_db.select(query, args=[user_id]) + elif user: + query = 'SELECT user_id, username, friendly_name, thumb AS user_thumb, custom_avatar_url AS custom_thumb, ' \ + 'email, is_home_user, is_allow_sync, is_restricted, do_notify, keep_history ' \ + 'FROM users ' \ + 'WHERE username = ? ' + result = monitor_db.select(query, args=[user]) + else: + result = [] + except Exception as e: + logger.warn(u"PlexPy Users :: Unable to execute database query for get_details: %s." 
% e) + result = [] if result: user_details = {} for item in result: - if not item['friendly_name']: - friendly_name = item['username'] - else: + if item['friendly_name']: friendly_name = item['friendly_name'] - if not item['thumb'] or item['thumb'] == '': - user_thumb = common.DEFAULT_USER_THUMB else: - user_thumb = item['thumb'] + friendly_name = item['username'] - user_details = {"user_id": item['user_id'], - "username": item['username'], - "friendly_name": friendly_name, - "email": item['email'], - "thumb": user_thumb, - "is_home_user": item['is_home_user'], - "is_allow_sync": item['is_allow_sync'], - "is_restricted": item['is_restricted'], - "do_notify": item['do_notify'] + if item['custom_thumb'] and item['custom_thumb'] != item['user_thumb']: + user_thumb = item['custom_thumb'] + elif item['user_thumb']: + user_thumb = item['user_thumb'] + else: + user_thumb = common.DEFAULT_USER_THUMB + + user_details = {'user_id': item['user_id'], + 'username': item['username'], + 'friendly_name': friendly_name, + 'user_thumb': user_thumb, + 'email': item['email'], + 'is_home_user': item['is_home_user'], + 'is_allow_sync': item['is_allow_sync'], + 'is_restricted': item['is_restricted'], + 'do_notify': item['do_notify'], + 'keep_history': item['keep_history'] } return user_details else: # If there is no user data we must return something # Use "Local" user to retain compatibility with PlexWatch database value - return {"user_id": None, - "username": 'Local', - "friendly_name": 'Local', - "email": '', - "thumb": '', - "is_home_user": 0, - "is_allow_sync": 0, - "is_restricted": 0, - "do_notify": 0 + return {'user_id': None, + 'username': 'Local', + 'friendly_name': 'Local', + 'user_thumb': common.DEFAULT_USER_THUMB, + 'email': '', + 'is_home_user': 0, + 'is_allow_sync': 0, + 'is_restricted': 0, + 'do_notify': 0, + 'keep_history': 0 } - def get_user_watch_time_stats(self, user=None, user_id=None): + def get_watch_time_stats(self, user_id=None): monitor_db = 
database.MonitorDatabase() time_queries = [1, 7, 30, 0] user_watch_time_stats = [] for days in time_queries: - if days > 0: - if user_id: - query = 'SELECT (SUM(stopped - started) - ' \ - 'SUM(CASE WHEN paused_counter is null THEN 0 ELSE paused_counter END)) as total_time, ' \ - 'COUNT(id) AS total_plays ' \ - 'FROM session_history ' \ - 'WHERE datetime(stopped, "unixepoch", "localtime") >= datetime("now", "-%s days", "localtime") ' \ - 'AND user_id = ?' % days - result = monitor_db.select(query, args=[user_id]) - elif user: - query = 'SELECT (SUM(stopped - started) - ' \ - 'SUM(CASE WHEN paused_counter is null THEN 0 ELSE paused_counter END)) as total_time, ' \ - 'COUNT(id) AS total_plays ' \ - 'FROM session_history ' \ - 'WHERE datetime(stopped, "unixepoch", "localtime") >= datetime("now", "-%s days", "localtime") ' \ - 'AND user = ?' % days - result = monitor_db.select(query, args=[user]) - else: - query = 'SELECT (SUM(stopped - started) - ' \ - 'SUM(CASE WHEN paused_counter is null THEN 0 ELSE paused_counter END)) as total_time, ' \ - 'COUNT(id) AS total_plays ' \ - 'FROM session_history ' \ - 'WHERE user = ?' - result = monitor_db.select(query, args=[user]) + try: + if days > 0: + if str(user_id).isdigit(): + query = 'SELECT (SUM(stopped - started) - ' \ + ' SUM(CASE WHEN paused_counter is null THEN 0 ELSE paused_counter END)) as total_time, ' \ + 'COUNT(id) AS total_plays ' \ + 'FROM session_history ' \ + 'WHERE datetime(stopped, "unixepoch", "localtime") >= datetime("now", "-%s days", "localtime") ' \ + 'AND user_id = ?' % days + result = monitor_db.select(query, args=[user_id]) + else: + result = [] + else: + if str(user_id).isdigit(): + query = 'SELECT (SUM(stopped - started) - ' \ + ' SUM(CASE WHEN paused_counter is null THEN 0 ELSE paused_counter END)) as total_time, ' \ + 'COUNT(id) AS total_plays ' \ + 'FROM session_history ' \ + 'WHERE user_id = ?' 
+ result = monitor_db.select(query, args=[user_id]) + else: + result = [] + except Exception as e: + logger.warn(u"PlexPy Users :: Unable to execute database query for get_watch_time_stats: %s." % e) + result = [] for item in result: if item['total_time']: @@ -502,14 +389,14 @@ class Users(object): return user_watch_time_stats - def get_user_player_stats(self, user=None, user_id=None): + def get_player_stats(self, user_id=None): monitor_db = database.MonitorDatabase() player_stats = [] result_id = 0 try: - if user_id: + if str(user_id).isdigit(): query = 'SELECT player, COUNT(player) as player_count, platform ' \ 'FROM session_history ' \ 'WHERE user_id = ? ' \ @@ -517,15 +404,10 @@ class Users(object): 'ORDER BY player_count DESC' result = monitor_db.select(query, args=[user_id]) else: - query = 'SELECT player, COUNT(player) as player_count, platform ' \ - 'FROM session_history ' \ - 'WHERE user = ? ' \ - 'GROUP BY player ' \ - 'ORDER BY player_count DESC' - result = monitor_db.select(query, args=[user]) - except: - logger.warn("Unable to execute database query.") - return None + result = [] + except Exception as e: + logger.warn(u"PlexPy Users :: Unable to execute database query for get_player_stats: %s." 
% e) + result = [] for item in result: # Rename Mystery platform names @@ -540,3 +422,140 @@ class Users(object): result_id += 1 return player_stats + + def get_recently_watched(self, user_id=None, limit='10'): + monitor_db = database.MonitorDatabase() + recently_watched = [] + + if not limit.isdigit(): + limit = '10' + + try: + if str(user_id).isdigit(): + query = 'SELECT session_history.id, session_history.media_type, session_history.rating_key, session_history.parent_rating_key, ' \ + 'title, parent_title, grandparent_title, thumb, parent_thumb, grandparent_thumb, media_index, parent_media_index, ' \ + 'year, started, user ' \ + 'FROM session_history_metadata ' \ + 'JOIN session_history ON session_history_metadata.id = session_history.id ' \ + 'WHERE user_id = ? ' \ + 'GROUP BY (CASE WHEN session_history.media_type = "track" THEN session_history.parent_rating_key ' \ + ' ELSE session_history.rating_key END) ' \ + 'ORDER BY started DESC LIMIT ?' + result = monitor_db.select(query, args=[user_id, limit]) + else: + result = [] + except Exception as e: + logger.warn(u"PlexPy Users :: Unable to execute database query for get_recently_watched: %s." 
% e) + result = [] + + for row in result: + if row['media_type'] == 'episode' and row['parent_thumb']: + thumb = row['parent_thumb'] + elif row['media_type'] == 'episode': + thumb = row['grandparent_thumb'] + else: + thumb = row['thumb'] + + recent_output = {'row_id': row['id'], + 'media_type': row['media_type'], + 'rating_key': row['rating_key'], + 'title': row['title'], + 'parent_title': row['parent_title'], + 'grandparent_title': row['grandparent_title'], + 'thumb': thumb, + 'media_index': row['media_index'], + 'parent_media_index': row['parent_media_index'], + 'year': row['year'], + 'time': row['started'], + 'user': row['user'] + } + recently_watched.append(recent_output) + + return recently_watched + + def delete_all_history(self, user_id=None): + monitor_db = database.MonitorDatabase() + + try: + if str(user_id).isdigit(): + logger.info(u"PlexPy DataFactory :: Deleting all history for user id %s from database." % user_id) + session_history_media_info_del = \ + monitor_db.action('DELETE FROM ' + 'session_history_media_info ' + 'WHERE session_history_media_info.id IN (SELECT session_history_media_info.id ' + 'FROM session_history_media_info ' + 'JOIN session_history ON session_history_media_info.id = session_history.id ' + 'WHERE session_history.user_id = ?)', [user_id]) + session_history_metadata_del = \ + monitor_db.action('DELETE FROM ' + 'session_history_metadata ' + 'WHERE session_history_metadata.id IN (SELECT session_history_metadata.id ' + 'FROM session_history_metadata ' + 'JOIN session_history ON session_history_metadata.id = session_history.id ' + 'WHERE session_history.user_id = ?)', [user_id]) + session_history_del = \ + monitor_db.action('DELETE FROM ' + 'session_history ' + 'WHERE session_history.user_id = ?', [user_id]) + + return 'Deleted all items for user_id %s.' % user_id + else: + return 'Unable to delete items. Input user_id not valid.' 
+ except Exception as e: + logger.warn(u"PlexPy Users :: Unable to execute database query for delete_all_history: %s." % e) + + def delete(self, user_id=None): + monitor_db = database.MonitorDatabase() + + try: + if str(user_id).isdigit(): + self.delete_all_history(user_id) + logger.info(u"PlexPy DataFactory :: Deleting user with id %s from database." % user_id) + monitor_db.action('UPDATE users SET deleted_user = 1 WHERE user_id = ?', [user_id]) + monitor_db.action('UPDATE users SET keep_history = 0 WHERE user_id = ?', [user_id]) + monitor_db.action('UPDATE users SET do_notify = 0 WHERE user_id = ?', [user_id]) + + return 'Deleted user with id %s.' % user_id + else: + return 'Unable to delete user, user_id not valid.' + except Exception as e: + logger.warn(u"PlexPy Users :: Unable to execute database query for delete: %s." % e) + + def undelete(self, user_id=None, username=None): + monitor_db = database.MonitorDatabase() + + try: + if user_id and str(user_id).isdigit(): + logger.info(u"PlexPy DataFactory :: Re-adding user with id %s to database." % user_id) + monitor_db.action('UPDATE users SET deleted_user = 0 WHERE user_id = ?', [user_id]) + monitor_db.action('UPDATE users SET keep_history = 1 WHERE user_id = ?', [user_id]) + monitor_db.action('UPDATE users SET do_notify = 1 WHERE user_id = ?', [user_id]) + + return 'Re-added user with id %s.' % user_id + elif username: + logger.info(u"PlexPy DataFactory :: Re-adding user with username %s to database." % username) + monitor_db.action('UPDATE users SET deleted_user = 0 WHERE username = ?', [username]) + monitor_db.action('UPDATE users SET keep_history = 1 WHERE username = ?', [username]) + monitor_db.action('UPDATE users SET do_notify = 1 WHERE username = ?', [username]) + + return 'Re-added user with username %s.' % username + else: + return 'Unable to re-add user, user_id or username not valid.' + except Exception as e: + logger.warn(u"PlexPy Users :: Unable to execute database query for undelete: %s." 
% e) + + # Keep method for PlexWatch import + def get_user_id(self, user=None): + if user: + try: + monitor_db = database.MonitorDatabase() + query = 'SELECT user_id FROM users WHERE username = ?' + result = monitor_db.select_single(query, args=[user]) + if result: + return result['user_id'] + else: + return None + except: + return None + + return None \ No newline at end of file diff --git a/plexpy/webserve.py b/plexpy/webserve.py index 79ba325a..1fd52921 100644 --- a/plexpy/webserve.py +++ b/plexpy/webserve.py @@ -13,7 +13,7 @@ # You should have received a copy of the GNU General Public License # along with PlexPy. If not, see . -from plexpy import logger, notifiers, plextv, pmsconnect, common, log_reader, datafactory, graphs, users, helpers +from plexpy import logger, notifiers, plextv, pmsconnect, common, log_reader, datafactory, graphs, users, libraries from plexpy.helpers import checked, radio from mako.lookup import TemplateLookup @@ -64,22 +64,15 @@ class WebInterface(object): else: raise cherrypy.HTTPRedirect("welcome") - @cherrypy.expose - def home(self): - config = { - "home_stats_length": plexpy.CONFIG.HOME_STATS_LENGTH, - "home_stats_cards": plexpy.CONFIG.HOME_STATS_CARDS, - "home_library_cards": plexpy.CONFIG.HOME_LIBRARY_CARDS, - "pms_identifier": plexpy.CONFIG.PMS_IDENTIFIER, - "pms_name": plexpy.CONFIG.PMS_NAME - } - return serve_template(templatename="index.html", title="Home", config=config) + + ##### Welcome ##### @cherrypy.expose def welcome(self, **kwargs): config = { "launch_browser": checked(plexpy.CONFIG.LAUNCH_BROWSER), "refresh_users_on_startup": checked(plexpy.CONFIG.REFRESH_USERS_ON_STARTUP), + "refresh_libraries_on_startup": checked(plexpy.CONFIG.REFRESH_LIBRARIES_ON_STARTUP), "pms_identifier": plexpy.CONFIG.PMS_IDENTIFIER, "pms_ip": plexpy.CONFIG.PMS_IP, "pms_is_remote": checked(plexpy.CONFIG.PMS_IS_REMOTE), @@ -107,6 +100,37 @@ class WebInterface(object): else: return serve_template(templatename="welcome.html", title="Welcome", 
config=config) + @cherrypy.expose + def discover(self, token=''): + """ + Returns the servers that you own as a + list of dicts (formatted for selectize) + """ + # Need to set token so result doesn't return http 401 + plexpy.CONFIG.__setattr__('PMS_TOKEN', token) + plexpy.CONFIG.write() + + plex_tv = plextv.PlexTV() + servers = plex_tv.discover() + + if servers: + cherrypy.response.headers['Content-type'] = 'application/json' + return json.dumps(servers) + + + ##### Home ##### + + @cherrypy.expose + def home(self): + config = { + "home_stats_length": plexpy.CONFIG.HOME_STATS_LENGTH, + "home_stats_cards": plexpy.CONFIG.HOME_STATS_CARDS, + "home_library_cards": plexpy.CONFIG.HOME_LIBRARY_CARDS, + "pms_identifier": plexpy.CONFIG.PMS_IDENTIFIER, + "pms_name": plexpy.CONFIG.PMS_NAME + } + return serve_template(templatename="index.html", title="Home", config=config) + @cherrypy.expose def get_date_formats(self): if plexpy.CONFIG.DATE_FORMAT: @@ -124,6 +148,43 @@ class WebInterface(object): cherrypy.response.headers['Content-type'] = 'application/json' return json.dumps(formats) + @cherrypy.expose + def get_current_activity(self, **kwargs): + + try: + pms_connect = pmsconnect.PmsConnect() + result = pms_connect.get_current_activity() + + data_factory = datafactory.DataFactory() + for session in result['sessions']: + if not session['ip_address']: + ip_address = data_factory.get_session_ip(session['session_key']) + session['ip_address'] = ip_address + + except: + return serve_template(templatename="current_activity.html", data=None) + + if result: + return serve_template(templatename="current_activity.html", data=result) + else: + logger.warn(u"Unable to retrieve data for get_current_activity.") + return serve_template(templatename="current_activity.html", data=None) + + @cherrypy.expose + def get_current_activity_header(self, **kwargs): + + try: + pms_connect = pmsconnect.PmsConnect() + result = pms_connect.get_current_activity() + except: + return 
serve_template(templatename="current_activity_header.html", data=None) + + if result: + return serve_template(templatename="current_activity_header.html", data=result['stream_count']) + else: + logger.warn(u"Unable to retrieve data for get_current_activity_header.") + return serve_template(templatename="current_activity_header.html", data=None) + @cherrypy.expose def home_stats(self, **kwargs): data_factory = datafactory.DataFactory() @@ -132,7 +193,7 @@ class WebInterface(object): time_range = plexpy.CONFIG.HOME_STATS_LENGTH stats_type = plexpy.CONFIG.HOME_STATS_TYPE stats_count = plexpy.CONFIG.HOME_STATS_COUNT - stats_cards = plexpy.CONFIG.HOME_STATS_CARDS.split(', ') + stats_cards = plexpy.CONFIG.HOME_STATS_CARDS notify_watched_percent = plexpy.CONFIG.NOTIFY_WATCHED_PERCENT stats_data = data_factory.get_home_stats(grouping=grouping, @@ -146,65 +207,325 @@ class WebInterface(object): @cherrypy.expose def library_stats(self, **kwargs): - pms_connect = pmsconnect.PmsConnect() + data_factory = datafactory.DataFactory() - library_cards = plexpy.CONFIG.HOME_LIBRARY_CARDS.split(', ') - - if library_cards == ['library_statistics_first']: - library_cards = ['library_statistics'] - server_children = pms_connect.get_server_children() - server_libraries = server_children['libraries_list'] - - for library in server_libraries: - library_cards.append(library['key']) - - plexpy.CONFIG.HOME_LIBRARY_CARDS = ', '.join(library_cards) - plexpy.CONFIG.write() - - stats_data = pms_connect.get_library_stats(library_cards=library_cards) + library_cards = plexpy.CONFIG.HOME_LIBRARY_CARDS + stats_data = data_factory.get_library_stats(library_cards=library_cards) + return serve_template(templatename="library_stats.html", title="Library Stats", data=stats_data) @cherrypy.expose - def history(self): - return serve_template(templatename="history.html", title="History") + def get_recently_added(self, count='0', **kwargs): + + try: + pms_connect = pmsconnect.PmsConnect() + result = 
pms_connect.get_recently_added_details(count=count) + except IOError, e: + return serve_template(templatename="recently_added.html", data=None) + + if result: + return serve_template(templatename="recently_added.html", data=result['recently_added']) + else: + logger.warn(u"Unable to retrieve data for get_recently_added.") + return serve_template(templatename="recently_added.html", data=None) + + + ##### Libraries ##### + + @cherrypy.expose + def libraries(self): + config = { + "update_section_ids": plexpy.CONFIG.UPDATE_SECTION_IDS + } + + return serve_template(templatename="libraries.html", title="Libraries", config=config) + + @cherrypy.expose + def get_library_list(self, **kwargs): + + library_data = libraries.Libraries() + library_list = library_data.get_datatables_list(kwargs=kwargs) + + cherrypy.response.headers['Content-type'] = 'application/json' + return json.dumps(library_list) + + @cherrypy.expose + def refresh_libraries_list(self, **kwargs): + threading.Thread(target=pmsconnect.refresh_libraries).start() + logger.info(u"Manual libraries list refresh requested.") + + @cherrypy.expose + def library(self, section_id=None): + config = { + "get_file_sizes": plexpy.CONFIG.GET_FILE_SIZES, + "get_file_sizes_hold": plexpy.CONFIG.GET_FILE_SIZES_HOLD + } + + library_data = libraries.Libraries() + if section_id: + try: + library_details = library_data.get_details(section_id=section_id) + except: + logger.warn(u"Unable to retrieve library details for section_id %s " % section_id) + return serve_template(templatename="library.html", title="Library", data=None, config=config) + else: + logger.debug(u"Library page requested but no section_id received.") + return serve_template(templatename="library.html", title="Library", data=None, config=config) + + return serve_template(templatename="library.html", title="Library", data=library_details, config=config) + + @cherrypy.expose + def edit_library_dialog(self, section_id=None, **kwargs): + library_data = 
libraries.Libraries() + if section_id: + result = library_data.get_details(section_id=section_id) + status_message = '' + else: + result = None + status_message = 'An error occured.' + + return serve_template(templatename="edit_library.html", title="Edit Library", data=result, status_message=status_message) + + @cherrypy.expose + def edit_library(self, section_id=None, **kwargs): + custom_thumb = kwargs.get('custom_thumb', '') + do_notify = kwargs.get('do_notify', 0) + do_notify_created = kwargs.get('do_notify_created', 0) + keep_history = kwargs.get('keep_history', 0) + + library_data = libraries.Libraries() + if section_id: + try: + library_data.set_config(section_id=section_id, + custom_thumb=custom_thumb, + do_notify=do_notify, + do_notify_created=do_notify_created, + keep_history=keep_history) + + status_message = "Successfully updated library." + return status_message + except: + status_message = "Failed to update library." + return status_message + + @cherrypy.expose + def get_library_watch_time_stats(self, section_id=None, **kwargs): + + library_data = libraries.Libraries() + result = library_data.get_watch_time_stats(section_id=section_id) + + if result: + return serve_template(templatename="user_watch_time_stats.html", data=result, title="Watch Stats") + else: + logger.warn(u"Unable to retrieve data for get_library_watch_time_stats.") + return serve_template(templatename="user_watch_time_stats.html", data=None, title="Watch Stats") + + @cherrypy.expose + def get_library_user_stats(self, section_id=None, **kwargs): + + library_data = libraries.Libraries() + result = library_data.get_user_stats(section_id=section_id) + + if result: + return serve_template(templatename="library_user_stats.html", data=result, title="Player Stats") + else: + logger.warn(u"Unable to retrieve data for get_library_user_stats.") + return serve_template(templatename="library_user_stats.html", data=None, title="Player Stats") + + @cherrypy.expose + def 
get_library_recently_watched(self, section_id=None, limit='10', **kwargs): + + library_data = libraries.Libraries() + result = library_data.get_recently_watched(section_id=section_id, limit=limit) + + if result: + return serve_template(templatename="user_recently_watched.html", data=result, title="Recently Watched") + else: + logger.warn(u"Unable to retrieve data for get_library_recently_watched.") + return serve_template(templatename="user_recently_watched.html", data=None, title="Recently Watched") + + @cherrypy.expose + def get_library_recently_added(self, section_id=None, limit='10', **kwargs): + + pms_connect = pmsconnect.PmsConnect() + result = pms_connect.get_recently_added_details(section_id=section_id, count=limit) + + if result: + return serve_template(templatename="library_recently_added.html", data=result['recently_added'], title="Recently Added") + else: + logger.warn(u"Unable to retrieve data for get_library_recently_added.") + return serve_template(templatename="library_recently_added.html", data=None, title="Recently Added") + + @cherrypy.expose + def get_library_media_info(self, section_id=None, section_type=None, rating_key=None, refresh='', **kwargs): + + if refresh == 'true': + refresh = True + else: + refresh = False + + library_data = libraries.Libraries() + result = library_data.get_datatables_media_info(section_id=section_id, + section_type=section_type, + rating_key=rating_key, + refresh=refresh, + kwargs=kwargs) + + cherrypy.response.headers['Content-type'] = 'application/json' + return json.dumps(result) + + @cherrypy.expose + def get_media_info_file_sizes(self, section_id=None, rating_key=None): + get_file_sizes_hold = plexpy.CONFIG.GET_FILE_SIZES_HOLD + section_ids = set(get_file_sizes_hold['section_ids']) + rating_keys = set(get_file_sizes_hold['rating_keys']) + + if (section_id and section_id not in section_ids) or (rating_key and rating_key not in rating_keys): + if section_id: + section_ids.add(section_id) + elif rating_key: + 
rating_keys.add(rating_key) + plexpy.CONFIG.GET_FILE_SIZES_HOLD = {'section_ids': list(section_ids), 'rating_keys': list(rating_keys)} + + library_data = libraries.Libraries() + result = library_data.get_media_info_file_sizes(section_id=section_id, + rating_key=rating_key) + + if section_id: + section_ids.remove(section_id) + elif rating_key: + rating_keys.remove(rating_key) + plexpy.CONFIG.GET_FILE_SIZES_HOLD = {'section_ids': list(section_ids), 'rating_keys': list(rating_keys)} + else: + result = False + + cherrypy.response.headers['Content-type'] = 'application/json' + return json.dumps({'success': result}) + + @cherrypy.expose + def get_library_unwatched(self, section_id=None, section_type=None, **kwargs): + + pms_connect = pmsconnect.PmsConnect() + result = pms_connect.get_library_children_details(section_id=section_id, + section_type=section_type, + get_media_info=True, + kwargs=kwargs) + + cherrypy.response.headers['Content-type'] = 'application/json' + return json.dumps(result) + + @cherrypy.expose + def delete_all_library_history(self, section_id, **kwargs): + library_data = libraries.Libraries() + + if section_id: + delete_row = library_data.delete_all_history(section_id=section_id) + + if delete_row: + cherrypy.response.headers['Content-type'] = 'application/json' + return json.dumps({'message': delete_row}) + else: + cherrypy.response.headers['Content-type'] = 'application/json' + return json.dumps({'message': 'no data received'}) + + @cherrypy.expose + def delete_library(self, section_id, **kwargs): + library_data = libraries.Libraries() + + if section_id: + delete_row = library_data.delete(section_id=section_id) + + if delete_row: + cherrypy.response.headers['Content-type'] = 'application/json' + return json.dumps({'message': delete_row}) + else: + cherrypy.response.headers['Content-type'] = 'application/json' + return json.dumps({'message': 'no data received'}) + + @cherrypy.expose + def undelete_library(self, section_id=None, section_name=None, 
**kwargs): + library_data = libraries.Libraries() + + if section_id: + delete_row = library_data.undelete(section_id=section_id) + + if delete_row: + cherrypy.response.headers['Content-type'] = 'application/json' + return json.dumps({'message': delete_row}) + elif section_name: + delete_row = library_data.undelete(section_name=section_name) + + if delete_row: + cherrypy.response.headers['Content-type'] = 'application/json' + return json.dumps({'message': delete_row}) + else: + cherrypy.response.headers['Content-type'] = 'application/json' + return json.dumps({'message': 'no data received'}) + + @cherrypy.expose + def update_section_ids(self, **kwargs): + + logger.debug(u"Manual database section_id update called.") + + result = libraries.update_section_ids() + + if result: + return "Updated all section_id's in database." + else: + return "Unable to update section_id's in database. See logs for details." + + @cherrypy.expose + def delete_datatable_media_info_cache(self, section_id, **kwargs): + get_file_sizes_hold = plexpy.CONFIG.GET_FILE_SIZES_HOLD + section_ids = set(get_file_sizes_hold['section_ids']) + + if section_id not in section_ids: + if section_id: + library_data = libraries.Libraries() + delete_row = library_data.delete_datatable_media_info_cache(section_id=section_id) + + if delete_row: + cherrypy.response.headers['Content-type'] = 'application/json' + return json.dumps({'message': delete_row}) + else: + cherrypy.response.headers['Content-type'] = 'application/json' + return json.dumps({'message': 'no data received'}) + else: + return json.dumps({'message': 'Cannot refresh library while getting file sizes.'}) + + ##### Users ##### @cherrypy.expose def users(self): return serve_template(templatename="users.html", title="Users") @cherrypy.expose - def graphs(self): + def get_user_list(self, **kwargs): - config = { - "graph_type": plexpy.CONFIG.GRAPH_TYPE, - "graph_days": plexpy.CONFIG.GRAPH_DAYS, - "graph_tab": plexpy.CONFIG.GRAPH_TAB, - 
"music_logging_enable": plexpy.CONFIG.MUSIC_LOGGING_ENABLE - } + user_data = users.Users() + user_list = user_data.get_datatables_list(kwargs=kwargs) - return serve_template(templatename="graphs.html", title="Graphs", config=config) + cherrypy.response.headers['Content-type'] = 'application/json' + return json.dumps(user_list) @cherrypy.expose - def sync(self): - return serve_template(templatename="sync.html", title="Synced Items") + def refresh_users_list(self, **kwargs): + threading.Thread(target=plextv.refresh_users).start() + logger.info(u"Manual users list refresh requested.") @cherrypy.expose - def user(self, user=None, user_id=None): + def user(self, user_id=None): user_data = users.Users() if user_id: try: - user_details = user_data.get_user_details(user_id=user_id) + user_details = user_data.get_details(user_id=user_id) except: - logger.warn("Unable to retrieve friendly name for user_id %s " % user_id) - elif user: - try: - user_details = user_data.get_user_details(user=user) - except: - logger.warn("Unable to retrieve friendly name for user %s " % user) + logger.warn(u"Unable to retrieve user details for user_id %s " % user_id) + return serve_template(templatename="user.html", title="User", data=None) else: - logger.debug(u"User page requested but no parameters received.") - raise cherrypy.HTTPRedirect("home") + logger.debug(u"User page requested but no user_id received.") + return serve_template(templatename="user.html", title="User", data=None) return serve_template(templatename="user.html", title="User", data=user_details) @@ -212,10 +533,7 @@ class WebInterface(object): def edit_user_dialog(self, user=None, user_id=None, **kwargs): user_data = users.Users() if user_id: - result = user_data.get_user_friendly_name(user_id=user_id) - status_message = '' - elif user: - result = user_data.get_user_friendly_name(user=user) + result = user_data.get_details(user_id=user_id) status_message = '' else: result = None @@ -224,49 +542,168 @@ class 
WebInterface(object): return serve_template(templatename="edit_user.html", title="Edit User", data=result, status_message=status_message) @cherrypy.expose - def edit_user(self, user=None, user_id=None, friendly_name=None, **kwargs): - if 'do_notify' in kwargs: - do_notify = kwargs.get('do_notify') - else: - do_notify = 0 - if 'keep_history' in kwargs: - keep_history = kwargs.get('keep_history') - else: - keep_history = 0 - if 'thumb' in kwargs: - custom_avatar = kwargs['thumb'] - else: - custom_avatar = '' + def edit_user(self, user_id=None, **kwargs): + friendly_name = kwargs.get('friendly_name', '') + custom_thumb = kwargs.get('custom_thumb', '') + do_notify = kwargs.get('do_notify', 0) + keep_history = kwargs.get('keep_history', 0) user_data = users.Users() if user_id: try: - user_data.set_user_friendly_name(user_id=user_id, - friendly_name=friendly_name, - do_notify=do_notify, - keep_history=keep_history) - user_data.set_user_profile_url(user_id=user_id, - profile_url=custom_avatar) - + user_data.set_config(user_id=user_id, + friendly_name=friendly_name, + custom_thumb=custom_thumb, + do_notify=do_notify, + keep_history=keep_history) status_message = "Successfully updated user." return status_message except: status_message = "Failed to update user." return status_message - if user: - try: - user_data.set_user_friendly_name(user=user, - friendly_name=friendly_name, - do_notify=do_notify, - keep_history=keep_history) - user_data.set_user_profile_url(user=user, - profile_url=custom_avatar) - status_message = "Successfully updated user." - return status_message - except: - status_message = "Failed to update user." 
- return status_message + @cherrypy.expose + def get_user_watch_time_stats(self, user=None, user_id=None, **kwargs): + + user_data = users.Users() + result = user_data.get_watch_time_stats(user_id=user_id) + + if result: + return serve_template(templatename="user_watch_time_stats.html", data=result, title="Watch Stats") + else: + logger.warn(u"Unable to retrieve data for get_user_watch_time_stats.") + return serve_template(templatename="user_watch_time_stats.html", data=None, title="Watch Stats") + + @cherrypy.expose + def get_user_player_stats(self, user=None, user_id=None, **kwargs): + + user_data = users.Users() + result = user_data.get_player_stats(user_id=user_id) + + if result: + return serve_template(templatename="user_player_stats.html", data=result, title="Player Stats") + else: + logger.warn(u"Unable to retrieve data for get_user_player_stats.") + return serve_template(templatename="user_player_stats.html", data=None, title="Player Stats") + + @cherrypy.expose + def get_user_recently_watched(self, user=None, user_id=None, limit='10', **kwargs): + + user_data = users.Users() + result = user_data.get_recently_watched(user_id=user_id, limit=limit) + + if result: + return serve_template(templatename="user_recently_watched.html", data=result, title="Recently Watched") + else: + logger.warn(u"Unable to retrieve data for get_user_recently_watched.") + return serve_template(templatename="user_recently_watched.html", data=None, title="Recently Watched") + + @cherrypy.expose + def get_user_ips(self, user_id=None, **kwargs): + + user_data = users.Users() + history = user_data.get_datatables_unique_ips(user_id=user_id, kwargs=kwargs) + + cherrypy.response.headers['Content-type'] = 'application/json' + return json.dumps(history) + + @cherrypy.expose + def delete_all_user_history(self, user_id, **kwargs): + user_data = users.Users() + + if user_id: + delete_row = user_data.delete_all_history(user_id=user_id) + if delete_row: + cherrypy.response.headers['Content-type'] 
= 'application/json' + return json.dumps({'message': delete_row}) + else: + cherrypy.response.headers['Content-type'] = 'application/json' + return json.dumps({'message': 'no data received'}) + + @cherrypy.expose + def delete_user(self, user_id, **kwargs): + user_data = users.Users() + + if user_id: + delete_row = user_data.delete(user_id=user_id) + + if delete_row: + cherrypy.response.headers['Content-type'] = 'application/json' + return json.dumps({'message': delete_row}) + else: + cherrypy.response.headers['Content-type'] = 'application/json' + return json.dumps({'message': 'no data received'}) + + @cherrypy.expose + def undelete_user(self, user_id=None, username=None, **kwargs): + user_data = users.Users() + + if user_id: + delete_row = user_data.undelete(user_id=user_id) + + if delete_row: + cherrypy.response.headers['Content-type'] = 'application/json' + return json.dumps({'message': delete_row}) + elif username: + delete_row = user_data.undelete(username=username) + + if delete_row: + cherrypy.response.headers['Content-type'] = 'application/json' + return json.dumps({'message': delete_row}) + else: + cherrypy.response.headers['Content-type'] = 'application/json' + return json.dumps({'message': 'no data received'}) + + + ##### History ##### + + @cherrypy.expose + def history(self): + return serve_template(templatename="history.html", title="History") + + @cherrypy.expose + def get_history(self, user=None, user_id=None, grouping=0, **kwargs): + + if grouping == 'false': + grouping = 0 + else: + grouping = plexpy.CONFIG.GROUP_HISTORY_TABLES + + watched_percent = plexpy.CONFIG.NOTIFY_WATCHED_PERCENT + + custom_where = [] + if user_id: + custom_where.append(['session_history.user_id', user_id]) + elif user: + custom_where.append(['session_history.user', user]) + if 'rating_key' in kwargs: + rating_key = kwargs.get('rating_key', "") + custom_where.append(['session_history.rating_key', rating_key]) + if 'parent_rating_key' in kwargs: + rating_key = 
kwargs.get('parent_rating_key', "") + custom_where.append(['session_history.parent_rating_key', rating_key]) + if 'grandparent_rating_key' in kwargs: + rating_key = kwargs.get('grandparent_rating_key', "") + custom_where.append(['session_history.grandparent_rating_key', rating_key]) + if 'start_date' in kwargs: + start_date = kwargs.get('start_date', "") + custom_where.append(['strftime("%Y-%m-%d", datetime(started, "unixepoch", "localtime"))', start_date]) + if 'reference_id' in kwargs: + reference_id = kwargs.get('reference_id', "") + custom_where.append(['session_history.reference_id', reference_id]) + if 'section_id' in kwargs: + section_id = kwargs.get('section_id', "") + custom_where.append(['session_history_metadata.section_id', section_id]) + if 'media_type' in kwargs: + media_type = kwargs.get('media_type', "") + if media_type != 'all': + custom_where.append(['session_history.media_type', media_type]) + + data_factory = datafactory.DataFactory() + history = data_factory.get_datatables_history(kwargs=kwargs, custom_where=custom_where, grouping=grouping, watched_percent=watched_percent) + + cherrypy.response.headers['Content-type'] = 'application/json' + return json.dumps(history) @cherrypy.expose def get_stream_data(self, row_id=None, user=None, **kwargs): @@ -288,40 +725,217 @@ class WebInterface(object): return serve_template(templatename="ip_address_modal.html", title="IP Address Details", data=ip_address) @cherrypy.expose - def get_user_list(self, **kwargs): + def delete_history_rows(self, row_id, **kwargs): + data_factory = datafactory.DataFactory() - user_data = users.Users() - user_list = user_data.get_user_list(kwargs=kwargs) + if row_id: + delete_row = data_factory.delete_session_history_rows(row_id=row_id) - cherrypy.response.headers['Content-type'] = 'application/json' - return json.dumps(user_list) + if delete_row: + cherrypy.response.headers['Content-type'] = 'application/json' + return json.dumps({'message': delete_row}) + else: + 
cherrypy.response.headers['Content-type'] = 'application/json' + return json.dumps({'message': 'no data received'}) + + + ##### Graphs ##### @cherrypy.expose - def checkGithub(self): - from plexpy import versioncheck + def graphs(self): - versioncheck.checkGithub() - raise cherrypy.HTTPRedirect("home") + config = { + "graph_type": plexpy.CONFIG.GRAPH_TYPE, + "graph_days": plexpy.CONFIG.GRAPH_DAYS, + "graph_tab": plexpy.CONFIG.GRAPH_TAB, + "music_logging_enable": plexpy.CONFIG.MUSIC_LOGGING_ENABLE + } + + return serve_template(templatename="graphs.html", title="Graphs", config=config) + + @cherrypy.expose + def set_graph_config(self, graph_type=None, graph_days=None, graph_tab=None): + if graph_type: + plexpy.CONFIG.__setattr__('GRAPH_TYPE', graph_type) + plexpy.CONFIG.write() + if graph_days: + plexpy.CONFIG.__setattr__('GRAPH_DAYS', graph_days) + plexpy.CONFIG.write() + if graph_tab: + plexpy.CONFIG.__setattr__('GRAPH_TAB', graph_tab) + plexpy.CONFIG.write() + + return "Updated graphs config values." 
+ + @cherrypy.expose + def get_plays_by_date(self, time_range='30', y_axis='plays', **kwargs): + + graph = graphs.Graphs() + result = graph.get_total_plays_per_day(time_range=time_range, y_axis=y_axis) + + if result: + cherrypy.response.headers['Content-type'] = 'application/json' + return json.dumps(result) + else: + logger.warn(u"Unable to retrieve data for get_plays_by_date.") + + @cherrypy.expose + def get_plays_by_dayofweek(self, time_range='30', y_axis='plays', **kwargs): + + graph = graphs.Graphs() + result = graph.get_total_plays_per_dayofweek(time_range=time_range, y_axis=y_axis) + + if result: + cherrypy.response.headers['Content-type'] = 'application/json' + return json.dumps(result) + else: + logger.warn(u"Unable to retrieve data for get_plays_by_dayofweek.") + + @cherrypy.expose + def get_plays_by_hourofday(self, time_range='30', y_axis='plays', **kwargs): + + graph = graphs.Graphs() + result = graph.get_total_plays_per_hourofday(time_range=time_range, y_axis=y_axis) + + if result: + cherrypy.response.headers['Content-type'] = 'application/json' + return json.dumps(result) + else: + logger.warn(u"Unable to retrieve data for get_plays_by_hourofday.") + + @cherrypy.expose + def get_plays_per_month(self, y_axis='plays', **kwargs): + + graph = graphs.Graphs() + result = graph.get_total_plays_per_month(y_axis=y_axis) + + if result: + cherrypy.response.headers['Content-type'] = 'application/json' + return json.dumps(result) + else: + logger.warn(u"Unable to retrieve data for get_plays_per_month.") + + @cherrypy.expose + def get_plays_by_top_10_platforms(self, time_range='30', y_axis='plays', **kwargs): + + graph = graphs.Graphs() + result = graph.get_total_plays_by_top_10_platforms(time_range=time_range, y_axis=y_axis) + + if result: + cherrypy.response.headers['Content-type'] = 'application/json' + return json.dumps(result) + else: + logger.warn(u"Unable to retrieve data for get_plays_by_top_10_platforms.") + + @cherrypy.expose + def 
get_plays_by_top_10_users(self, time_range='30', y_axis='plays', **kwargs): + + graph = graphs.Graphs() + result = graph.get_total_plays_by_top_10_users(time_range=time_range, y_axis=y_axis) + + if result: + cherrypy.response.headers['Content-type'] = 'application/json' + return json.dumps(result) + else: + logger.warn(u"Unable to retrieve data for get_plays_by_top_10_users.") + + @cherrypy.expose + def get_plays_by_stream_type(self, time_range='30', y_axis='plays', **kwargs): + + graph = graphs.Graphs() + result = graph.get_total_plays_per_stream_type(time_range=time_range, y_axis=y_axis) + + if result: + cherrypy.response.headers['Content-type'] = 'application/json' + return json.dumps(result) + else: + logger.warn(u"Unable to retrieve data for get_plays_by_stream_type.") + + @cherrypy.expose + def get_plays_by_source_resolution(self, time_range='30', y_axis='plays', **kwargs): + + graph = graphs.Graphs() + result = graph.get_total_plays_by_source_resolution(time_range=time_range, y_axis=y_axis) + + if result: + cherrypy.response.headers['Content-type'] = 'application/json' + return json.dumps(result) + else: + logger.warn(u"Unable to retrieve data for get_plays_by_source_resolution.") + + @cherrypy.expose + def get_plays_by_stream_resolution(self, time_range='30', y_axis='plays', **kwargs): + + graph = graphs.Graphs() + result = graph.get_total_plays_by_stream_resolution(time_range=time_range, y_axis=y_axis) + + if result: + cherrypy.response.headers['Content-type'] = 'application/json' + return json.dumps(result) + else: + logger.warn(u"Unable to retrieve data for get_plays_by_stream_resolution.") + + @cherrypy.expose + def get_stream_type_by_top_10_users(self, time_range='30', y_axis='plays', **kwargs): + + graph = graphs.Graphs() + result = graph.get_stream_type_by_top_10_users(time_range=time_range, y_axis=y_axis) + + if result: + cherrypy.response.headers['Content-type'] = 'application/json' + return json.dumps(result) + else: + logger.warn(u"Unable to 
retrieve data for get_stream_type_by_top_10_users.") + + @cherrypy.expose + def get_stream_type_by_top_10_platforms(self, time_range='30', y_axis='plays', **kwargs): + + graph = graphs.Graphs() + result = graph.get_stream_type_by_top_10_platforms(time_range=time_range, y_axis=y_axis) + + if result: + cherrypy.response.headers['Content-type'] = 'application/json' + return json.dumps(result) + else: + logger.warn(u"Unable to retrieve data for get_stream_type_by_top_10_platforms.") + + @cherrypy.expose + def history_table_modal(self, start_date=None, **kwargs): + + return serve_template(templatename="history_table_modal.html", title="History Data", data=start_date) + + + ##### Sync ##### + + @cherrypy.expose + def sync(self): + return serve_template(templatename="sync.html", title="Synced Items") + + @cherrypy.expose + def get_sync(self, machine_id=None, user_id=None, **kwargs): + + if not machine_id: + machine_id = plexpy.CONFIG.PMS_IDENTIFIER + + plex_tv = plextv.PlexTV() + result = plex_tv.get_synced_items(machine_id=machine_id, user_id=user_id) + + if result: + output = {"data": result} + else: + logger.warn(u"Unable to retrieve data for get_sync.") + output = {"data": []} + + cherrypy.response.headers['Content-type'] = 'application/json' + return json.dumps(output) + + + ##### Logs ##### @cherrypy.expose def logs(self): return serve_template(templatename="logs.html", title="Log", lineList=plexpy.LOG_LIST) - @cherrypy.expose - def clearLogs(self): - plexpy.LOG_LIST = [] - logger.info("Web logs cleared") - raise cherrypy.HTTPRedirect("logs") - - @cherrypy.expose - def toggleVerbose(self): - plexpy.VERBOSE = not plexpy.VERBOSE - logger.initLogger(console=not plexpy.QUIET, - log_dir=plexpy.CONFIG.LOG_DIR, verbose=plexpy.VERBOSE) - logger.info("Verbose toggled, set to %s", plexpy.VERBOSE) - logger.debug("If you read this message, debug logging is available") - raise cherrypy.HTTPRedirect("logs") - @cherrypy.expose def getLog(self, start=0, length=100, **kwargs): start 
= int(start) @@ -371,16 +985,29 @@ class WebInterface(object): try: log_lines = {'data': log_reader.get_log_tail(window=window)} except: - logger.warn("Unable to retrieve Plex Logs.") + logger.warn(u"Unable to retrieve Plex Logs.") cherrypy.response.headers['Content-type'] = 'application/json' return json.dumps(log_lines) @cherrypy.expose - def generateAPI(self): - apikey = hashlib.sha224(str(random.getrandbits(256))).hexdigest()[0:32] - logger.info("New API generated") - return apikey + def clearLogs(self): + plexpy.LOG_LIST = [] + logger.info(u"Web logs cleared") + raise cherrypy.HTTPRedirect("logs") + + @cherrypy.expose + def toggleVerbose(self): + plexpy.VERBOSE = not plexpy.VERBOSE + logger.initLogger(console=not plexpy.QUIET, + log_dir=plexpy.CONFIG.LOG_DIR, verbose=plexpy.VERBOSE) + logger.info(u"Verbose toggled, set to %s", plexpy.VERBOSE) + logger.debug(u"If you read this message, debug logging is available") + raise cherrypy.HTTPRedirect("logs") + + + + ##### Settings ##### @cherrypy.expose def settings(self): @@ -423,6 +1050,7 @@ class WebInterface(object): "pms_uuid": plexpy.CONFIG.PMS_UUID, "date_format": plexpy.CONFIG.DATE_FORMAT, "time_format": plexpy.CONFIG.TIME_FORMAT, + "get_file_sizes": checked(plexpy.CONFIG.GET_FILE_SIZES), "grouping_global_history": checked(plexpy.CONFIG.GROUPING_GLOBAL_HISTORY), "grouping_user_history": checked(plexpy.CONFIG.GROUPING_USER_HISTORY), "grouping_charts": checked(plexpy.CONFIG.GROUPING_CHARTS), @@ -441,6 +1069,8 @@ class WebInterface(object): "monitor_remote_access": checked(plexpy.CONFIG.MONITOR_REMOTE_ACCESS), "monitoring_interval": plexpy.CONFIG.MONITORING_INTERVAL, "monitoring_use_websocket": checked(plexpy.CONFIG.MONITORING_USE_WEBSOCKET), + "refresh_libraries_interval": plexpy.CONFIG.REFRESH_LIBRARIES_INTERVAL, + "refresh_libraries_on_startup": checked(plexpy.CONFIG.REFRESH_LIBRARIES_ON_STARTUP), "refresh_users_interval": plexpy.CONFIG.REFRESH_USERS_INTERVAL, "refresh_users_on_startup": 
checked(plexpy.CONFIG.REFRESH_USERS_ON_STARTUP), "ip_logging_enable": checked(plexpy.CONFIG.IP_LOGGING_ENABLE), @@ -480,13 +1110,13 @@ class WebInterface(object): "home_stats_length": plexpy.CONFIG.HOME_STATS_LENGTH, "home_stats_type": checked(plexpy.CONFIG.HOME_STATS_TYPE), "home_stats_count": plexpy.CONFIG.HOME_STATS_COUNT, - "home_stats_cards": plexpy.CONFIG.HOME_STATS_CARDS, - "home_library_cards": plexpy.CONFIG.HOME_LIBRARY_CARDS, + "home_stats_cards": json.dumps(plexpy.CONFIG.HOME_STATS_CARDS), + "home_library_cards": json.dumps(plexpy.CONFIG.HOME_LIBRARY_CARDS), "buffer_threshold": plexpy.CONFIG.BUFFER_THRESHOLD, "buffer_wait": plexpy.CONFIG.BUFFER_WAIT, "group_history_tables": checked(plexpy.CONFIG.GROUP_HISTORY_TABLES) } - + return serve_template(templatename="settings.html", title="Settings", config=config) @cherrypy.expose @@ -494,14 +1124,15 @@ class WebInterface(object): # Handle the variable config options. Note - keys with False values aren't getting passed checked_configs = [ - "launch_browser", "enable_https", "api_enabled", "freeze_db", "check_github", + "launch_browser", "enable_https", "api_enabled", "freeze_db", "check_github", "get_file_sizes", "grouping_global_history", "grouping_user_history", "grouping_charts", "pms_use_bif", "pms_ssl", "movie_notify_enable", "tv_notify_enable", "music_notify_enable", "monitoring_use_websocket", "tv_notify_on_start", "movie_notify_on_start", "music_notify_on_start", "tv_notify_on_stop", "movie_notify_on_stop", "music_notify_on_stop", - "tv_notify_on_pause", "movie_notify_on_pause", "music_notify_on_pause", "refresh_users_on_startup", - "ip_logging_enable", "movie_logging_enable", "tv_logging_enable", "music_logging_enable", - "pms_is_remote", "home_stats_type", "group_history_tables", "notify_consecutive", + "tv_notify_on_pause", "movie_notify_on_pause", "music_notify_on_pause", + "refresh_libraries_on_startup", "refresh_users_on_startup", + "ip_logging_enable", "movie_logging_enable", "tv_logging_enable", 
"music_logging_enable", + "pms_is_remote", "home_stats_type", "group_history_tables", "notify_consecutive", "notify_recently_added", "notify_recently_added_grandparent", "monitor_remote_access" ] for checked_config in checked_configs: @@ -520,9 +1151,15 @@ class WebInterface(object): del kwargs[use_config] # Check if we should refresh our data + refresh_libraries = False refresh_users = False reschedule = False + if 'monitoring_interval' in kwargs and 'refresh_libraries_interval' in kwargs: + if (kwargs['monitoring_interval'] != str(plexpy.CONFIG.MONITORING_INTERVAL)) or \ + (kwargs['refresh_libraries_interval'] != str(plexpy.CONFIG.REFRESH_LIBRARIES_INTERVAL)): + reschedule = True + if 'monitoring_interval' in kwargs and 'refresh_users_interval' in kwargs: if (kwargs['monitoring_interval'] != str(plexpy.CONFIG.MONITORING_INTERVAL)) or \ (kwargs['refresh_users_interval'] != str(plexpy.CONFIG.REFRESH_USERS_INTERVAL)): @@ -538,15 +1175,28 @@ class WebInterface(object): if 'pms_ip' in kwargs: if kwargs['pms_ip'] != plexpy.CONFIG.PMS_IP: + refresh_libraries = True refresh_users = True - + + # Remove config with 'hscard-' prefix and change home_stats_cards to list if 'home_stats_cards' in kwargs: - if kwargs['home_stats_cards'] != 'watch_statistics': - kwargs['home_stats_cards'] = ', '.join(kwargs['home_stats_cards']) + for k in kwargs.keys(): + if k.startswith('hscard-'): + del kwargs[k] + kwargs['home_stats_cards'] = kwargs['home_stats_cards'].split(',') + if kwargs['home_stats_cards'] == ['first_run_wizard']: + kwargs['home_stats_cards'] = plexpy.CONFIG.HOME_STATS_CARDS + + # Remove config with 'hlcard-' prefix and change home_library_cards to list if 'home_library_cards' in kwargs: - if kwargs['home_library_cards'] != 'library_statistics': - kwargs['home_library_cards'] = ', '.join(kwargs['home_library_cards']) + for k in kwargs.keys(): + if k.startswith('hlcard-'): + del kwargs[k] + kwargs['home_library_cards'] = kwargs['home_library_cards'].split(',') + + if 
kwargs['home_library_cards'] == ['first_run_wizard']: + refresh_libraries = True plexpy.CONFIG.process_kwargs(kwargs) @@ -555,854 +1205,24 @@ class WebInterface(object): # Get new server URLs for SSL communications. plextv.get_real_pms_url() - - # Get new server friendly name + + # Get new server friendly name. pmsconnect.get_server_friendly_name() - + # Reconfigure scheduler if intervals changed if reschedule: plexpy.initialize_scheduler() + # Refresh libraries list if our server IP changes. + if refresh_libraries: + threading.Thread(target=pmsconnect.refresh_libraries).start() + # Refresh users table if our server IP changes. + if refresh_users: + threading.Thread(target=plextv.refresh_users).start() raise cherrypy.HTTPRedirect("settings") - @cherrypy.expose - def set_notification_config(self, **kwargs): - - for plain_config, use_config in [(x[4:], x) for x in kwargs if x.startswith('use_')]: - # the use prefix is fairly nice in the html, but does not match the actual config - kwargs[plain_config] = kwargs[use_config] - del kwargs[use_config] - - plexpy.CONFIG.process_kwargs(kwargs) - - # Write the config - plexpy.CONFIG.write() - - cherrypy.response.status = 200 - - @cherrypy.expose - def do_state_change(self, signal, title, timer): - message = title - quote = self.random_arnold_quotes() - plexpy.SIGNAL = signal - - return serve_template(templatename="shutdown.html", title=title, - message=message, timer=timer, quote=quote) - - @cherrypy.expose - def get_history(self, user=None, user_id=None, grouping=0, **kwargs): - - if grouping == 'false': - grouping = 0 - else: - grouping = plexpy.CONFIG.GROUP_HISTORY_TABLES - - watched_percent = plexpy.CONFIG.NOTIFY_WATCHED_PERCENT - - custom_where = [] - if user_id: - custom_where.append(['session_history.user_id', user_id]) - elif user: - custom_where.append(['session_history.user', user]) - if 'rating_key' in kwargs: - rating_key = kwargs.get('rating_key', "") - custom_where.append(['session_history.rating_key', rating_key]) - 
if 'parent_rating_key' in kwargs: - rating_key = kwargs.get('parent_rating_key', "") - custom_where.append(['session_history.parent_rating_key', rating_key]) - if 'grandparent_rating_key' in kwargs: - rating_key = kwargs.get('grandparent_rating_key', "") - custom_where.append(['session_history.grandparent_rating_key', rating_key]) - if 'start_date' in kwargs: - start_date = kwargs.get('start_date', "") - custom_where.append(['strftime("%Y-%m-%d", datetime(started, "unixepoch", "localtime"))', start_date]) - if 'reference_id' in kwargs: - reference_id = kwargs.get('reference_id', "") - custom_where.append(['session_history.reference_id', reference_id]) - if 'media_type' in kwargs: - media_type = kwargs.get('media_type', "") - if media_type != 'all': - custom_where.append(['session_history.media_type', media_type]) - - data_factory = datafactory.DataFactory() - history = data_factory.get_history(kwargs=kwargs, custom_where=custom_where, grouping=grouping, watched_percent=watched_percent) - - cherrypy.response.headers['Content-type'] = 'application/json' - return json.dumps(history) - - @cherrypy.expose - def history_table_modal(self, start_date=None, **kwargs): - - return serve_template(templatename="history_table_modal.html", title="History Data", data=start_date) - - @cherrypy.expose - def shutdown(self): - return self.do_state_change('shutdown', 'Shutting Down', 15) - - @cherrypy.expose - def restart(self): - return self.do_state_change('restart', 'Restarting', 30) - - @cherrypy.expose - def update(self): - return self.do_state_change('update', 'Updating', 120) - - @cherrypy.expose - def api(self, *args, **kwargs): - from plexpy.api import Api - - a = Api() - a.checkParams(*args, **kwargs) - - return a.fetchData() - - @cherrypy.expose - def test_notifier(self, config_id=None, subject='PlexPy', body='Test notification', **kwargs): - cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store" - - if config_id.isdigit(): - agents = 
notifiers.available_notification_agents() - for agent in agents: - if int(config_id) == agent['id']: - this_agent = agent - break - else: - this_agent = None - - if this_agent: - logger.debug("Sending test %s notification." % this_agent['name']) - notifiers.send_notification(this_agent['id'], subject, body, **kwargs) - return "Notification sent." - else: - logger.debug("Unable to send test notification, invalid notification agent ID %s." % config_id) - return "Invalid notification agent ID %s." % config_id - else: - logger.debug("Unable to send test notification, no notification agent ID received.") - return "No notification agent ID received." - - @cherrypy.expose - def twitterStep1(self): - cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store" - tweet = notifiers.TwitterNotifier() - return tweet._get_authorization() - - @cherrypy.expose - def twitterStep2(self, key): - cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store" - tweet = notifiers.TwitterNotifier() - result = tweet._get_credentials(key) - # logger.info(u"result: " + str(result)) - if result: - return "Key verification successful" - else: - return "Unable to verify key" - - @cherrypy.expose - def facebookStep1(self): - cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store" - facebook = notifiers.FacebookNotifier() - return facebook._get_authorization() - - @cherrypy.expose - def facebookStep2(self, code): - cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store" - facebook = notifiers.FacebookNotifier() - result = facebook._get_credentials(code) - # logger.info(u"result: " + str(result)) - if result: - return "Key verification successful, you may close this page now." 
- else: - return "Unable to verify key" - - @cherrypy.expose - def osxnotifyregister(self, app): - cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store" - from osxnotify import registerapp as osxnotify - - result, msg = osxnotify.registerapp(app) - if result: - osx_notify = notifiers.OSX_NOTIFY() - osx_notify.notify('Registered', result, 'Success :-)') - logger.info('Registered %s, to re-register a different app, delete this app first' % result) - else: - logger.warn(msg) - return msg - - @cherrypy.expose - def get_pms_token(self): - - token = plextv.PlexTV() - result = token.get_token() - - if result: - return result - else: - logger.warn('Unable to retrieve Plex.tv token.') - return False - - @cherrypy.expose - def get_pms_sessions_json(self, **kwargs): - - pms_connect = pmsconnect.PmsConnect() - result = pms_connect.get_sessions('json') - - if result: - cherrypy.response.headers['Content-type'] = 'application/json' - return result - else: - logger.warn('Unable to retrieve data.') - return False - - @cherrypy.expose - def get_current_activity(self, **kwargs): - - try: - pms_connect = pmsconnect.PmsConnect() - result = pms_connect.get_current_activity() - - data_factory = datafactory.DataFactory() - for session in result['sessions']: - if not session['ip_address']: - ip_address = data_factory.get_session_ip(session['session_key']) - session['ip_address'] = ip_address - - except: - return serve_template(templatename="current_activity.html", data=None) - - if result: - return serve_template(templatename="current_activity.html", data=result) - else: - logger.warn('Unable to retrieve data.') - return serve_template(templatename="current_activity.html", data=None) - - @cherrypy.expose - def get_current_activity_header(self, **kwargs): - - try: - pms_connect = pmsconnect.PmsConnect() - result = pms_connect.get_current_activity() - except IOError, e: - return serve_template(templatename="current_activity_header.html", data=None) - - if result: - 
return serve_template(templatename="current_activity_header.html", data=result['stream_count']) - else: - logger.warn('Unable to retrieve data.') - return serve_template(templatename="current_activity_header.html", data=None) - - @cherrypy.expose - def get_recently_added(self, count='0', **kwargs): - - try: - pms_connect = pmsconnect.PmsConnect() - result = pms_connect.get_recently_added_details(count) - except IOError, e: - return serve_template(templatename="recently_added.html", data=None) - - if result: - return serve_template(templatename="recently_added.html", data=result['recently_added']) - else: - logger.warn('Unable to retrieve data.') - return serve_template(templatename="recently_added.html", data=None) - - @cherrypy.expose - def pms_image_proxy(self, img='', width='0', height='0', fallback=None, **kwargs): - try: - pms_connect = pmsconnect.PmsConnect() - result = pms_connect.get_image(img, width, height) - cherrypy.response.headers['Content-type'] = result[1] - return result[0] - except: - logger.warn('Image proxy queried but errors occured.') - if fallback == 'poster': - logger.info('Trying fallback image...') - try: - fallback_image = open(self.interface_dir + common.DEFAULT_POSTER_THUMB, 'rb') - cherrypy.response.headers['Content-type'] = 'image/png' - return fallback_image - except IOError, e: - logger.error('Unable to read fallback image. %s' % e) - elif fallback == 'cover': - logger.info('Trying fallback image...') - try: - fallback_image = open(self.interface_dir + common.DEFAULT_COVER_THUMB, 'rb') - cherrypy.response.headers['Content-type'] = 'image/png' - return fallback_image - except IOError, e: - logger.error('Unable to read fallback image. 
%s' % e) - - return None - - @cherrypy.expose - def info(self, item_id=None, source=None, **kwargs): - metadata = None - query = None - - config = { - "pms_identifier": plexpy.CONFIG.PMS_IDENTIFIER - } - - if source == 'history': - data_factory = datafactory.DataFactory() - metadata = data_factory.get_metadata_details(row_id=item_id) - elif item_id == 'movie': - metadata = {'media_type': 'library', 'library': 'movie', 'media_type_filter': 'movie', 'title': 'Movies'} - elif item_id == 'show': - metadata = {'media_type': 'library', 'library': 'show', 'media_type_filter': 'episode', 'title': 'TV Shows'} - elif item_id == 'artist': - metadata = {'media_type': 'library', 'library': 'artist', 'media_type_filter': 'track', 'title': 'Music'} - else: - pms_connect = pmsconnect.PmsConnect() - result = pms_connect.get_metadata_details(rating_key=item_id) - if result: - metadata = result['metadata'] - else: - data_factory = datafactory.DataFactory() - query = data_factory.get_search_query(rating_key=item_id) - - if metadata: - return serve_template(templatename="info.html", data=metadata, title="Info", config=config) - else: - logger.warn('Unable to retrieve data.') - return serve_template(templatename="info.html", data=None, query=query, title="Info") - - @cherrypy.expose - def get_user_recently_watched(self, user=None, user_id=None, limit='10', **kwargs): - - data_factory = datafactory.DataFactory() - result = data_factory.get_recently_watched(user_id=user_id, user=user, limit=limit) - - if result: - return serve_template(templatename="user_recently_watched.html", data=result, - title="Recently Watched") - else: - logger.warn('Unable to retrieve data.') - return serve_template(templatename="user_recently_watched.html", data=None, - title="Recently Watched") - - @cherrypy.expose - def get_user_watch_time_stats(self, user=None, user_id=None, **kwargs): - - user_data = users.Users() - result = user_data.get_user_watch_time_stats(user_id=user_id, user=user) - - if result: - 
return serve_template(templatename="user_watch_time_stats.html", data=result, title="Watch Stats") - else: - logger.warn('Unable to retrieve data.') - return serve_template(templatename="user_watch_time_stats.html", data=None, title="Watch Stats") - - @cherrypy.expose - def get_user_player_stats(self, user=None, user_id=None, **kwargs): - - user_data = users.Users() - result = user_data.get_user_player_stats(user_id=user_id, user=user) - - if result: - return serve_template(templatename="user_player_stats.html", data=result, - title="Player Stats") - else: - logger.warn('Unable to retrieve data.') - return serve_template(templatename="user_player_stats.html", data=None, title="Player Stats") - - @cherrypy.expose - def get_item_children(self, rating_key='', **kwargs): - - pms_connect = pmsconnect.PmsConnect() - result = pms_connect.get_item_children(rating_key) - - if result: - return serve_template(templatename="info_children_list.html", data=result, title="Children List") - else: - logger.warn('Unable to retrieve data.') - return serve_template(templatename="info_children_list.html", data=None, title="Children List") - - @cherrypy.expose - def get_metadata_json(self, rating_key='', **kwargs): - - pms_connect = pmsconnect.PmsConnect() - result = pms_connect.get_metadata(rating_key, 'json') - - if result: - cherrypy.response.headers['Content-type'] = 'application/json' - return result - else: - logger.warn('Unable to retrieve data.') - - @cherrypy.expose - def get_metadata_xml(self, rating_key='', **kwargs): - - pms_connect = pmsconnect.PmsConnect() - result = pms_connect.get_metadata(rating_key) - - if result: - cherrypy.response.headers['Content-type'] = 'application/xml' - return result - else: - logger.warn('Unable to retrieve data.') - - @cherrypy.expose - def get_recently_added_json(self, count='0', **kwargs): - - pms_connect = pmsconnect.PmsConnect() - result = pms_connect.get_recently_added(count, 'json') - - if result: - 
cherrypy.response.headers['Content-type'] = 'application/json' - return result - else: - logger.warn('Unable to retrieve data.') - - @cherrypy.expose - def get_episode_list_json(self, rating_key='', **kwargs): - - pms_connect = pmsconnect.PmsConnect() - result = pms_connect.get_episode_list(rating_key, 'json') - - if result: - cherrypy.response.headers['Content-type'] = 'application/json' - return result - else: - logger.warn('Unable to retrieve data.') - - @cherrypy.expose - def get_user_ips(self, user_id=None, user=None, **kwargs): - - custom_where = [] - if user_id: - custom_where = [['user_id', user_id]] - elif user: - custom_where = [['user', user]] - - user_data = users.Users() - history = user_data.get_user_unique_ips(kwargs=kwargs, - custom_where=custom_where) - - cherrypy.response.headers['Content-type'] = 'application/json' - return json.dumps(history) - - @cherrypy.expose - def set_graph_config(self, graph_type=None, graph_days=None, graph_tab=None): - if graph_type: - plexpy.CONFIG.__setattr__('GRAPH_TYPE', graph_type) - plexpy.CONFIG.write() - if graph_days: - plexpy.CONFIG.__setattr__('GRAPH_DAYS', graph_days) - plexpy.CONFIG.write() - if graph_tab: - plexpy.CONFIG.__setattr__('GRAPH_TAB', graph_tab) - plexpy.CONFIG.write() - - return "Updated graphs config values." 
- - @cherrypy.expose - def get_plays_by_date(self, time_range='30', y_axis='plays', **kwargs): - - graph = graphs.Graphs() - result = graph.get_total_plays_per_day(time_range=time_range, y_axis=y_axis) - - if result: - cherrypy.response.headers['Content-type'] = 'application/json' - return json.dumps(result) - else: - logger.warn('Unable to retrieve data.') - - @cherrypy.expose - def get_plays_by_dayofweek(self, time_range='30', y_axis='plays', **kwargs): - - graph = graphs.Graphs() - result = graph.get_total_plays_per_dayofweek(time_range=time_range, y_axis=y_axis) - - if result: - cherrypy.response.headers['Content-type'] = 'application/json' - return json.dumps(result) - else: - logger.warn('Unable to retrieve data.') - - @cherrypy.expose - def get_plays_by_hourofday(self, time_range='30', y_axis='plays', **kwargs): - - graph = graphs.Graphs() - result = graph.get_total_plays_per_hourofday(time_range=time_range, y_axis=y_axis) - - if result: - cherrypy.response.headers['Content-type'] = 'application/json' - return json.dumps(result) - else: - logger.warn('Unable to retrieve data.') - - @cherrypy.expose - def get_plays_per_month(self, y_axis='plays', **kwargs): - - graph = graphs.Graphs() - result = graph.get_total_plays_per_month(y_axis=y_axis) - - if result: - cherrypy.response.headers['Content-type'] = 'application/json' - return json.dumps(result) - else: - logger.warn('Unable to retrieve data.') - - @cherrypy.expose - def get_plays_by_top_10_platforms(self, time_range='30', y_axis='plays', **kwargs): - - graph = graphs.Graphs() - result = graph.get_total_plays_by_top_10_platforms(time_range=time_range, y_axis=y_axis) - - if result: - cherrypy.response.headers['Content-type'] = 'application/json' - return json.dumps(result) - else: - logger.warn('Unable to retrieve data.') - - @cherrypy.expose - def get_plays_by_top_10_users(self, time_range='30', y_axis='plays', **kwargs): - - graph = graphs.Graphs() - result = 
graph.get_total_plays_by_top_10_users(time_range=time_range, y_axis=y_axis) - - if result: - cherrypy.response.headers['Content-type'] = 'application/json' - return json.dumps(result) - else: - logger.warn('Unable to retrieve data.') - - @cherrypy.expose - def get_plays_by_stream_type(self, time_range='30', y_axis='plays', **kwargs): - - graph = graphs.Graphs() - result = graph.get_total_plays_per_stream_type(time_range=time_range, y_axis=y_axis) - - if result: - cherrypy.response.headers['Content-type'] = 'application/json' - return json.dumps(result) - else: - logger.warn('Unable to retrieve data.') - - @cherrypy.expose - def get_plays_by_source_resolution(self, time_range='30', y_axis='plays', **kwargs): - - graph = graphs.Graphs() - result = graph.get_total_plays_by_source_resolution(time_range=time_range, y_axis=y_axis) - - if result: - cherrypy.response.headers['Content-type'] = 'application/json' - return json.dumps(result) - else: - logger.warn('Unable to retrieve data.') - - @cherrypy.expose - def get_plays_by_stream_resolution(self, time_range='30', y_axis='plays', **kwargs): - - graph = graphs.Graphs() - result = graph.get_total_plays_by_stream_resolution(time_range=time_range, y_axis=y_axis) - - if result: - cherrypy.response.headers['Content-type'] = 'application/json' - return json.dumps(result) - else: - logger.warn('Unable to retrieve data.') - - @cherrypy.expose - def get_stream_type_by_top_10_users(self, time_range='30', y_axis='plays', **kwargs): - - graph = graphs.Graphs() - result = graph.get_stream_type_by_top_10_users(time_range=time_range, y_axis=y_axis) - - if result: - cherrypy.response.headers['Content-type'] = 'application/json' - return json.dumps(result) - else: - logger.warn('Unable to retrieve data.') - - @cherrypy.expose - def get_stream_type_by_top_10_platforms(self, time_range='30', y_axis='plays', **kwargs): - - graph = graphs.Graphs() - result = graph.get_stream_type_by_top_10_platforms(time_range=time_range, y_axis=y_axis) - - 
if result: - cherrypy.response.headers['Content-type'] = 'application/json' - return json.dumps(result) - else: - logger.warn('Unable to retrieve data.') - - @cherrypy.expose - def get_friends_list(self, **kwargs): - - plex_tv = plextv.PlexTV() - result = plex_tv.get_plextv_friends('json') - - if result: - cherrypy.response.headers['Content-type'] = 'application/json' - return result - else: - logger.warn('Unable to retrieve data.') - - @cherrypy.expose - def get_user_details(self, **kwargs): - - plex_tv = plextv.PlexTV() - result = plex_tv.get_plextv_user_details('json') - - if result: - cherrypy.response.headers['Content-type'] = 'application/json' - return result - else: - logger.warn('Unable to retrieve data.') - - @cherrypy.expose - def get_server_list(self, **kwargs): - - plex_tv = plextv.PlexTV() - result = plex_tv.get_plextv_server_list('json') - - if result: - cherrypy.response.headers['Content-type'] = 'application/json' - return result - else: - logger.warn('Unable to retrieve data.') - - @cherrypy.expose - def get_sync_lists(self, machine_id='', **kwargs): - - plex_tv = plextv.PlexTV() - result = plex_tv.get_plextv_sync_lists(machine_id=machine_id, output_format='json') - - if result: - cherrypy.response.headers['Content-type'] = 'application/json' - return result - else: - logger.warn('Unable to retrieve data.') - - @cherrypy.expose - def get_servers(self, **kwargs): - - pms_connect = pmsconnect.PmsConnect() - result = pms_connect.get_server_list(output_format='json') - - if result: - cherrypy.response.headers['Content-type'] = 'application/json' - return result - else: - logger.warn('Unable to retrieve data.') - - @cherrypy.expose - def get_servers_info(self, **kwargs): - - pms_connect = pmsconnect.PmsConnect() - result = pms_connect.get_servers_info() - - if result: - cherrypy.response.headers['Content-type'] = 'application/json' - return json.dumps(result) - else: - logger.warn('Unable to retrieve data.') - - @cherrypy.expose - def 
get_server_friendly_name(self, **kwargs): - - result = pmsconnect.get_server_friendly_name() - - if result: - cherrypy.response.headers['Content-type'] = 'application/json' - return result - else: - logger.warn('Unable to retrieve data.') - - @cherrypy.expose - def get_server_prefs(self, pref=None, **kwargs): - - if pref: - pms_connect = pmsconnect.PmsConnect() - result = pms_connect.get_server_pref(pref=pref) - else: - result = None - - if result: - cherrypy.response.headers['Content-type'] = 'application/json' - return result - else: - logger.warn('Unable to retrieve data.') - - @cherrypy.expose - def get_server_children(self, **kwargs): - - pms_connect = pmsconnect.PmsConnect() - result = pms_connect.get_server_children() - - if result: - cherrypy.response.headers['Content-type'] = 'application/json' - return json.dumps(result) - else: - logger.warn('Unable to retrieve data.') - - @cherrypy.expose - def get_activity(self, **kwargs): - - pms_connect = pmsconnect.PmsConnect() - result = pms_connect.get_current_activity() - - if result: - cherrypy.response.headers['Content-type'] = 'application/json' - return json.dumps(result) - else: - logger.warn('Unable to retrieve data.') - - @cherrypy.expose - def get_full_users_list(self, **kwargs): - - plex_tv = plextv.PlexTV() - result = plex_tv.get_full_users_list() - - if result: - cherrypy.response.headers['Content-type'] = 'application/json' - return json.dumps(result) - else: - logger.warn('Unable to retrieve data.') - - @cherrypy.expose - def refresh_users_list(self, **kwargs): - threading.Thread(target=plextv.refresh_users).start() - logger.info('Manual user list refresh requested.') - - @cherrypy.expose - def get_sync(self, machine_id=None, user_id=None, **kwargs): - - pms_connect = pmsconnect.PmsConnect() - server_id = pms_connect.get_server_identity() - - plex_tv = plextv.PlexTV() - if not machine_id: - result = plex_tv.get_synced_items(machine_id=server_id['machine_identifier'], user_id=user_id) - else: - result 
= plex_tv.get_synced_items(machine_id=machine_id, user_id=user_id) - - if result: - output = {"data": result} - else: - logger.warn('Unable to retrieve sync data for user.') - output = {"data": []} - - cherrypy.response.headers['Content-type'] = 'application/json' - return json.dumps(output) - - @cherrypy.expose - def get_sync_item(self, sync_id, **kwargs): - - pms_connect = pmsconnect.PmsConnect() - result = pms_connect.get_sync_item(sync_id, output_format='json') - - if result: - cherrypy.response.headers['Content-type'] = 'application/json' - return result - else: - logger.warn('Unable to retrieve data.') - - @cherrypy.expose - def get_sync_transcode_queue(self, **kwargs): - - pms_connect = pmsconnect.PmsConnect() - result = pms_connect.get_sync_transcode_queue(output_format='json') - - if result: - cherrypy.response.headers['Content-type'] = 'application/json' - return result - else: - logger.warn('Unable to retrieve data.') - - @cherrypy.expose - def get_server_pref(self, pref=None, **kwargs): - - pms_connect = pmsconnect.PmsConnect() - result = pms_connect.get_server_pref(pref=pref) - - if result: - return result - else: - logger.warn('Unable to retrieve data.') - - @cherrypy.expose - def get_plexwatch_export_data(self, database_path=None, table_name=None, import_ignore_interval=0, **kwargs): - from plexpy import plexwatch_import - - db_check_msg = plexwatch_import.validate_database(database=database_path, - table_name=table_name) - if db_check_msg == 'success': - threading.Thread(target=plexwatch_import.import_from_plexwatch, - kwargs={'database': database_path, - 'table_name': table_name, - 'import_ignore_interval': import_ignore_interval}).start() - return 'Import has started. Check the PlexPy logs to monitor any problems.' 
- else: - return db_check_msg - - @cherrypy.expose - def plexwatch_import(self, **kwargs): - return serve_template(templatename="plexwatch_import.html", title="Import PlexWatch Database") - - @cherrypy.expose - def get_server_id(self, hostname=None, port=None, identifier=None, ssl=0, remote=0, **kwargs): - from plexpy import http_handler - - if hostname and port: - # Set PMS attributes to get the real PMS url - plexpy.CONFIG.__setattr__('PMS_IP', hostname) - plexpy.CONFIG.__setattr__('PMS_PORT', port) - plexpy.CONFIG.__setattr__('PMS_IDENTIFIER', identifier) - plexpy.CONFIG.__setattr__('PMS_SSL', ssl) - plexpy.CONFIG.__setattr__('PMS_IS_REMOTE', remote) - plexpy.CONFIG.write() - - plextv.get_real_pms_url() - - pms_connect = pmsconnect.PmsConnect() - request = pms_connect.get_local_server_identity() - - if request: - cherrypy.response.headers['Content-type'] = 'application/xml' - return request - else: - logger.warn('Unable to retrieve data.') - return None - else: - return None - - @cherrypy.expose - def random_arnold_quotes(self, **kwargs): - from random import randint - quote_list = ['To crush your enemies, see them driven before you, and to hear the lamentation of their women!', - 'Your clothes, give them to me, now!', - 'Do it!', - 'If it bleeds, we can kill it', - 'See you at the party Richter!', - 'Let off some steam, Bennett', - 'I\'ll be back', - 'Get to the chopper!', - 'Hasta La Vista, Baby!', - 'It\'s not a tumor!', - 'Dillon, you son of a bitch!', - 'Benny!! Screw you!!', - 'Stop whining! You kids are soft. You lack discipline.', - 'Nice night for a walk.', - 'Stick around!', - 'I need your clothes, your boots and your motorcycle.', - 'No, it\'s not a tumor. It\'s not a tumor!', - 'I LIED!', - 'See you at the party, Richter!', - 'Are you Sarah Conner?', - 'I\'m a cop you idiot!', - 'Come with me if you want to live.', - 'Who is your daddy and what does he do?' 
- ] - - random_number = randint(0, len(quote_list) - 1) - return quote_list[int(random_number)] - @cherrypy.expose def get_notification_agent_config(self, config_id, **kwargs): if config_id.isdigit(): @@ -1439,66 +1259,261 @@ class WebInterface(object): data=this_agent) @cherrypy.expose - def delete_history_rows(self, row_id, **kwargs): - data_factory = datafactory.DataFactory() + def test_notifier(self, config_id=None, subject='PlexPy', body='Test notification', **kwargs): + cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store" - if row_id: - delete_row = data_factory.delete_session_history_rows(row_id=row_id) - - if delete_row: - cherrypy.response.headers['Content-type'] = 'application/json' - return json.dumps({'message': delete_row}) + if config_id.isdigit(): + agents = notifiers.available_notification_agents() + for agent in agents: + if int(config_id) == agent['id']: + this_agent = agent + break + else: + this_agent = None + + if this_agent: + logger.debug(u"Sending test %s notification." % this_agent['name']) + notifiers.send_notification(this_agent['id'], subject, body, **kwargs) + return "Notification sent." + else: + logger.debug(u"Unable to send test notification, invalid notification agent ID %s." % config_id) + return "Invalid notification agent ID %s." % config_id else: - cherrypy.response.headers['Content-type'] = 'application/json' - return json.dumps({'message': 'no data received'}) + logger.debug(u"Unable to send test notification, no notification agent ID received.") + return "No notification agent ID received." 
+ + @cherrypy.expose + def twitterStep1(self): + cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store" + tweet = notifiers.TwitterNotifier() + return tweet._get_authorization() @cherrypy.expose - def delete_all_user_history(self, user_id, **kwargs): - data_factory = datafactory.DataFactory() - - if user_id: - delete_row = data_factory.delete_all_user_history(user_id=user_id) - - if delete_row: - cherrypy.response.headers['Content-type'] = 'application/json' - return json.dumps({'message': delete_row}) + def twitterStep2(self, key): + cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store" + tweet = notifiers.TwitterNotifier() + result = tweet._get_credentials(key) + # logger.info(u"result: " + str(result)) + if result: + return "Key verification successful" else: - cherrypy.response.headers['Content-type'] = 'application/json' - return json.dumps({'message': 'no data received'}) + return "Unable to verify key" + + def facebookStep1(self): + cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store" + facebook = notifiers.FacebookNotifier() + return facebook._get_authorization() @cherrypy.expose - def delete_user(self, user_id, **kwargs): - data_factory = datafactory.DataFactory() - - if user_id: - delete_row = data_factory.delete_user(user_id=user_id) - - if delete_row: - cherrypy.response.headers['Content-type'] = 'application/json' - return json.dumps({'message': delete_row}) + def facebookStep2(self, code): + cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store" + facebook = notifiers.FacebookNotifier() + result = facebook._get_credentials(code) + # logger.info(u"result: " + str(result)) + if result: + return "Key verification successful, you may close this page now." 
else: - cherrypy.response.headers['Content-type'] = 'application/json' - return json.dumps({'message': 'no data received'}) + return "Unable to verify key" @cherrypy.expose - def undelete_user(self, user_id=None, username=None, **kwargs): - data_factory = datafactory.DataFactory() + def osxnotifyregister(self, app): + cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store" + from osxnotify import registerapp as osxnotify - if user_id: - delete_row = data_factory.undelete_user(user_id=user_id) - - if delete_row: - cherrypy.response.headers['Content-type'] = 'application/json' - return json.dumps({'message': delete_row}) - elif username: - delete_row = data_factory.undelete_user(username=username) - - if delete_row: - cherrypy.response.headers['Content-type'] = 'application/json' - return json.dumps({'message': delete_row}) + result, msg = osxnotify.registerapp(app) + if result: + osx_notify = notifiers.OSX_NOTIFY() + osx_notify.notify('Registered', result, 'Success :-)') + # logger.info(u"Registered %s, to re-register a different app, delete this app first" % result) else: - cherrypy.response.headers['Content-type'] = 'application/json' - return json.dumps({'message': 'no data received'}) + logger.warn(msg) + return msg + + @cherrypy.expose + def set_notification_config(self, **kwargs): + + for plain_config, use_config in [(x[4:], x) for x in kwargs if x.startswith('use_')]: + # the use prefix is fairly nice in the html, but does not match the actual config + kwargs[plain_config] = kwargs[use_config] + del kwargs[use_config] + + plexpy.CONFIG.process_kwargs(kwargs) + + # Write the config + plexpy.CONFIG.write() + + cherrypy.response.status = 200 + + @cherrypy.expose + def get_plexwatch_export_data(self, database_path=None, table_name=None, import_ignore_interval=0, **kwargs): + from plexpy import plexwatch_import + + db_check_msg = plexwatch_import.validate_database(database=database_path, + table_name=table_name) + if db_check_msg == 'success': + 
threading.Thread(target=plexwatch_import.import_from_plexwatch, + kwargs={'database': database_path, + 'table_name': table_name, + 'import_ignore_interval': import_ignore_interval}).start() + return 'Import has started. Check the PlexPy logs to monitor any problems.' + else: + return db_check_msg + + @cherrypy.expose + def plexwatch_import(self, **kwargs): + return serve_template(templatename="plexwatch_import.html", title="Import PlexWatch Database") + + @cherrypy.expose + def get_pms_token(self): + + token = plextv.PlexTV() + result = token.get_token() + + if result: + return result + else: + logger.warn(u"Unable to retrieve Plex.tv token.") + return False + + @cherrypy.expose + def get_server_id(self, hostname=None, port=None, identifier=None, ssl=0, remote=0, **kwargs): + from plexpy import http_handler + + if hostname and port: + # Set PMS attributes to get the real PMS url + plexpy.CONFIG.__setattr__('PMS_IP', hostname) + plexpy.CONFIG.__setattr__('PMS_PORT', port) + plexpy.CONFIG.__setattr__('PMS_IDENTIFIER', identifier) + plexpy.CONFIG.__setattr__('PMS_SSL', ssl) + plexpy.CONFIG.__setattr__('PMS_IS_REMOTE', remote) + plexpy.CONFIG.write() + + plextv.get_real_pms_url() + + pms_connect = pmsconnect.PmsConnect() + request = pms_connect.get_local_server_identity() + + if request: + cherrypy.response.headers['Content-type'] = 'application/xml' + return request + else: + logger.warn(u"Unable to retrieve data for get_server_id.") + return None + else: + return None + + @cherrypy.expose + def get_server_pref(self, pref=None, **kwargs): + + pms_connect = pmsconnect.PmsConnect() + result = pms_connect.get_server_pref(pref=pref) + + if result: + return result + else: + logger.warn(u"Unable to retrieve data for get_server_pref.") + + @cherrypy.expose + def generateAPI(self): + apikey = hashlib.sha224(str(random.getrandbits(256))).hexdigest()[0:32] + logger.info(u"New API key generated.") + return apikey + + @cherrypy.expose + def checkGithub(self): + from plexpy import 
versioncheck + + versioncheck.checkGithub() + raise cherrypy.HTTPRedirect("home") + + @cherrypy.expose + def do_state_change(self, signal, title, timer): + message = title + quote = self.random_arnold_quotes() + plexpy.SIGNAL = signal + + return serve_template(templatename="shutdown.html", title=title, + message=message, timer=timer, quote=quote) + + @cherrypy.expose + def shutdown(self): + return self.do_state_change('shutdown', 'Shutting Down', 15) + + @cherrypy.expose + def restart(self): + return self.do_state_change('restart', 'Restarting', 30) + + @cherrypy.expose + def update(self): + return self.do_state_change('update', 'Updating', 120) + + + ##### Info ##### + + @cherrypy.expose + def info(self, rating_key=None, source=None, query=None, **kwargs): + metadata = None + + config = { + "pms_identifier": plexpy.CONFIG.PMS_IDENTIFIER + } + + if source == 'history': + data_factory = datafactory.DataFactory() + metadata = data_factory.get_metadata_details(rating_key=rating_key) + else: + pms_connect = pmsconnect.PmsConnect() + result = pms_connect.get_metadata_details(rating_key=rating_key, get_media_info=True) + if result: + metadata = result['metadata'] + + if metadata: + return serve_template(templatename="info.html", data=metadata, title="Info", config=config) + else: + return self.update_metadata(rating_key, query) + + @cherrypy.expose + def get_item_children(self, rating_key='', **kwargs): + + pms_connect = pmsconnect.PmsConnect() + result = pms_connect.get_item_children(rating_key) + + if result: + return serve_template(templatename="info_children_list.html", data=result, title="Children List") + else: + logger.warn(u"Unable to retrieve data for get_item_children.") + return serve_template(templatename="info_children_list.html", data=None, title="Children List") + + @cherrypy.expose + def pms_image_proxy(self, img='', width='0', height='0', fallback=None, **kwargs): + try: + pms_connect = pmsconnect.PmsConnect() + result = pms_connect.get_image(img, width, 
height) + cherrypy.response.headers['Content-type'] = result[1] + return result[0] + except: + logger.warn(u"Image proxy queried but errors occured.") + if fallback == 'poster': + logger.info(u"Trying fallback image...") + try: + fallback_image = open(self.interface_dir + common.DEFAULT_POSTER_THUMB, 'rb') + cherrypy.response.headers['Content-type'] = 'image/png' + return fallback_image + except IOError, e: + logger.error(u"Unable to read fallback %s image: %s" % (fallback, e)) + elif fallback == 'cover': + logger.info(u"Trying fallback image...") + try: + fallback_image = open(self.interface_dir + common.DEFAULT_COVER_THUMB, 'rb') + cherrypy.response.headers['Content-type'] = 'image/png' + return fallback_image + except IOError, e: + logger.error(u"Unable to read fallback %s image: %s" % (fallback, e)) + + return None + + + ##### Search ##### @cherrypy.expose def search(self, query=''): @@ -1515,7 +1530,7 @@ class WebInterface(object): cherrypy.response.headers['Content-type'] = 'application/json' return json.dumps(result) else: - logger.warn('Unable to retrieve data.') + logger.warn(u"Unable to retrieve data for search_results.") @cherrypy.expose def get_search_results_children(self, query, media_type=None, season_index=None, **kwargs): @@ -1527,35 +1542,58 @@ class WebInterface(object): result['results_list'] = {media_type: result['results_list'][media_type]} if media_type == 'season' and season_index: for season in result['results_list']['season']: - if season['index'] == season_index: + if season['media_index'] == season_index: result['results_list']['season'] = [season] break if result: return serve_template(templatename="info_search_results_list.html", data=result, title="Search Result List") else: - logger.warn('Unable to retrieve data.') + logger.warn(u"Unable to retrieve data for get_search_results_children.") return serve_template(templatename="info_search_results_list.html", data=None, title="Search Result List") + + + ##### Update Metadata ##### + 
@cherrypy.expose - def update_history_rating_key(self, old_rating_key, new_rating_key, media_type, **kwargs): + def update_metadata(self, rating_key=None, query=None, **kwargs): + + query_string = query + + data_factory = datafactory.DataFactory() + query = data_factory.get_search_query(rating_key=rating_key) + if query and query_string: + query['query_string'] = query_string + + if query: + return serve_template(templatename="update_metadata.html", query=query, title="Info") + else: + logger.warn(u"Unable to retrieve data for update_metadata.") + return serve_template(templatename="update_metadata.html", query=query, title="Info") + + @cherrypy.expose + def update_metadata_details(self, old_rating_key, new_rating_key, media_type, **kwargs): data_factory = datafactory.DataFactory() pms_connect = pmsconnect.PmsConnect() - old_key_list = data_factory.get_rating_keys_list(rating_key=old_rating_key, media_type=media_type) - new_key_list = pms_connect.get_rating_keys_list(rating_key=new_rating_key, media_type=media_type) + if new_rating_key: + old_key_list = data_factory.get_rating_keys_list(rating_key=old_rating_key, media_type=media_type) + new_key_list = pms_connect.get_rating_keys_list(rating_key=new_rating_key, media_type=media_type) - update_db = data_factory.update_rating_key(old_key_list=old_key_list, - new_key_list=new_key_list, - media_type=media_type) + result = data_factory.update_metadata(old_key_list=old_key_list, + new_key_list=new_key_list, + media_type=media_type) - if update_db: + if result: cherrypy.response.headers['Content-type'] = 'application/json' - return json.dumps({'message': update_db}) + return json.dumps({'message': result}) else: cherrypy.response.headers['Content-type'] = 'application/json' return json.dumps({'message': 'no data received'}) + + # test code @cherrypy.expose def get_new_rating_keys(self, rating_key='', media_type='', **kwargs): @@ -1567,7 +1605,7 @@ class WebInterface(object): cherrypy.response.headers['Content-type'] = 
'application/json' return json.dumps(result) else: - logger.warn('Unable to retrieve data.') + logger.warn(u"Unable to retrieve data for get_new_rating_keys.") @cherrypy.expose def get_old_rating_keys(self, rating_key='', media_type='', **kwargs): @@ -1579,40 +1617,269 @@ class WebInterface(object): cherrypy.response.headers['Content-type'] = 'application/json' return json.dumps(result) else: - logger.warn('Unable to retrieve data.') + logger.warn(u"Unable to retrieve data for get_old_rating_keys.") + + + ##### API ##### @cherrypy.expose - def get_map_rating_keys(self, old_rating_key, new_rating_key, media_type, **kwargs): + def api(self, *args, **kwargs): + from plexpy.api import Api + + a = Api() + a.checkParams(*args, **kwargs) + + return a.fetchData() + + @cherrypy.expose + def get_pms_sessions_json(self, **kwargs): - data_factory = datafactory.DataFactory() pms_connect = pmsconnect.PmsConnect() + result = pms_connect.get_sessions('json') - if new_rating_key: - old_key_list = data_factory.get_rating_keys_list(rating_key=old_rating_key, media_type=media_type) - new_key_list = pms_connect.get_rating_keys_list(rating_key=new_rating_key, media_type=media_type) + if result: + cherrypy.response.headers['Content-type'] = 'application/json' + return result + else: + logger.warn(u"Unable to retrieve data for get_pms_sessions_json.") + return False - result = data_factory.update_rating_key(old_key_list=old_key_list, - new_key_list=new_key_list, - media_type=media_type) + @cherrypy.expose + def get_metadata_json(self, rating_key='', **kwargs): + + pms_connect = pmsconnect.PmsConnect() + result = pms_connect.get_metadata(rating_key, 'json') + + if result: + cherrypy.response.headers['Content-type'] = 'application/json' + return result + else: + logger.warn(u"Unable to retrieve data for get_metadata_json.") + + @cherrypy.expose + def get_metadata_xml(self, rating_key='', **kwargs): + + pms_connect = pmsconnect.PmsConnect() + result = pms_connect.get_metadata(rating_key) + + 
if result: + cherrypy.response.headers['Content-type'] = 'application/xml' + return result + else: + logger.warn(u"Unable to retrieve data for get_metadata_xml.") + + @cherrypy.expose + def get_recently_added_json(self, count='0', **kwargs): + + pms_connect = pmsconnect.PmsConnect() + result = pms_connect.get_recently_added(count, 'json') + + if result: + cherrypy.response.headers['Content-type'] = 'application/json' + return result + else: + logger.warn(u"Unable to retrieve data for get_recently_added_json.") + + @cherrypy.expose + def get_episode_list_json(self, rating_key='', **kwargs): + + pms_connect = pmsconnect.PmsConnect() + result = pms_connect.get_episode_list(rating_key, 'json') + + if result: + cherrypy.response.headers['Content-type'] = 'application/json' + return result + else: + logger.warn(u"Unable to retrieve data for get_episode_list_json.") + + @cherrypy.expose + def get_friends_list(self, **kwargs): + + plex_tv = plextv.PlexTV() + result = plex_tv.get_plextv_friends('json') + + if result: + cherrypy.response.headers['Content-type'] = 'application/json' + return result + else: + logger.warn(u"Unable to retrieve data for get_friends_list.") + + @cherrypy.expose + def get_user_details(self, **kwargs): + + plex_tv = plextv.PlexTV() + result = plex_tv.get_plextv_user_details('json') + + if result: + cherrypy.response.headers['Content-type'] = 'application/json' + return result + else: + logger.warn(u"Unable to retrieve data for get_user_details.") + + @cherrypy.expose + def get_server_list(self, **kwargs): + + plex_tv = plextv.PlexTV() + result = plex_tv.get_plextv_server_list('json') + + if result: + cherrypy.response.headers['Content-type'] = 'application/json' + return result + else: + logger.warn(u"Unable to retrieve data for get_server_list.") + + @cherrypy.expose + def get_sync_lists(self, machine_id='', **kwargs): + + plex_tv = plextv.PlexTV() + result = plex_tv.get_plextv_sync_lists(machine_id=machine_id, output_format='json') + + if result: 
+ cherrypy.response.headers['Content-type'] = 'application/json' + return result + else: + logger.warn(u"Unable to retrieve data for get_sync_lists.") + + @cherrypy.expose + def get_servers(self, **kwargs): + + pms_connect = pmsconnect.PmsConnect() + result = pms_connect.get_server_list(output_format='json') + + if result: + cherrypy.response.headers['Content-type'] = 'application/json' + return result + else: + logger.warn(u"Unable to retrieve data for get_servers.") + + @cherrypy.expose + def get_servers_info(self, **kwargs): + + pms_connect = pmsconnect.PmsConnect() + result = pms_connect.get_servers_info() if result: cherrypy.response.headers['Content-type'] = 'application/json' return json.dumps(result) else: - logger.warn('Unable to retrieve data.') + logger.warn(u"Unable to retrieve data for get_servers_info.") @cherrypy.expose - def discover(self, token=''): - """ - Returns the servers that you own as a - list of dicts (formatted for selectize) - """ - # Need to set token so result dont return http 401 - plexpy.CONFIG.__setattr__('PMS_TOKEN', token) - plexpy.CONFIG.write() + def get_server_friendly_name(self, **kwargs): - result = plextv.PlexTV() - servers = result.discover() - if servers: + result = pmsconnect.get_server_friendly_name() + + if result: cherrypy.response.headers['Content-type'] = 'application/json' - return servers + return result + else: + logger.warn(u"Unable to retrieve data for get_server_friendly_name.") + + @cherrypy.expose + def get_server_prefs(self, pref=None, **kwargs): + + if pref: + pms_connect = pmsconnect.PmsConnect() + result = pms_connect.get_server_pref(pref=pref) + else: + result = None + + if result: + cherrypy.response.headers['Content-type'] = 'application/json' + return result + else: + logger.warn(u"Unable to retrieve data for get_server_prefs.") + + @cherrypy.expose + def get_library_sections(self, **kwargs): + + library_data = libraries.Libraries() + result = library_data.get_sections() + + if result: + 
cherrypy.response.headers['Content-type'] = 'application/json' + return json.dumps(result) + else: + logger.warn(u"Unable to retrieve data for get_library_sections.") + + @cherrypy.expose + def get_activity(self, **kwargs): + + pms_connect = pmsconnect.PmsConnect() + result = pms_connect.get_current_activity() + + if result: + cherrypy.response.headers['Content-type'] = 'application/json' + return json.dumps(result) + else: + logger.warn(u"Unable to retrieve data for get_activity.") + + @cherrypy.expose + def get_full_users_list(self, **kwargs): + + plex_tv = plextv.PlexTV() + result = plex_tv.get_full_users_list() + + if result: + cherrypy.response.headers['Content-type'] = 'application/json' + return json.dumps(result) + else: + logger.warn(u"Unable to retrieve data for get_full_users_list.") + + @cherrypy.expose + def get_sync_item(self, sync_id, **kwargs): + + pms_connect = pmsconnect.PmsConnect() + result = pms_connect.get_sync_item(sync_id, output_format='json') + + if result: + cherrypy.response.headers['Content-type'] = 'application/json' + return result + else: + logger.warn(u"Unable to retrieve data for get_sync_item.") + + @cherrypy.expose + def get_sync_transcode_queue(self, **kwargs): + + pms_connect = pmsconnect.PmsConnect() + result = pms_connect.get_sync_transcode_queue(output_format='json') + + if result: + cherrypy.response.headers['Content-type'] = 'application/json' + return result + else: + logger.warn(u"Unable to retrieve data for get_sync_transcode_queue.") + + + + + @cherrypy.expose + def random_arnold_quotes(self, **kwargs): + from random import randint + quote_list = ['To crush your enemies, see them driven before you, and to hear the lamentation of their women!', + 'Your clothes, give them to me, now!', + 'Do it!', + 'If it bleeds, we can kill it', + 'See you at the party Richter!', + 'Let off some steam, Bennett', + 'I\'ll be back', + 'Get to the chopper!', + 'Hasta La Vista, Baby!', + 'It\'s not a tumor!', + 'Dillon, you son of a 
bitch!', + 'Benny!! Screw you!!', + 'Stop whining! You kids are soft. You lack discipline.', + 'Nice night for a walk.', + 'Stick around!', + 'I need your clothes, your boots and your motorcycle.', + 'No, it\'s not a tumor. It\'s not a tumor!', + 'I LIED!', + 'See you at the party, Richter!', + 'Are you Sarah Conner?', + 'I\'m a cop you idiot!', + 'Come with me if you want to live.', + 'Who is your daddy and what does he do?' + ] + + random_number = randint(0, len(quote_list) - 1) + return quote_list[int(random_number)] \ No newline at end of file