Enable video logging by default.
Move database functions to their own file. Incorporate datatables changes into original file and remove the test file. Remove old plexWatch processing and db files and references.
parent 4cde833f9d
commit d216c0d76a
13 changed files with 196 additions and 1592 deletions
@@ -146,16 +146,6 @@
  <p class="help-block">Set your preferred time format. <a href="#dateTimeOptionsModal" data-toggle="modal">Click here</a> to see the parameter list.</p>
  </div>
  </fieldset>
- <fieldset>
- <div class="wellheader">
- <h3>Plex Logs</h3>
- </div>
- <div class="form-group">
- <label for="pms_logs_folder">Logs Folder</label>
- <input type="text" id="pms_logs_folder" name="pms_logs_folder" value="${config['pms_logs_folder']}" size="30" data-parsley-trigger="change">
- <p class="help-block">Set the folder where your Plex Server logs are. <br/><a href="https://support.plex.tv/hc/en-us/articles/200250417-Plex-Media-Server-Log-Files" target="_blank">Click here</a> for help.</p>
- </div>
- </fieldset>
  <br/>
  </div>
  <div class="span4">

@@ -175,8 +165,8 @@
  </div>
  <div class="form-group">
  <label for="pms_token">PMS Token</label>
- <input type="text" id="pms_token" name="pms_token" value="${config['pms_token']}" size="30">
+ <input type="text" id="pms_token" name="pms_token" value="${config['pms_token']}" size="30" data-parsley-trigger="change" required>
- <p class="help-block">Token for Plex.tv authentication. Leave empty if no authentication is required.</p>
+ <p class="help-block">Token for Plex.tv authentication. This field is required.</p>
  </div>
  <p class="help-block"><a href="#pms-auth-modal" data-toggle="modal">Fetch Token</a></p>
  </fieldset>

@@ -190,6 +180,17 @@
  <input type="checkbox" id="pms_use_bif" name="pms_use_bif" value="1" ${config['pms_use_bif']}> Use BIF thumbs
  <p class="help-block">If you have media indexing enabled on your server, use these on the activity pane.</p>
  </div>
+ </fieldset>
+ <div class="wellheader">
+ <h3>Plex Logs</h3>
+ </div>
+ <fieldset>
+ <div class="form-group">
+ <label for="pms_logs_folder">Logs Folder</label>
+ <input type="text" id="pms_logs_folder" name="pms_logs_folder" value="${config['pms_logs_folder']}" size="30" data-parsley-trigger="change">
+ <p class="help-block">Set the folder where your Plex Server logs are. <br/><a href="https://support.plex.tv/hc/en-us/articles/200250417-Plex-Media-Server-Log-Files" target="_blank">Click here</a> for help.</p>
+ </div>
+ </fieldset>
  <!--
  <div class="checkbox">
  <input type="checkbox" id="grouping_user_history" name="grouping_user_history" value="1" ${config['grouping_user_history']}> User History

@@ -200,7 +201,6 @@
  <p class="help-block">Enable chart grouping.</p>
  </div>
  -->
- </fieldset>
  </div>
  </div>
  </div>

@@ -262,7 +262,7 @@
  </div>
  <div class="checkbox">
  <input type="checkbox" id="music_logging_enable" name="music_logging_enable" value="1" ${config['music_logging_enable']}> Log Music
- <p class="help-block">Keep records of all audio items played from your Plex Media Server. Experimental.</p>
+ <p class="help-block">Keep records of all audio items played from your Plex Media Server. VERY experimental.</p>
  </div>
  </fieldset>
  <div class="wellheader">
@@ -13,9 +13,6 @@
  # You should have received a copy of the GNU General Public License
  # along with PlexPy. If not, see <http://www.gnu.org/licenses/>.
  
- # NZBGet support added by CurlyMo <curlymoo1@gmail.com> as a part of
- # XBian - XBMC on the Raspberry Pi
- 
  import os
  import sys
  import subprocess
@@ -117,7 +117,7 @@ _CONFIG_DEFINITIONS = {
  'TWITTER_USERNAME': (str, 'Twitter', ''),
  'UPDATE_DB_INTERVAL': (int, 'General', 24),
  'VERIFY_SSL_CERT': (bool_int, 'Advanced', 1),
- 'VIDEO_LOGGING_ENABLE': (int, 'Monitoring', 0),
+ 'VIDEO_LOGGING_ENABLE': (int, 'Monitoring', 1),
  'XBMC_ENABLED': (int, 'XBMC', 0),
  'XBMC_HOST': (str, 'XBMC', ''),
  'XBMC_PASSWORD': (str, 'XBMC', ''),
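For context, a standalone Python 3 sketch of how a (type, section, default) definition such as VIDEO_LOGGING_ENABLE can be resolved against an INI file, so flipping the default to 1 only affects installs that never wrote the key. This uses the standard library ConfigParser purely as an illustration; it is not PlexPy's actual config loader.

# Illustrative only: PlexPy's real config loader is not shown in this diff.
from configparser import ConfigParser

CONFIG_DEFINITIONS = {
    # key: (type, section, default), mirroring the _CONFIG_DEFINITIONS entries above
    'VIDEO_LOGGING_ENABLE': (int, 'Monitoring', 1),
}

def resolve(parser, key):
    """Return the configured value, falling back to the definition default."""
    key_type, section, default = CONFIG_DEFINITIONS[key]
    if parser.has_option(section, key.lower()):
        return key_type(parser.get(section, key.lower()))
    return default

parser = ConfigParser()
parser.read('config.ini')                         # a missing file is fine; defaults apply
print(resolve(parser, 'VIDEO_LOGGING_ENABLE'))    # -> 1 unless overridden in config.ini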
plexpy/database.py (new file, 129 lines)

@@ -0,0 +1,129 @@
+ # This file is part of PlexPy.
+ #
+ # PlexPy is free software: you can redistribute it and/or modify
+ # it under the terms of the GNU General Public License as published by
+ # the Free Software Foundation, either version 3 of the License, or
+ # (at your option) any later version.
+ #
+ # PlexPy is distributed in the hope that it will be useful,
+ # but WITHOUT ANY WARRANTY; without even the implied warranty of
+ # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ # GNU General Public License for more details.
+ #
+ # You should have received a copy of the GNU General Public License
+ # along with PlexPy. If not, see <http://www.gnu.org/licenses/>.
+ 
+ from plexpy import logger
+ 
+ import sqlite3
+ import os
+ import plexpy
+ 
+ 
+ def drop_session_db():
+     monitor_db = MonitorDatabase()
+     monitor_db.action('DROP TABLE sessions')
+ 
+ def clear_history_tables():
+     logger.debug(u"PlexPy Database :: Deleting all session_history records... No turning back now bub.")
+     monitor_db = MonitorDatabase()
+     monitor_db.action('DELETE FROM session_history')
+     monitor_db.action('DELETE FROM session_history_media_info')
+     monitor_db.action('DELETE FROM session_history_metadata')
+     monitor_db.action('VACUUM;')
+ 
+ def db_filename(filename="plexpy.db"):
+ 
+     return os.path.join(plexpy.DATA_DIR, filename)
+ 
+ def get_cache_size():
+     # This will protect against typecasting problems produced by empty string and None settings
+     if not plexpy.CONFIG.CACHE_SIZEMB:
+         # sqlite will work with this (very slowly)
+         return 0
+     return int(plexpy.CONFIG.CACHE_SIZEMB)
+ 
+ 
+ class MonitorDatabase(object):
+ 
+     def __init__(self, filename='plexpy.db'):
+         self.filename = filename
+         self.connection = sqlite3.connect(db_filename(filename), timeout=20)
+         # Don't wait for the disk to finish writing
+         self.connection.execute("PRAGMA synchronous = OFF")
+         # Journal disabled since we never do rollbacks
+         self.connection.execute("PRAGMA journal_mode = %s" % plexpy.CONFIG.JOURNAL_MODE)
+         # 64mb of cache memory, probably need to make it user configurable
+         self.connection.execute("PRAGMA cache_size=-%s" % (get_cache_size() * 1024))
+         self.connection.row_factory = sqlite3.Row
+ 
+     def action(self, query, args=None, return_last_id=False):
+ 
+         if query is None:
+             return
+ 
+         sql_result = None
+ 
+         try:
+             with self.connection as c:
+                 if args is None:
+                     sql_result = c.execute(query)
+                 else:
+                     sql_result = c.execute(query, args)
+ 
+         except sqlite3.OperationalError, e:
+             if "unable to open database file" in e.message or "database is locked" in e.message:
+                 logger.warn('Database Error: %s', e)
+             else:
+                 logger.error('Database error: %s', e)
+                 raise
+ 
+         except sqlite3.DatabaseError, e:
+             logger.error('Fatal Error executing %s :: %s', query, e)
+             raise
+ 
+         return sql_result
+ 
+     def select(self, query, args=None):
+ 
+         sql_results = self.action(query, args).fetchall()
+ 
+         if sql_results is None or sql_results == [None]:
+             return []
+ 
+         return sql_results
+ 
+     def select_single(self, query, args=None):
+ 
+         sql_results = self.action(query, args).fetchone()[0]
+ 
+         if sql_results is None or sql_results == "":
+             return ""
+ 
+         return sql_results
+ 
+     def upsert(self, table_name, value_dict, key_dict):
+ 
+         trans_type = 'update'
+         changes_before = self.connection.total_changes
+ 
+         gen_params = lambda my_dict: [x + " = ?" for x in my_dict.keys()]
+ 
+         update_query = "UPDATE " + table_name + " SET " + ", ".join(gen_params(value_dict)) + \
+             " WHERE " + " AND ".join(gen_params(key_dict))
+ 
+         self.action(update_query, value_dict.values() + key_dict.values())
+ 
+         if self.connection.total_changes == changes_before:
+             trans_type = 'insert'
+             insert_query = (
+                 "INSERT INTO " + table_name + " (" + ", ".join(value_dict.keys() + key_dict.keys()) + ")" +
+                 " VALUES (" + ", ".join(["?"] * len(value_dict.keys() + key_dict.keys())) + ")"
+             )
+             try:
+                 self.action(insert_query, value_dict.values() + key_dict.values())
+             except sqlite3.IntegrityError:
+                 logger.info('Queries failed: %s and %s', update_query, insert_query)
+ 
+         # We want to know if it was an update or insert
+         return trans_type
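The upsert() method above uses an update-then-insert pattern rather than SQLite's INSERT OR REPLACE: it issues the UPDATE keyed on key_dict first, and only falls back to an INSERT when connection.total_changes shows the UPDATE touched nothing. A standalone Python 3 sketch of the same idea follows; the users table and its columns are invented for illustration.

import sqlite3

def upsert(connection, table_name, value_dict, key_dict):
    """Update the row matching key_dict, or insert a new one. Returns 'update' or 'insert'."""
    trans_type = 'update'
    changes_before = connection.total_changes

    set_clause = ', '.join('%s = ?' % k for k in value_dict)
    where_clause = ' AND '.join('%s = ?' % k for k in key_dict)
    with connection:
        connection.execute('UPDATE %s SET %s WHERE %s' % (table_name, set_clause, where_clause),
                           list(value_dict.values()) + list(key_dict.values()))

    if connection.total_changes == changes_before:
        trans_type = 'insert'
        columns = list(value_dict) + list(key_dict)
        placeholders = ', '.join('?' * len(columns))
        with connection:
            connection.execute('INSERT INTO %s (%s) VALUES (%s)' % (table_name, ', '.join(columns), placeholders),
                               list(value_dict.values()) + list(key_dict.values()))
    return trans_type

conn = sqlite3.connect(':memory:')
conn.execute('CREATE TABLE users (username TEXT PRIMARY KEY, friendly_name TEXT)')
print(upsert(conn, 'users', {'friendly_name': 'Bob'}, {'username': 'bob'}))    # -> insert
print(upsert(conn, 'users', {'friendly_name': 'Bobby'}, {'username': 'bob'}))  # -> update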
@@ -13,11 +13,9 @@
  # You should have received a copy of the GNU General Public License
  # along with PlexPy. If not, see <http://www.gnu.org/licenses/>.
  
- from plexpy import logger, helpers, datatables_new, common, monitor
+ from plexpy import logger, datatables, common, database
- from xml.dom import minidom
  
  import datetime
- import plexpy
  
  
  class DataFactory(object):

@@ -29,7 +27,7 @@ class DataFactory(object):
  pass
  
  def get_user_list(self, start='', length='', kwargs=None):
- data_tables = datatables_new.DataTables()
+ data_tables = datatables.DataTables()
  
  start = int(start)
  length = int(length)

@@ -114,7 +112,7 @@ class DataFactory(object):
  return dict
  
  def get_history(self, start='', length='', kwargs=None, custom_where=''):
- data_tables = datatables_new.DataTables()
+ data_tables = datatables.DataTables()
  
  start = int(start)
  length = int(length)

@@ -234,7 +232,7 @@ class DataFactory(object):
  return dict
  
  def get_user_unique_ips(self, start='', length='', kwargs=None, custom_where=''):
- data_tables = datatables_new.DataTables()
+ data_tables = datatables.DataTables()
  
  start = int(start)
  length = int(length)

@@ -311,7 +309,7 @@ class DataFactory(object):
  if friendly_name.strip() == '':
  friendly_name = None
  
- monitor_db = monitor.MonitorDatabase()
+ monitor_db = database.MonitorDatabase()
  
  control_value_dict = {"username": user}
  new_value_dict = {"friendly_name": friendly_name}

@@ -323,7 +321,7 @@ class DataFactory(object):
  def get_user_friendly_name(self, user=None):
  if user:
  try:
- monitor_db = monitor.MonitorDatabase()
+ monitor_db = database.MonitorDatabase()
  query = 'select friendly_name FROM users WHERE username = ?'
  result = monitor_db.select_single(query, args=[user])
  if result:

@@ -338,7 +336,7 @@ class DataFactory(object):
  def get_user_id(self, user=None):
  if user:
  try:
- monitor_db = monitor.MonitorDatabase()
+ monitor_db = database.MonitorDatabase()
  query = 'select user_id FROM users WHERE username = ?'
  result = monitor_db.select_single(query, args=[user])
  if result:

@@ -352,7 +350,7 @@ class DataFactory(object):
  
  def get_user_details(self, user=None, user_id=None):
  try:
- monitor_db = monitor.MonitorDatabase()
+ monitor_db = database.MonitorDatabase()
  
  if user:
  query = 'SELECT user_id, username, friendly_name, email, ' \

@@ -399,7 +397,7 @@ class DataFactory(object):
  return None
  
  def get_home_stats(self, time_range='30'):
- monitor_db = monitor.MonitorDatabase()
+ monitor_db = database.MonitorDatabase()
  
  if not time_range.isdigit():
  time_range = '30'

@@ -568,7 +566,7 @@ class DataFactory(object):
  return home_stats
  
  def get_stream_details(self, row_id=None):
- monitor_db = monitor.MonitorDatabase()
+ monitor_db = database.MonitorDatabase()
  
  if row_id:
  query = 'SELECT container, bitrate, video_resolution, width, height, aspect_ratio, video_framerate, ' \

@@ -611,7 +609,7 @@ class DataFactory(object):
  return stream_output
  
  def get_recently_watched(self, user=None, limit='10'):
- monitor_db = monitor.MonitorDatabase()
+ monitor_db = database.MonitorDatabase()
  recently_watched = []
  
  if not limit.isdigit():

@@ -657,7 +655,7 @@ class DataFactory(object):
  return recently_watched
  
  def get_user_watch_time_stats(self, user=None):
- monitor_db = monitor.MonitorDatabase()
+ monitor_db = database.MonitorDatabase()
  
  time_queries = [1, 7, 30, 0]
  user_watch_time_stats = []

@@ -697,7 +695,7 @@ class DataFactory(object):
  return user_watch_time_stats
  
  def get_user_platform_stats(self, user=None):
- monitor_db = monitor.MonitorDatabase()
+ monitor_db = database.MonitorDatabase()
  
  platform_stats = []
  result_id = 0

@@ -725,7 +723,7 @@ class DataFactory(object):
  return platform_stats
  
  def get_total_plays_per_day(self, time_range='30'):
- monitor_db = monitor.MonitorDatabase()
+ monitor_db = database.MonitorDatabase()
  
  if not time_range.isdigit():
  time_range = '30'

@@ -780,7 +778,7 @@ class DataFactory(object):
  return output
  
  def get_total_plays_per_dayofweek(self, time_range='30'):
- monitor_db = monitor.MonitorDatabase()
+ monitor_db = database.MonitorDatabase()
  
  if not time_range.isdigit():
  time_range = '30'

@@ -829,7 +827,7 @@ class DataFactory(object):
  return output
  
  def get_total_plays_per_hourofday(self, time_range='30'):
- monitor_db = monitor.MonitorDatabase()
+ monitor_db = database.MonitorDatabase()
  
  if not time_range.isdigit():
  time_range = '30'
@@ -15,7 +15,7 @@
  
  # TODO: Implement with sqlite3 directly instead of using db class
  
- from plexpy import logger, helpers, db
+ from plexpy import logger, helpers, database
  
  import re
  

@@ -26,7 +26,8 @@ class DataTables(object):
  """
  
  def __init__(self):
- self.ssp_db = db.DBConnection()
+ self.ssp_db = database.MonitorDatabase()
+ logger.debug(u"Database initilised!")
  
  # TODO: Pass all parameters via kwargs
  def ssp_query(self, table_name,

@@ -57,12 +58,20 @@ class DataTables(object):
  join = ''
  
  if join_type:
- if join_type.upper() == 'LEFT OUTER JOIN':
- join = 'LEFT OUTER JOIN %s ON %s = %s' % (join_table, join_evals[0], join_evals[1])
- elif join_type.upper() == 'JOIN' or join_type.upper() == 'INNER JOIN':
- join = 'INNER JOIN %s ON %s = %s' % (join_table, join_evals[0], join_evals[1])
- else:
- join = ''
+ join_iter = 0
+ for join_type_item in join_type:
+ if join_type_item.upper() == 'LEFT OUTER JOIN':
+ join_item = 'LEFT OUTER JOIN %s ON %s = %s ' % \
+ (join_table[join_iter], join_evals[join_iter][0], join_evals[join_iter][1])
+ elif join_type_item.upper() == 'JOIN' or join_type.upper() == 'INNER JOIN':
+ join_item = 'INNER JOIN %s ON %s = %s ' % \
+ (join_table[join_iter], join_evals[join_iter][0], join_evals[join_iter][1])
+ else:
+ join_item = ''
+ join_iter += 1
+ join += join_item
+ 
+ logger.debug(u"join string = %s" % join)
  
  # TODO: custom_where is ugly and causes issues with reported total results
  if custom_where != '':

@@ -87,7 +96,7 @@ class DataTables(object):
  % (column_data['column_string'], table_name, join, where,
  order, custom_where)
  
- # logger.debug(u"Query string: %s" % query)
+ logger.debug(u"Query string: %s" % query)
  filtered = self.ssp_db.select(query)
  
  if search_value == '':
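The rework above turns join_type, join_table and join_evals from a single join description into parallel lists, one entry per join, and concatenates the generated clauses. A standalone Python 3 sketch of that loop follows (table and column names are invented); note that the committed elif branch still calls join_type.upper() on the whole list rather than join_type_item.upper(), which this sketch normalizes.

def build_join_clause(join_types, join_tables, join_evals):
    """Build a SQL join clause from parallel lists, mirroring the loop added above."""
    join = ''
    for i, join_type in enumerate(join_types):
        if join_type.upper() == 'LEFT OUTER JOIN':
            template = 'LEFT OUTER JOIN %s ON %s = %s '
        elif join_type.upper() in ('JOIN', 'INNER JOIN'):
            template = 'INNER JOIN %s ON %s = %s '
        else:
            continue  # an unknown join type contributes nothing
        join += template % (join_tables[i], join_evals[i][0], join_evals[i][1])
    return join

print(build_join_clause(
    ['LEFT OUTER JOIN', 'JOIN'],
    ['session_history_metadata', 'users'],
    [('session_history.id', 'session_history_metadata.id'),
     ('session_history.user_id', 'users.user_id')]))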
@@ -1,222 +0,0 @@ (deleted file)
# This file is part of PlexPy.
|
|
||||||
#
|
|
||||||
# PlexPy is free software: you can redistribute it and/or modify
|
|
||||||
# it under the terms of the GNU General Public License as published by
|
|
||||||
# the Free Software Foundation, either version 3 of the License, or
|
|
||||||
# (at your option) any later version.
|
|
||||||
#
|
|
||||||
# PlexPy is distributed in the hope that it will be useful,
|
|
||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
# GNU General Public License for more details.
|
|
||||||
#
|
|
||||||
# You should have received a copy of the GNU General Public License
|
|
||||||
# along with PlexPy. If not, see <http://www.gnu.org/licenses/>.
|
|
||||||
|
|
||||||
# TODO: Implement with sqlite3 directly instead of using db class
|
|
||||||
|
|
||||||
from plexpy import logger, helpers, monitor
|
|
||||||
|
|
||||||
import re
|
|
||||||
|
|
||||||
|
|
||||||
class DataTables(object):
|
|
||||||
"""
|
|
||||||
Server side processing for Datatables
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self):
|
|
||||||
self.ssp_db = monitor.MonitorDatabase()
|
|
||||||
logger.debug(u"Database initilised!")
|
|
||||||
|
|
||||||
# TODO: Pass all parameters via kwargs
|
|
||||||
def ssp_query(self, table_name,
|
|
||||||
columns=[],
|
|
||||||
start=0,
|
|
||||||
length=0,
|
|
||||||
order_column=0,
|
|
||||||
order_dir='asc',
|
|
||||||
search_value='',
|
|
||||||
search_regex='',
|
|
||||||
custom_where='',
|
|
||||||
group_by='',
|
|
||||||
join_type=None,
|
|
||||||
join_table=None,
|
|
||||||
join_evals=None,
|
|
||||||
kwargs=None):
|
|
||||||
|
|
||||||
parameters = self.process_kwargs(kwargs)
|
|
||||||
|
|
||||||
if group_by != '':
|
|
||||||
grouping = True
|
|
||||||
else:
|
|
||||||
grouping = False
|
|
||||||
|
|
||||||
column_data = self.extract_columns(columns)
|
|
||||||
where = self.construct_where(column_data, search_value, grouping, parameters)
|
|
||||||
order = self.construct_order(column_data, order_column, order_dir, parameters, table_name, grouping)
|
|
||||||
join = ''
|
|
||||||
|
|
||||||
if join_type:
|
|
||||||
join_iter = 0
|
|
||||||
for join_type_item in join_type:
|
|
||||||
if join_type_item.upper() == 'LEFT OUTER JOIN':
|
|
||||||
join_item = 'LEFT OUTER JOIN %s ON %s = %s ' % \
|
|
||||||
(join_table[join_iter], join_evals[join_iter][0], join_evals[join_iter][1])
|
|
||||||
elif join_type_item.upper() == 'JOIN' or join_type.upper() == 'INNER JOIN':
|
|
||||||
join_item = 'INNER JOIN %s ON %s = %s ' % \
|
|
||||||
(join_table[join_iter], join_evals[join_iter][0], join_evals[join_iter][1])
|
|
||||||
else:
|
|
||||||
join_item = ''
|
|
||||||
join_iter += 1
|
|
||||||
join += join_item
|
|
||||||
|
|
||||||
logger.debug(u"join string = %s" % join)
|
|
||||||
|
|
||||||
# TODO: custom_where is ugly and causes issues with reported total results
|
|
||||||
if custom_where != '':
|
|
||||||
custom_where = 'WHERE (' + custom_where + ')'
|
|
||||||
|
|
||||||
if grouping:
|
|
||||||
if custom_where == '':
|
|
||||||
query = 'SELECT * FROM (SELECT %s FROM %s %s GROUP BY %s) %s %s' \
|
|
||||||
% (column_data['column_string'], table_name, join, group_by,
|
|
||||||
where, order)
|
|
||||||
else:
|
|
||||||
query = 'SELECT * FROM (SELECT * FROM (SELECT %s FROM %s %s GROUP BY %s) %s %s) %s' \
|
|
||||||
% (column_data['column_string'], table_name, join, group_by,
|
|
||||||
where, order, custom_where)
|
|
||||||
else:
|
|
||||||
if custom_where == '':
|
|
||||||
query = 'SELECT %s FROM %s %s %s %s' \
|
|
||||||
% (column_data['column_string'], table_name, join, where,
|
|
||||||
order)
|
|
||||||
else:
|
|
||||||
query = 'SELECT * FROM (SELECT %s FROM %s %s %s %s) %s' \
|
|
||||||
% (column_data['column_string'], table_name, join, where,
|
|
||||||
order, custom_where)
|
|
||||||
|
|
||||||
logger.debug(u"Query string: %s" % query)
|
|
||||||
filtered = self.ssp_db.select(query)
|
|
||||||
|
|
||||||
if search_value == '':
|
|
||||||
totalcount = len(filtered)
|
|
||||||
else:
|
|
||||||
totalcount = self.ssp_db.select('SELECT COUNT(*) from %s' % table_name)[0][0]
|
|
||||||
|
|
||||||
result = filtered[start:(start + length)]
|
|
||||||
output = {'result': result,
|
|
||||||
'filteredCount': len(filtered),
|
|
||||||
'totalCount': totalcount}
|
|
||||||
|
|
||||||
return output
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def construct_order(column_data, order_column, order_dir, parameters=None, table_name=None, grouped=False):
|
|
||||||
order = ''
|
|
||||||
if grouped:
|
|
||||||
sort_col = column_data['column_named'][order_column]
|
|
||||||
else:
|
|
||||||
sort_col = column_data['column_order'][order_column]
|
|
||||||
if parameters:
|
|
||||||
for parameter in parameters:
|
|
||||||
if parameter['data'] != '':
|
|
||||||
if int(order_column) == parameter['index']:
|
|
||||||
if parameter['data'] in column_data['column_named'] and parameter['orderable'] == 'true':
|
|
||||||
if table_name and table_name != '':
|
|
||||||
order = 'ORDER BY %s COLLATE NOCASE %s' % (sort_col, order_dir)
|
|
||||||
else:
|
|
||||||
order = 'ORDER BY %s COLLATE NOCASE %s' % (sort_col, order_dir)
|
|
||||||
else:
|
|
||||||
order = 'ORDER BY %s COLLATE NOCASE %s' % (sort_col, order_dir)
|
|
||||||
|
|
||||||
return order
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def construct_where(column_data, search_value='', grouping=False, parameters=None):
|
|
||||||
if search_value != '':
|
|
||||||
where = 'WHERE '
|
|
||||||
if parameters:
|
|
||||||
for column in column_data['column_named']:
|
|
||||||
search_skip = False
|
|
||||||
for parameter in parameters:
|
|
||||||
if column.rpartition('.')[-1] in parameter['data']:
|
|
||||||
if parameter['searchable'] == 'true':
|
|
||||||
where += column + ' LIKE "%' + search_value + '%" OR '
|
|
||||||
search_skip = True
|
|
||||||
else:
|
|
||||||
search_skip = True
|
|
||||||
|
|
||||||
if not search_skip:
|
|
||||||
where += column + ' LIKE "%' + search_value + '%" OR '
|
|
||||||
else:
|
|
||||||
for column in column_data['column_named']:
|
|
||||||
where += column + ' LIKE "%' + search_value + '%" OR '
|
|
||||||
|
|
||||||
# TODO: This will break the query if all parameters are excluded
|
|
||||||
where = where[:-4]
|
|
||||||
|
|
||||||
return where
|
|
||||||
else:
|
|
||||||
where = ''
|
|
||||||
|
|
||||||
return where
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def extract_columns(columns=[]):
|
|
||||||
columns_string = ''
|
|
||||||
columns_literal = []
|
|
||||||
columns_named = []
|
|
||||||
columns_order = []
|
|
||||||
|
|
||||||
for column in columns:
|
|
||||||
columns_string += column
|
|
||||||
columns_string += ', '
|
|
||||||
# TODO: make this case insensitive
|
|
||||||
if ' as ' in column:
|
|
||||||
columns_literal.append(column.rpartition(' as ')[0])
|
|
||||||
columns_named.append(column.rpartition(' as ')[-1].rpartition('.')[-1])
|
|
||||||
columns_order.append(column.rpartition(' as ')[-1])
|
|
||||||
else:
|
|
||||||
columns_literal.append(column)
|
|
||||||
columns_named.append(column.rpartition('.')[-1])
|
|
||||||
columns_order.append(column)
|
|
||||||
|
|
||||||
columns_string = columns_string[:-2]
|
|
||||||
|
|
||||||
column_data = {'column_string': columns_string,
|
|
||||||
'column_literal': columns_literal,
|
|
||||||
'column_named': columns_named,
|
|
||||||
'column_order': columns_order
|
|
||||||
}
|
|
||||||
|
|
||||||
return column_data
|
|
||||||
|
|
||||||
# TODO: Fix this method. Should not break if kwarg list is not sorted.
|
|
||||||
def process_kwargs(self, kwargs):
|
|
||||||
|
|
||||||
column_parameters = []
|
|
||||||
|
|
||||||
for kwarg in sorted(kwargs):
|
|
||||||
if re.search(r"\[(\w+)\]", kwarg) and kwarg[:7] == 'columns':
|
|
||||||
parameters = re.findall(r"\[(\w+)\]", kwarg)
|
|
||||||
array_index = ''
|
|
||||||
for parameter in parameters:
|
|
||||||
pass_complete = False
|
|
||||||
if parameter.isdigit():
|
|
||||||
array_index = parameter
|
|
||||||
if parameter == 'data':
|
|
||||||
data = kwargs.get('columns[' + array_index + '][data]', "")
|
|
||||||
if parameter == 'orderable':
|
|
||||||
orderable = kwargs.get('columns[' + array_index + '][orderable]', "")
|
|
||||||
if parameter == 'searchable':
|
|
||||||
searchable = kwargs.get('columns[' + array_index + '][searchable]', "")
|
|
||||||
pass_complete = True
|
|
||||||
if pass_complete:
|
|
||||||
row = {'index': int(array_index),
|
|
||||||
'data': data,
|
|
||||||
'searchable': searchable,
|
|
||||||
'orderable': orderable}
|
|
||||||
column_parameters.append(row)
|
|
||||||
|
|
||||||
return sorted(column_parameters, key=lambda i: i['index'])
|
|
plexpy/db.py (deleted, 121 lines)
@@ -1,121 +0,0 @@
|
||||||
# This file is part of PlexPy.
|
|
||||||
#
|
|
||||||
# PlexPy is free software: you can redistribute it and/or modify
|
|
||||||
# it under the terms of the GNU General Public License as published by
|
|
||||||
# the Free Software Foundation, either version 3 of the License, or
|
|
||||||
# (at your option) any later version.
|
|
||||||
#
|
|
||||||
# PlexPy is distributed in the hope that it will be useful,
|
|
||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
# GNU General Public License for more details.
|
|
||||||
#
|
|
||||||
# You should have received a copy of the GNU General Public License
|
|
||||||
# along with PlexPy. If not, see <http://www.gnu.org/licenses/>.
|
|
||||||
|
|
||||||
#####################################
|
|
||||||
## Stolen from Sick-Beard's db.py ##
|
|
||||||
#####################################
|
|
||||||
|
|
||||||
from __future__ import with_statement
|
|
||||||
|
|
||||||
import os
|
|
||||||
import sqlite3
|
|
||||||
|
|
||||||
import plexpy
|
|
||||||
|
|
||||||
from plexpy import logger
|
|
||||||
|
|
||||||
|
|
||||||
def dbFilename(filename):
|
|
||||||
|
|
||||||
return os.path.join(plexpy.DATA_DIR, filename)
|
|
||||||
|
|
||||||
|
|
||||||
def getCacheSize():
|
|
||||||
#this will protect against typecasting problems produced by empty string and None settings
|
|
||||||
if not plexpy.CONFIG.CACHE_SIZEMB:
|
|
||||||
#sqlite will work with this (very slowly)
|
|
||||||
return 0
|
|
||||||
return int(plexpy.CONFIG.CACHE_SIZEMB)
|
|
||||||
|
|
||||||
|
|
||||||
class DBConnection:
|
|
||||||
|
|
||||||
def __init__(self):
|
|
||||||
|
|
||||||
self.filename = plexpy.CONFIG.PLEXWATCH_DATABASE
|
|
||||||
#self.connection = sqlite3.connect(dbFilename(plexpy.CONFIG.PLEXWATCH_DATABASE), timeout=20)
|
|
||||||
self.connection = sqlite3.connect(plexpy.CONFIG.PLEXWATCH_DATABASE, timeout=20)
|
|
||||||
#don't wait for the disk to finish writing
|
|
||||||
self.connection.execute("PRAGMA synchronous = OFF")
|
|
||||||
#journal disabled since we never do rollbacks
|
|
||||||
self.connection.execute("PRAGMA journal_mode = %s" % plexpy.CONFIG.JOURNAL_MODE)
|
|
||||||
#64mb of cache memory,probably need to make it user configurable
|
|
||||||
self.connection.execute("PRAGMA cache_size=-%s" % (getCacheSize() * 1024))
|
|
||||||
self.connection.row_factory = sqlite3.Row
|
|
||||||
|
|
||||||
def action(self, query, args=None):
|
|
||||||
|
|
||||||
if query is None:
|
|
||||||
return
|
|
||||||
|
|
||||||
sqlResult = None
|
|
||||||
|
|
||||||
try:
|
|
||||||
with self.connection as c:
|
|
||||||
if args is None:
|
|
||||||
sqlResult = c.execute(query)
|
|
||||||
else:
|
|
||||||
sqlResult = c.execute(query, args)
|
|
||||||
|
|
||||||
except sqlite3.OperationalError, e:
|
|
||||||
if "unable to open database file" in e.message or "database is locked" in e.message:
|
|
||||||
logger.warn('Database Error: %s', e)
|
|
||||||
else:
|
|
||||||
logger.error('Database error: %s', e)
|
|
||||||
raise
|
|
||||||
|
|
||||||
except sqlite3.DatabaseError, e:
|
|
||||||
logger.error('Fatal Error executing %s :: %s', query, e)
|
|
||||||
raise
|
|
||||||
|
|
||||||
return sqlResult
|
|
||||||
|
|
||||||
def select(self, query, args=None):
|
|
||||||
|
|
||||||
sqlResults = self.action(query, args).fetchall()
|
|
||||||
|
|
||||||
if sqlResults is None or sqlResults == [None]:
|
|
||||||
return []
|
|
||||||
|
|
||||||
return sqlResults
|
|
||||||
|
|
||||||
def select_single(self, query, args=None):
|
|
||||||
|
|
||||||
sqlResult = self.action(query, args).fetchone()[0]
|
|
||||||
|
|
||||||
if sqlResult is None or sqlResult == "":
|
|
||||||
return ""
|
|
||||||
|
|
||||||
return sqlResult
|
|
||||||
|
|
||||||
def upsert(self, tableName, valueDict, keyDict):
|
|
||||||
|
|
||||||
changesBefore = self.connection.total_changes
|
|
||||||
|
|
||||||
genParams = lambda myDict: [x + " = ?" for x in myDict.keys()]
|
|
||||||
|
|
||||||
update_query = "UPDATE " + tableName + " SET " + ", ".join(genParams(valueDict)) + " WHERE " + " AND ".join(genParams(keyDict))
|
|
||||||
|
|
||||||
self.action(update_query, valueDict.values() + keyDict.values())
|
|
||||||
|
|
||||||
if self.connection.total_changes == changesBefore:
|
|
||||||
insert_query = (
|
|
||||||
"INSERT INTO " + tableName + " (" + ", ".join(valueDict.keys() + keyDict.keys()) + ")" +
|
|
||||||
" VALUES (" + ", ".join(["?"] * len(valueDict.keys() + keyDict.keys())) + ")"
|
|
||||||
)
|
|
||||||
try:
|
|
||||||
self.action(insert_query, valueDict.values() + keyDict.values())
|
|
||||||
except sqlite3.IntegrityError:
|
|
||||||
logger.info('Queries failed: %s and %s', update_query, insert_query)
|
|
|
@@ -13,10 +13,8 @@
  # You should have received a copy of the GNU General Public License
  # along with PlexPy. If not, see <http://www.gnu.org/licenses/>.
  
- from plexpy import logger, helpers, pmsconnect, notification_handler, config, log_reader, common
+ from plexpy import logger, pmsconnect, notification_handler, log_reader, common, database
  
- import os
- import sqlite3
  import threading
  import plexpy
  import re

@@ -29,7 +27,7 @@ def check_active_sessions():
  with monitor_lock:
  pms_connect = pmsconnect.PmsConnect()
  session_list = pms_connect.get_current_activity()
- monitor_db = MonitorDatabase()
+ monitor_db = database.MonitorDatabase()
  monitor_process = MonitorProcessing()
  # logger.debug(u"PlexPy Monitor :: Checking for active streams.")
  
@ -126,119 +124,11 @@ def check_active_sessions():
|
||||||
else:
|
else:
|
||||||
logger.debug(u"PlexPy Monitor :: Unable to read session list.")
|
logger.debug(u"PlexPy Monitor :: Unable to read session list.")
|
||||||
|
|
||||||
def drop_session_db():
|
|
||||||
monitor_db = MonitorDatabase()
|
|
||||||
monitor_db.action('DROP TABLE sessions')
|
|
||||||
|
|
||||||
def clear_history_tables():
|
|
||||||
logger.debug(u"PlexPy Monitor :: Deleting all session_history records... No turning back now bub.")
|
|
||||||
monitor_db = MonitorDatabase()
|
|
||||||
monitor_db.action('DELETE FROM session_history')
|
|
||||||
monitor_db.action('DELETE FROM session_history_media_info')
|
|
||||||
monitor_db.action('DELETE FROM session_history_metadata')
|
|
||||||
monitor_db.action('VACUUM;')
|
|
||||||
|
|
||||||
def db_filename(filename="plexpy.db"):
|
|
||||||
|
|
||||||
return os.path.join(plexpy.DATA_DIR, filename)
|
|
||||||
|
|
||||||
def get_cache_size():
|
|
||||||
# This will protect against typecasting problems produced by empty string and None settings
|
|
||||||
if not plexpy.CONFIG.CACHE_SIZEMB:
|
|
||||||
# sqlite will work with this (very slowly)
|
|
||||||
return 0
|
|
||||||
return int(plexpy.CONFIG.CACHE_SIZEMB)
|
|
||||||
|
|
||||||
|
|
||||||
class MonitorDatabase(object):
|
|
||||||
|
|
||||||
def __init__(self, filename='plexpy.db'):
|
|
||||||
self.filename = filename
|
|
||||||
self.connection = sqlite3.connect(db_filename(filename), timeout=20)
|
|
||||||
# Don't wait for the disk to finish writing
|
|
||||||
self.connection.execute("PRAGMA synchronous = OFF")
|
|
||||||
# Journal disabled since we never do rollbacks
|
|
||||||
self.connection.execute("PRAGMA journal_mode = %s" % plexpy.CONFIG.JOURNAL_MODE)
|
|
||||||
# 64mb of cache memory, probably need to make it user configurable
|
|
||||||
self.connection.execute("PRAGMA cache_size=-%s" % (get_cache_size() * 1024))
|
|
||||||
self.connection.row_factory = sqlite3.Row
|
|
||||||
|
|
||||||
def action(self, query, args=None, return_last_id=False):
|
|
||||||
|
|
||||||
if query is None:
|
|
||||||
return
|
|
||||||
|
|
||||||
sql_result = None
|
|
||||||
|
|
||||||
try:
|
|
||||||
with self.connection as c:
|
|
||||||
if args is None:
|
|
||||||
sql_result = c.execute(query)
|
|
||||||
else:
|
|
||||||
sql_result = c.execute(query, args)
|
|
||||||
|
|
||||||
except sqlite3.OperationalError, e:
|
|
||||||
if "unable to open database file" in e.message or "database is locked" in e.message:
|
|
||||||
logger.warn('Database Error: %s', e)
|
|
||||||
else:
|
|
||||||
logger.error('Database error: %s', e)
|
|
||||||
raise
|
|
||||||
|
|
||||||
except sqlite3.DatabaseError, e:
|
|
||||||
logger.error('Fatal Error executing %s :: %s', query, e)
|
|
||||||
raise
|
|
||||||
|
|
||||||
return sql_result
|
|
||||||
|
|
||||||
def select(self, query, args=None):
|
|
||||||
|
|
||||||
sql_results = self.action(query, args).fetchall()
|
|
||||||
|
|
||||||
if sql_results is None or sql_results == [None]:
|
|
||||||
return []
|
|
||||||
|
|
||||||
return sql_results
|
|
||||||
|
|
||||||
def select_single(self, query, args=None):
|
|
||||||
|
|
||||||
sql_results = self.action(query, args).fetchone()[0]
|
|
||||||
|
|
||||||
if sql_results is None or sql_results == "":
|
|
||||||
return ""
|
|
||||||
|
|
||||||
return sql_results
|
|
||||||
|
|
||||||
def upsert(self, table_name, value_dict, key_dict):
|
|
||||||
|
|
||||||
trans_type = 'update'
|
|
||||||
changes_before = self.connection.total_changes
|
|
||||||
|
|
||||||
gen_params = lambda my_dict: [x + " = ?" for x in my_dict.keys()]
|
|
||||||
|
|
||||||
update_query = "UPDATE " + table_name + " SET " + ", ".join(gen_params(value_dict)) + \
|
|
||||||
" WHERE " + " AND ".join(gen_params(key_dict))
|
|
||||||
|
|
||||||
self.action(update_query, value_dict.values() + key_dict.values())
|
|
||||||
|
|
||||||
if self.connection.total_changes == changes_before:
|
|
||||||
trans_type = 'insert'
|
|
||||||
insert_query = (
|
|
||||||
"INSERT INTO " + table_name + " (" + ", ".join(value_dict.keys() + key_dict.keys()) + ")" +
|
|
||||||
" VALUES (" + ", ".join(["?"] * len(value_dict.keys() + key_dict.keys())) + ")"
|
|
||||||
)
|
|
||||||
try:
|
|
||||||
self.action(insert_query, value_dict.values() + key_dict.values())
|
|
||||||
except sqlite3.IntegrityError:
|
|
||||||
logger.info('Queries failed: %s and %s', update_query, insert_query)
|
|
||||||
|
|
||||||
# We want to know if it was an update or insert
|
|
||||||
return trans_type
|
|
||||||
|
|
||||||
|
|
||||||
class MonitorProcessing(object):
|
class MonitorProcessing(object):
|
||||||
|
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
self.db = MonitorDatabase()
|
self.db = database.MonitorDatabase()
|
||||||
|
|
||||||
def write_session(self, session=None):
|
def write_session(self, session=None):
|
||||||
|
|
||||||
|
|
|
@@ -13,7 +13,7 @@
  # You should have received a copy of the GNU General Public License
  # along with PlexPy. If not, see <http://www.gnu.org/licenses/>.
  
- from plexpy import logger, helpers, plexwatch, db, http_handler, monitor
+ from plexpy import logger, helpers, datafactory, http_handler, database
  
  from xml.dom import minidom
  

@@ -23,7 +23,7 @@ import plexpy
  def refresh_users():
  logger.info("Requesting users list refresh...")
  result = PlexTV().get_full_users_list()
- monitor_db = monitor.MonitorDatabase()
+ monitor_db = database.MonitorDatabase()
  
  if len(result) > 0:
  for item in result:

@@ -194,7 +194,7 @@ class PlexTV(object):
  
  def get_synced_items(self, machine_id=None, user_id=None):
  sync_list = self.get_plextv_sync_lists(machine_id)
- plex_watch = plexwatch.PlexWatch()
+ data_factory = datafactory.DataFactory()
  
  synced_items = []
  

@@ -218,8 +218,8 @@ class PlexTV(object):
  for device in sync_device:
  device_user_id = helpers.get_xml_attr(device, 'userID')
  try:
- device_username = plex_watch.get_user_details(user_id=device_user_id)['username']
+ device_username = data_factory.get_user_details(user_id=device_user_id)['username']
- device_friendly_name = plex_watch.get_user_details(user_id=device_user_id)['friendly_name']
+ device_friendly_name = data_factory.get_user_details(user_id=device_user_id)['friendly_name']
  except:
  device_username = ''
  device_friendly_name = ''
plexpy/plexwatch.py (1069 lines): file diff suppressed because it is too large.
@@ -13,7 +13,7 @@
  # You should have received a copy of the GNU General Public License
  # along with PlexPy. If not, see <http://www.gnu.org/licenses/>.
  
- from plexpy import logger, helpers, plexwatch, http_handler
+ from plexpy import logger, helpers, datafactory, http_handler
  
  import plexpy
  

@@ -495,7 +495,7 @@ class PmsConnect(object):
  """
  def get_session_each(self, stream_type='', session=None):
  session_output = None
- plex_watch = plexwatch.PlexWatch()
+ data_factory = datafactory.DataFactory()
  if stream_type == 'track':
  
  media_info = session.getElementsByTagName('Media')[0]

@@ -521,7 +521,7 @@ class PmsConnect(object):
  transcode_container = ''
  transcode_protocol = ''
  
- user_details = plex_watch.get_user_details(
+ user_details = data_factory.get_user_details(
  user=helpers.get_xml_attr(session.getElementsByTagName('User')[0], 'title'))
  
  if helpers.get_xml_attr(session.getElementsByTagName('Player')[0], 'machineIdentifier').endswith('_Track'):

@@ -629,7 +629,7 @@ class PmsConnect(object):
  else:
  use_indexes = 0
  
- user_details = plex_watch.get_user_details(
+ user_details = data_factory.get_user_details(
  user=helpers.get_xml_attr(session.getElementsByTagName('User')[0], 'title'))
  
  if helpers.get_xml_attr(session.getElementsByTagName('Player')[0], 'machineIdentifier').endswith('_Video'):
@@ -13,7 +13,7 @@
  # You should have received a copy of the GNU General Public License
  # along with PlexPy. If not, see <http://www.gnu.org/licenses/>.
  
- from plexpy import logger, notifiers, plextv, pmsconnect, plexwatch, db, common, log_reader, datafactory
+ from plexpy import logger, notifiers, plextv, pmsconnect, common, log_reader, datafactory
  from plexpy.helpers import checked, radio
  
  from mako.lookup import TemplateLookup

@@ -80,13 +80,6 @@ class WebInterface(object):
  cherrypy.response.headers['Content-type'] = 'application/json'
  return json.dumps(formats)
  
- @cherrypy.expose
- def home_stats_old(self, time_range='30', **kwargs):
- plex_watch = plexwatch.PlexWatch()
- stats_data = plex_watch.get_home_stats(time_range)
- 
- return serve_template(templatename="home_stats.html", title="Stats", data=stats_data)
- 
  @cherrypy.expose
  def home_stats(self, time_range='30', **kwargs):
  data_factory = datafactory.DataFactory()

@@ -396,12 +389,12 @@ class WebInterface(object):
  # Write the config
  plexpy.CONFIG.write()
  
- # Check if we have our users table
- plexwatch.check_db_tables()
- 
  # Reconfigure scheduler
  plexpy.initialize_scheduler()
  
+ # Refresh users table. Probably shouldn't do this on every config save, will improve this later.
+ threading.Thread(target=plextv.refresh_users).start()
+ 
  raise cherrypy.HTTPRedirect("config")
  
  @cherrypy.expose

@@ -432,9 +425,9 @@ class WebInterface(object):
  
  @cherrypy.expose
  def clear_all_history(self, **kwargs):
- from plexpy import monitor
+ from plexpy import database
  
- threading.Thread(target=monitor.clear_history_tables).start()
+ threading.Thread(target=database.clear_history_tables).start()
  raise cherrypy.HTTPRedirect("config")
  
  @cherrypy.expose
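clear_all_history now dispatches database.clear_history_tables on a background thread so the request can redirect immediately. A standalone Python 3 sketch of that purge-in-a-thread pattern follows; the database path and demo table setup are stand-ins for PlexPy's plexpy.db.

import sqlite3
import threading

DB_PATH = 'plexpy_demo.db'   # stand-in path; the real code resolves plexpy.db under plexpy.DATA_DIR

def clear_history_tables(db_path=DB_PATH):
    """Delete all history rows and reclaim space, as the background worker would."""
    connection = sqlite3.connect(db_path, timeout=20)
    with connection:
        for table in ('session_history', 'session_history_media_info', 'session_history_metadata'):
            connection.execute('DELETE FROM %s' % table)
    connection.execute('VACUUM')
    connection.close()

# Demo setup so the purge has tables to clear.
setup = sqlite3.connect(DB_PATH)
with setup:
    for table in ('session_history', 'session_history_media_info', 'session_history_metadata'):
        setup.execute('CREATE TABLE IF NOT EXISTS %s (id INTEGER PRIMARY KEY)' % table)
setup.close()

# Fire and forget: the web handler can redirect while the purge runs.
threading.Thread(target=clear_history_tables).start()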