diff --git a/data/interfaces/default/user.html b/data/interfaces/default/user.html
index e18fec08..22f0f1b0 100644
--- a/data/interfaces/default/user.html
+++ b/data/interfaces/default/user.html
@@ -185,17 +185,19 @@ from plexpy import helpers
 [table markup stripped during extraction — this hunk reworks the history table header columns: ID, Time, User, Platform, IP Address, Title, Started, Paused, Stopped, Duration, Completed, grandparentRatingKey, RatingKey]
diff --git a/data/interfaces/default/users_new.html b/data/interfaces/default/users_new.html
deleted file mode 100644
index fe79a725..00000000
--- a/data/interfaces/default/users_new.html
+++ /dev/null
@@ -1,69 +0,0 @@
-<%inherit file="base.html"/>
-<%!
-from plexpy import helpers
-%>
-[remaining markup stripped during extraction — the deleted template defined <%def name="headIncludes()">, <%def name="body()"> containing a users table with columns User, Last Seen, Last Known IP and Total Plays, and <%def name="javascriptIncludes()">]
diff --git a/plexpy/__init__.py b/plexpy/__init__.py
index 4fca223e..53dff28b 100644
--- a/plexpy/__init__.py
+++ b/plexpy/__init__.py
@@ -331,40 +331,6 @@ def sig_handler(signum=None, frame=None):
def dbcheck():
- conn = sqlite3.connect(plexpy.CONFIG.PLEXWATCH_DATABASE)
- c = conn.cursor()
- c.execute(
- 'CREATE TABLE IF NOT EXISTS plexpy_users (id INTEGER PRIMARY KEY AUTOINCREMENT, '
- 'user_id INTEGER DEFAULT NULL UNIQUE, username TEXT NOT NULL UNIQUE, '
- 'friendly_name TEXT, thumb TEXT, email TEXT, is_home_user INTEGER DEFAULT NULL, '
- 'is_allow_sync INTEGER DEFAULT NULL, is_restricted INTEGER DEFAULT NULL)'
- )
-
- # Upgrade plexpy_users table from earlier versions
- try:
- c.execute('SELECT user_id from plexpy_users')
- except sqlite3.OperationalError:
- logger.debug(u"Altering database. Updating database table plexpy_users.")
- c.execute(
- 'CREATE TABLE tmp_table (id INTEGER PRIMARY KEY AUTOINCREMENT, '
- 'user_id INTEGER DEFAULT NULL UNIQUE, username TEXT NOT NULL UNIQUE, '
- 'friendly_name TEXT, thumb TEXT, email TEXT, is_home_user INTEGER DEFAULT NULL, '
- 'is_allow_sync INTEGER DEFAULT NULL, is_restricted INTEGER DEFAULT NULL)'
- )
- c.execute(
- 'INSERT INTO tmp_table SELECT id, NULL, username, friendly_name, NULL, NULL, NULL, NULL, NULL '
- 'FROM plexpy_users'
- )
- c.execute(
- 'DROP TABLE plexpy_users'
- )
- c.execute(
- 'ALTER TABLE tmp_table RENAME TO plexpy_users'
- )
-
- conn.commit()
- c.close()
-
conn_db = sqlite3.connect(DB_FILE)
c_db = conn_db.cursor()
@@ -388,13 +354,13 @@ def dbcheck():
'CREATE TABLE IF NOT EXISTS session_history (id INTEGER PRIMARY KEY AUTOINCREMENT, '
'started INTEGER, stopped INTEGER, rating_key INTEGER, user_id INTEGER, user TEXT, '
'ip_address TEXT, paused_counter INTEGER DEFAULT 0, player TEXT, platform TEXT, machine_id TEXT, '
- 'parent_rating_key INTEGER, grandparent_rating_key INTEGER, media_type TEXT, view_offset INTEGER)'
+ 'parent_rating_key INTEGER, grandparent_rating_key INTEGER, media_type TEXT, view_offset INTEGER DEFAULT 0)'
)
# session_history_media_info table :: This is a table which logs each session's media info
c_db.execute(
'CREATE TABLE IF NOT EXISTS session_history_media_info (id INTEGER PRIMARY KEY, '
- 'rating_key INTEGER, video_decision TEXT, audio_decision TEXT, duration INTEGER, width INTEGER, '
+ 'rating_key INTEGER, video_decision TEXT, audio_decision TEXT, duration INTEGER DEFAULT 0, width INTEGER, '
'height INTEGER, container TEXT, video_codec TEXT, audio_codec TEXT, bitrate INTEGER, video_resolution TEXT, '
'video_framerate TEXT, aspect_ratio TEXT, audio_channels INTEGER, transcode_protocol TEXT, '
'transcode_container TEXT, transcode_video_codec TEXT, transcode_audio_codec TEXT, '
@@ -408,7 +374,7 @@ def dbcheck():
'title TEXT, parent_title TEXT, grandparent_title TEXT, full_title TEXT, media_index INTEGER, '
'parent_media_index INTEGER, thumb TEXT, parent_thumb TEXT, grandparent_thumb TEXT, art TEXT, media_type TEXT, '
'year INTEGER, originally_available_at TEXT, added_at INTEGER, updated_at INTEGER, last_viewed_at INTEGER, '
- 'content_rating TEXT, summary TEXT, rating TEXT, duration INTEGER, guid TEXT, '
+ 'content_rating TEXT, summary TEXT, rating TEXT, duration INTEGER DEFAULT 0, guid TEXT, '
'directors TEXT, writers TEXT, actors TEXT, genres TEXT, studio TEXT)'
''
)
diff --git a/plexpy/datafactory.py b/plexpy/datafactory.py
index 0b1e3a8b..a155a92c 100644
--- a/plexpy/datafactory.py
+++ b/plexpy/datafactory.py
@@ -146,16 +146,16 @@ class DataFactory(object):
t1 + '.started as date',
'(CASE WHEN users.friendly_name IS NULL THEN ' + t1 +
'.user ELSE users.friendly_name END) as friendly_name',
- t1 + '.player',
+ t1 + '.player as platform',
t1 + '.ip_address',
- t2 + '.full_title',
+ t2 + '.full_title as title',
t1 + '.started',
t1 + '.paused_counter',
t1 + '.stopped',
'round((julianday(datetime(' + t1 + '.stopped, "unixepoch", "localtime")) - \
julianday(datetime(' + t1 + '.started, "unixepoch", "localtime"))) * 86400) - \
(CASE WHEN ' + t1 + '.paused_counter IS NULL THEN 0 ELSE ' + t1 + '.paused_counter END) as duration',
- '((CASE WHEN ' + t1 + '.view_offset IS NULL THEN 0.0 ELSE ' + t1 + '.view_offset * 1.0 END) / \
+ '((CASE WHEN ' + t1 + '.view_offset IS NULL THEN 0.1 ELSE ' + t1 + '.view_offset * 1.0 END) / \
(CASE WHEN ' + t2 + '.duration IS NULL THEN 1.0 ELSE ' + t2 + '.duration * 1.0 END) * 100) as percent_complete',
t1 + '.grandparent_rating_key as grandparent_rating_key',
t1 + '.rating_key as rating_key',
@@ -194,14 +194,14 @@ class DataFactory(object):
row = {"id": item['id'],
"date": item['date'],
"friendly_name": item['friendly_name'],
- "platform": item["player"],
+ "platform": item["platform"],
"ip_address": item["ip_address"],
- "title": item["full_title"],
+ "title": item["title"],
"started": item["started"],
"paused_counter": item["paused_counter"],
"stopped": item["stopped"],
"duration": item["duration"],
- "percent_complete": round(item["percent_complete"], 0),
+ "percent_complete": item["percent_complete"],
"grandparent_rating_key": item["grandparent_rating_key"],
"rating_key": item["rating_key"],
"user": item["user"],
@@ -233,6 +233,79 @@ class DataFactory(object):
return dict
+ def get_user_unique_ips(self, start='', length='', kwargs=None, custom_where=''):
+ data_tables = datatables_new.DataTables()
+
+ start = int(start)
+ length = int(length)
+ filtered = []
+ totalcount = 0
+ search_value = ""
+ search_regex = ""
+ order_column = 0
+ order_dir = "desc"
+
+ if 'order[0][dir]' in kwargs:
+ order_dir = kwargs.get('order[0][dir]', "desc")
+
+ if 'order[0][column]' in kwargs:
+ order_column = kwargs.get('order[0][column]', 1)
+
+ if 'search[value]' in kwargs:
+ search_value = kwargs.get('search[value]', "")
+
+ if 'search[regex]' in kwargs:
+ search_regex = kwargs.get('search[regex]', "")
+
+ columns = ['session_history.started as last_seen',
+ 'session_history.ip_address as ip_address',
+ 'COUNT(session_history.ip_address) as play_count',
+ 'session_history.player as platform',
+ 'session_history_metadata.full_title as last_watched',
+ 'session_history.user as user'
+ ]
+
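+ # Grouping on ip_address (see group_by below) collapses the history into one row per unique IP, with a play count per address; the user filter arrives via custom_where.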
+ try:
+ query = data_tables.ssp_query(table_name='session_history',
+ columns=columns,
+ start=start,
+ length=length,
+ order_column=int(order_column),
+ order_dir=order_dir,
+ search_value=search_value,
+ search_regex=search_regex,
+ custom_where=custom_where,
+ group_by='session_history.ip_address',
+ join_type=['JOIN'],
+ join_table=['session_history_metadata'],
+ join_evals=[['session_history.id', 'session_history_metadata.id']],
+ kwargs=kwargs)
+ except:
+ logger.warn("Unable to execute database query.")
+ return {'recordsFiltered': 0,
+ 'recordsTotal': 0,
+ 'data': 'null'}
+
+ results = query['result']
+
+ rows = []
+ for item in results:
+ row = {"last_seen": item['last_seen'],
+ "ip_address": item['ip_address'],
+ "play_count": item['play_count'],
+ "platform": item['platform'],
+ "last_watched": item['last_watched']
+ }
+
+ rows.append(row)
+
+ dict = {'recordsFiltered': query['filteredCount'],
+ 'recordsTotal': query['totalCount'],
+ 'data': rows,
+ }
+
+ return dict
+
def set_user_friendly_name(self, user=None, friendly_name=None):
if user:
if friendly_name.strip() == '':
@@ -261,3 +334,539 @@ class DataFactory(object):
return user
return None
+
+ def get_user_id(self, user=None):
+ if user:
+ try:
+ monitor_db = monitor.MonitorDatabase()
+ query = 'select user_id FROM users WHERE username = ?'
+ result = monitor_db.select_single(query, args=[user])
+ if result:
+ return result
+ else:
+ return None
+ except:
+ return None
+
+ return None
+
+ def get_user_details(self, user=None, user_id=None):
+ try:
+ monitor_db = monitor.MonitorDatabase()
+
+ if user:
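+ # Look the user up in the users table first; the UNION ALL branch provides a bare fall-back row from session_history for users missing from the users table.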
+ query = 'SELECT user_id, username, friendly_name, email, ' \
+ 'thumb, is_home_user, is_allow_sync, is_restricted ' \
+ 'FROM users ' \
+ 'WHERE username = ? ' \
+ 'UNION ALL ' \
+ 'SELECT null, user, null, null, null, null, null, null ' \
+ 'FROM session_history ' \
+ 'WHERE user = ? ' \
+ 'GROUP BY user ' \
+ 'LIMIT 1'
+ result = monitor_db.select(query, args=[user, user])
+ elif user_id:
+ query = 'SELECT user_id, username, friendly_name, email, thumb, ' \
+ 'is_home_user, is_allow_sync, is_restricted FROM users WHERE user_id = ? LIMIT 1'
+ result = monitor_db.select(query, args=[user_id])
+ else:
+ result = None
+ if result:
+ for item in result:
+ if not item['friendly_name']:
+ friendly_name = item['username']
+ else:
+ friendly_name = item['friendly_name']
+ if not item['thumb'] or item['thumb'] == '':
+ user_thumb = common.DEFAULT_USER_THUMB
+ else:
+ user_thumb = item['thumb']
+
+ user_details = {"user_id": item['user_id'],
+ "username": item['username'],
+ "friendly_name": friendly_name,
+ "email": item['email'],
+ "thumb": user_thumb,
+ "is_home_user": item['is_home_user'],
+ "is_allow_sync": item['is_allow_sync'],
+ "is_restricted": item['is_restricted']
+ }
+ return user_details
+ else:
+ return None
+ except:
+ return None
+
+ return None
+
+ def get_home_stats(self, time_range='30'):
+ monitor_db = monitor.MonitorDatabase()
+
+ if not time_range.isdigit():
+ time_range = '30'
+
+ stats_queries = ["top_tv", "popular_tv", "top_users", "top_platforms"]
+ home_stats = []
+
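+ # Each stat block below fills the same superset of row keys (thumb, user, platform, etc.) so the home_stats.html template can render every card the same way.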
+ for stat in stats_queries:
+ if 'top_tv' in stat:
+ top_tv = []
+ try:
+ query = 'SELECT session_history_metadata.grandparent_title, ' \
+ 'COUNT(session_history_metadata.grandparent_title) as total_plays, ' \
+ 'session_history_metadata.grandparent_rating_key, ' \
+ 'MAX(session_history.started) as last_watch,' \
+ 'session_history_metadata.grandparent_thumb ' \
+ 'FROM session_history_metadata ' \
+ 'JOIN session_history on session_history_metadata.id = session_history.id ' \
+ 'WHERE datetime(session_history.stopped, "unixepoch", "localtime") ' \
+ '>= datetime("now", "-%s days", "localtime") ' \
+ 'AND session_history_metadata.media_type = "episode" ' \
+ 'GROUP BY session_history_metadata.grandparent_title ' \
+ 'ORDER BY total_plays DESC LIMIT 10' % time_range
+ result = monitor_db.select(query)
+ except:
+ logger.warn("Unable to execute database query.")
+ return None
+
+ for item in result:
+ row = {'title': item[0],
+ 'total_plays': item[1],
+ 'users_watched': '',
+ 'rating_key': item[2],
+ 'last_play': item[3],
+ 'grandparent_thumb': item[4],
+ 'thumb': '',
+ 'user': '',
+ 'friendly_name': '',
+ 'platform_type': '',
+ 'platform': ''
+ }
+ top_tv.append(row)
+
+ home_stats.append({'stat_id': stat,
+ 'rows': top_tv})
+
+ elif 'popular_tv' in stat:
+ popular_tv = []
+ try:
+ query = 'SELECT session_history_metadata.grandparent_title, ' \
+ 'COUNT(DISTINCT session_history.user_id) as users_watched, ' \
+ 'session_history_metadata.grandparent_rating_key, ' \
+ 'MAX(session_history.started) as last_watch, ' \
+ 'COUNT(session_history.id) as total_plays, ' \
+ 'session_history_metadata.grandparent_thumb ' \
+ 'FROM session_history_metadata ' \
+ 'JOIN session_history ON session_history_metadata.id = session_history.id ' \
+ 'WHERE datetime(session_history.stopped, "unixepoch", "localtime") ' \
+ '>= datetime("now", "-%s days", "localtime") ' \
+ 'AND session_history_metadata.media_type = "episode" ' \
+ 'GROUP BY session_history_metadata.grandparent_title ' \
+ 'ORDER BY users_watched DESC, total_plays DESC ' \
+ 'LIMIT 10' % time_range
+ result = monitor_db.select(query)
+ except:
+ logger.warn("Unable to execute database query.")
+ return None
+
+ for item in result:
+ row = {'title': item[0],
+ 'users_watched': item[1],
+ 'rating_key': item[2],
+ 'last_play': item[3],
+ 'total_plays': item[4],
+ 'grandparent_thumb': item[5],
+ 'thumb': '',
+ 'user': '',
+ 'friendly_name': '',
+ 'platform_type': '',
+ 'platform': ''
+ }
+ popular_tv.append(row)
+
+ home_stats.append({'stat_id': stat,
+ 'rows': popular_tv})
+
+ elif 'top_users' in stat:
+ top_users = []
+ try:
+ query = 'SELECT session_history.user, ' \
+ '(case when users.friendly_name is null then session_history.user else ' \
+ 'users.friendly_name end) as friendly_name,' \
+ 'COUNT(session_history.id) as total_plays, ' \
+ 'MAX(session_history.started) as last_watch, ' \
+ 'users.thumb ' \
+ 'FROM session_history ' \
+ 'JOIN session_history_metadata ON session_history.id = session_history_metadata.id ' \
+ 'LEFT OUTER JOIN users ON session_history.user_id = users.user_id ' \
+ 'WHERE datetime(session_history.stopped, "unixepoch", "localtime") >= ' \
+ 'datetime("now", "-%s days", "localtime") '\
+ 'GROUP BY session_history.user_id ' \
+ 'ORDER BY total_plays DESC LIMIT 10' % time_range
+ result = monitor_db.select(query)
+ except:
+ logger.warn("Unable to execute database query.")
+ return None
+
+ for item in result:
+ if not item[4] or item[4] == '':
+ user_thumb = common.DEFAULT_USER_THUMB
+ else:
+ user_thumb = item[4]
+
+ row = {'user': item[0],
+ 'friendly_name': item[1],
+ 'total_plays': item[2],
+ 'last_play': item[3],
+ 'thumb': user_thumb,
+ 'grandparent_thumb': '',
+ 'users_watched': '',
+ 'rating_key': '',
+ 'title': '',
+ 'platform_type': '',
+ 'platform': ''
+ }
+ top_users.append(row)
+
+ home_stats.append({'stat_id': stat,
+ 'rows': top_users})
+
+ elif 'top_platforms' in stat:
+ top_platform = []
+
+ try:
+ query = 'SELECT session_history.platform, ' \
+ 'COUNT(session_history.id) as total_plays, ' \
+ 'MAX(session_history.started) as last_watch ' \
+ 'FROM session_history ' \
+ 'WHERE datetime(session_history.stopped, "unixepoch", "localtime") ' \
+ '>= datetime("now", "-%s days", "localtime") ' \
+ 'GROUP BY session_history.platform ' \
+ 'ORDER BY total_plays DESC' % time_range
+ result = monitor_db.select(query)
+ except:
+ logger.warn("Unable to execute database query.")
+ return None
+
+ for item in result:
+ row = {'platform': item[0],
+ 'total_plays': item[1],
+ 'last_play': item[2],
+ 'platform_type': item[0],
+ 'title': '',
+ 'thumb': '',
+ 'grandparent_thumb': '',
+ 'users_watched': '',
+ 'rating_key': '',
+ 'user': '',
+ 'friendly_name': ''
+ }
+ top_platform.append(row)
+
+ home_stats.append({'stat_id': stat,
+ 'rows': top_platform})
+
+ return home_stats
+
+ def get_stream_details(self, row_id=None):
+ monitor_db = monitor.MonitorDatabase()
+
+ if row_id:
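+ # media_info and metadata rows share the session_history row id, so a join on id returns both the stream details and the titles for one history entry.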
+ query = 'SELECT container, bitrate, video_resolution, width, height, aspect_ratio, video_framerate, ' \
+ 'video_codec, audio_codec, audio_channels, video_decision, transcode_video_codec, transcode_height, ' \
+ 'transcode_width, audio_decision, transcode_audio_codec, transcode_audio_channels, media_type, ' \
+ 'title, grandparent_title ' \
+ 'from session_history_media_info ' \
+ 'join session_history_metadata on session_history_media_info.id = session_history_metadata.id ' \
+ 'where session_history_media_info.id = ?'
+ result = monitor_db.select(query, args=[row_id])
+ else:
+ return None
+
+ stream_output = {}
+
+ for item in result:
+ stream_output = {'container': item[0],
+ 'bitrate': item[1],
+ 'video_resolution': item[2],
+ 'width': item[3],
+ 'height': item[4],
+ 'aspect_ratio': item[5],
+ 'video_framerate': item[6],
+ 'video_codec': item[7],
+ 'audio_codec': item[8],
+ 'audio_channels': item[9],
+ 'transcode_video_dec': item[10],
+ 'transcode_video_codec': item[11],
+ 'transcode_height': item[12],
+ 'transcode_width': item[13],
+ 'transcode_audio_dec': item[14],
+ 'transcode_audio_codec': item[15],
+ 'transcode_audio_channels': item[16],
+ 'media_type': item[17],
+ 'title': item[18],
+ 'grandparent_title': item[19]
+ }
+
+ return stream_output
+
+ def get_recently_watched(self, user=None, limit='10'):
+ monitor_db = monitor.MonitorDatabase()
+ recently_watched = []
+
+ if not limit.isdigit():
+ limit = '10'
+
+ try:
+ if user:
+ query = 'SELECT session_history.media_type, session_history.rating_key, title, thumb, parent_thumb, ' \
+ 'media_index, parent_media_index, year, started, user ' \
+ 'FROM session_history_metadata ' \
+ 'JOIN session_history ON session_history_metadata.id = session_history.id ' \
+ 'WHERE user = ? ORDER BY started DESC LIMIT ?'
+ result = monitor_db.select(query, args=[user, limit])
+ else:
+ query = 'SELECT session_history.media_type, session_history.rating_key, title, thumb, parent_thumb, ' \
+ 'media_index, parent_media_index, year, started, user ' \
+ 'FROM session_history_metadata ' \
+ 'JOIN session_history ON session_history_metadata.id = session_history.id ' \
+ 'ORDER BY started DESC LIMIT ?'
+ result = monitor_db.select(query, args=[limit])
+ except:
+ logger.warn("Unable to execute database query.")
+ return None
+
+ for row in result:
+ if row[0] == 'episode':
+ thumb = row[4]
+ else:
+ thumb = row[3]
+
+ recent_output = {'type': row[0],
+ 'rating_key': row[1],
+ 'title': row[2],
+ 'thumb': thumb,
+ 'index': row[5],
+ 'parentIndex': row[6],
+ 'year': row[7],
+ 'time': row[8],
+ 'user': row[9]
+ }
+ recently_watched.append(recent_output)
+
+ return recently_watched
+
+ def get_user_watch_time_stats(self, user=None):
+ monitor_db = monitor.MonitorDatabase()
+
+ time_queries = [1, 7, 30, 0]
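+ # Watch time windows in days: 1, 7 and 30, with 0 meaning all time (handled by the else branch below).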
+ user_watch_time_stats = []
+
+ for days in time_queries:
+ if days > 0:
+ query = 'SELECT (SUM(stopped - started) - ' \
+ 'SUM(CASE WHEN paused_counter is null THEN 0 ELSE paused_counter END)) as total_time, ' \
+ 'COUNT(id) AS total_plays ' \
+ 'FROM session_history ' \
+ 'WHERE datetime(stopped, "unixepoch", "localtime") >= datetime("now", "-%s days", "localtime") ' \
+ 'AND user = ?' % days
+ result = monitor_db.select(query, args=[user])
+ else:
+ query = 'SELECT (SUM(stopped - started) - ' \
+ 'SUM(CASE WHEN paused_counter is null THEN 0 ELSE paused_counter END)) as total_time, ' \
+ 'COUNT(id) AS total_plays ' \
+ 'FROM session_history ' \
+ 'WHERE user = ?'
+ result = monitor_db.select(query, args=[user])
+
+ for item in result:
+ if item[0]:
+ total_time = item[0]
+ total_plays = item[1]
+ else:
+ total_time = 0
+ total_plays = 0
+
+ row = {'query_days': days,
+ 'total_time': total_time,
+ 'total_plays': total_plays
+ }
+
+ user_watch_time_stats.append(row)
+
+ return user_watch_time_stats
+
+ def get_user_platform_stats(self, user=None):
+ monitor_db = monitor.MonitorDatabase()
+
+ platform_stats = []
+ result_id = 0
+
+ try:
+ query = 'SELECT player, COUNT(player) as player_count, platform ' \
+ 'FROM session_history ' \
+ 'WHERE user = ? ' \
+ 'GROUP BY player ' \
+ 'ORDER BY player_count DESC'
+ result = monitor_db.select(query, args=[user])
+ except:
+ logger.warn("Unable to execute database query.")
+ return None
+
+ for item in result:
+ row = {'platform_name': item[0],
+ 'platform_type': item[2],
+ 'total_plays': item[1],
+ 'result_id': result_id
+ }
+ platform_stats.append(row)
+ result_id += 1
+
+ return platform_stats
+
+ def get_total_plays_per_day(self, time_range='30'):
+ monitor_db = monitor.MonitorDatabase()
+
+ if not time_range.isdigit():
+ time_range = '30'
+
+ try:
+ query = 'SELECT date(started, "unixepoch", "localtime") as date_played, ' \
+ 'SUM(case when media_type = "episode" then 1 else 0 end) as tv_count, ' \
+ 'SUM(case when media_type = "movie" then 1 else 0 end) as movie_count ' \
+ 'FROM session_history ' \
+ 'WHERE datetime(stopped, "unixepoch", "localtime") >= datetime("now", "-%s days", "localtime") ' \
+ 'GROUP BY date_played ' \
+ 'ORDER BY started ASC' % time_range
+
+ result = monitor_db.select(query)
+ except:
+ logger.warn("Unable to execute database query.")
+ return None
+
+ # create our date range as some days may not have any data
+ # but we still want to display them
+ base = datetime.date.today()
+ date_list = [base - datetime.timedelta(days=x) for x in range(0, int(time_range))]
+
+ categories = []
+ series_1 = []
+ series_2 = []
+
+ for date_item in sorted(date_list):
+ date_string = date_item.strftime('%Y-%m-%d')
+ categories.append(date_string)
+ series_1_value = 0
+ series_2_value = 0
+ for item in result:
+ if date_string == item[0]:
+ series_1_value = item[1]
+ series_2_value = item[2]
+ break
+ else:
+ series_1_value = 0
+ series_2_value = 0
+
+ series_1.append(series_1_value)
+ series_2.append(series_2_value)
+
+ series_1_output = {'name': 'TV',
+ 'data': series_1}
+ series_2_output = {'name': 'Movies',
+ 'data': series_2}
+
+ output = {'categories': categories,
+ 'series': [series_1_output, series_2_output]}
+ return output
+
+ def get_total_plays_per_dayofweek(self, time_range='30'):
+ monitor_db = monitor.MonitorDatabase()
+
+ if not time_range.isdigit():
+ time_range = '30'
+
+ query = 'SELECT strftime("%w", datetime(started, "unixepoch", "localtime")) as daynumber, ' \
+ 'case cast (strftime("%w", datetime(started, "unixepoch", "localtime")) as integer) ' \
+ 'when 0 then "Sunday" ' \
+ 'when 1 then "Monday" ' \
+ 'when 2 then "Tuesday" ' \
+ 'when 3 then "Wednesday" ' \
+ 'when 4 then "Thursday" ' \
+ 'when 5 then "Friday" ' \
+ 'else "Saturday" end as dayofweek, ' \
+ 'COUNT(id) as total_plays ' \
+ 'from session_history ' \
+ 'WHERE datetime(stopped, "unixepoch", "localtime") >= ' \
+ 'datetime("now", "-' + time_range + ' days", "localtime") ' \
+ 'GROUP BY dayofweek ' \
+ 'ORDER BY daynumber'
+
+ result = monitor_db.select(query)
+
+ days_list = ['Sunday', 'Monday', 'Tuesday', 'Wednesday',
+ 'Thursday', 'Friday', 'Saturday']
+
+ categories = []
+ series_1 = []
+
+ for day_item in days_list:
+ categories.append(day_item)
+ series_1_value = 0
+ for item in result:
+ if day_item == item[1]:
+ series_1_value = item[2]
+ break
+ else:
+ series_1_value = 0
+
+ series_1.append(series_1_value)
+
+ series_1_output = {'name': 'Total plays',
+ 'data': series_1}
+
+ output = {'categories': categories,
+ 'series': [series_1_output]}
+ return output
+
+ def get_total_plays_per_hourofday(self, time_range='30'):
+ monitor_db = monitor.MonitorDatabase()
+
+ if not time_range.isdigit():
+ time_range = '30'
+
+ query = 'select strftime("%H", datetime(started, "unixepoch", "localtime")) as hourofday, ' \
+ 'COUNT(id) ' \
+ 'FROM session_history ' \
+ 'WHERE datetime(stopped, "unixepoch", "localtime") >= ' \
+ 'datetime("now", "-' + time_range + ' days", "localtime") ' \
+ 'GROUP BY hourofday ' \
+ 'ORDER BY hourofday'
+
+ result = monitor_db.select(query)
+
+ hours_list = ['00','01','02','03','04','05',
+ '06','07','08','09','10','11',
+ '12','13','14','15','16','17',
+ '18','19','20','21','22','23']
+
+ categories = []
+ series_1 = []
+
+ for hour_item in hours_list:
+ categories.append(hour_item)
+ series_1_value = 0
+ for item in result:
+ if hour_item == item[0]:
+ series_1_value = item[1]
+ break
+ else:
+ series_1_value = 0
+
+ series_1.append(series_1_value)
+
+ series_1_output = {'name': 'Total plays',
+ 'data': series_1}
+
+ output = {'categories': categories,
+ 'series': [series_1_output]}
+ return output
\ No newline at end of file
diff --git a/plexpy/monitor.py b/plexpy/monitor.py
index c396d42e..fb31b644 100644
--- a/plexpy/monitor.py
+++ b/plexpy/monitor.py
@@ -163,7 +163,7 @@ class MonitorDatabase(object):
self.connection.execute("PRAGMA cache_size=-%s" % (get_cache_size() * 1024))
self.connection.row_factory = sqlite3.Row
- def action(self, query, args=None):
+ def action(self, query, args=None, return_last_id=False):
if query is None:
return
@@ -302,56 +302,74 @@ class MonitorProcessing(object):
# If it's our first write then time stamp it.
self.db.upsert('sessions', timestamp, keys)
- def write_session_history(self, session=None):
+ def write_session_history(self, session=None, import_metadata=None, is_import=False, import_ignore_interval=0):
if session:
logging_enabled = False
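+ # Imported rows carry their own stopped timestamp; live sessions are stamped with the current time.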
+ if is_import:
+ if str(session['stopped']).isdigit():
+ stopped = session['stopped']
+ else:
+ stopped = int(time.time())
+ else:
+ stopped = int(time.time())
+
if plexpy.CONFIG.VIDEO_LOGGING_ENABLE and \
(session['media_type'] == 'movie' or session['media_type'] == 'episode'):
logging_enabled = True
-
- if plexpy.CONFIG.MUSIC_LOGGING_ENABLE and \
+ elif plexpy.CONFIG.MUSIC_LOGGING_ENABLE and \
session['media_type'] == 'track':
logging_enabled = True
+ else:
+ logger.debug(u"PlexPy Monitor :: ratingKey %s not logged. Does not meet logging criteria. "
+ u"Media type is '%s'" % (session['rating_key'], session['media_type']))
- if plexpy.CONFIG.LOGGING_IGNORE_INTERVAL:
+ if plexpy.CONFIG.LOGGING_IGNORE_INTERVAL and not is_import:
if (session['media_type'] == 'movie' or session['media_type'] == 'episode') and \
- (int(time.time()) - session['started'] < plexpy.CONFIG.LOGGING_IGNORE_INTERVAL):
+ (int(stopped) - session['started'] < int(plexpy.CONFIG.LOGGING_IGNORE_INTERVAL)):
logging_enabled = False
- logger.debug(u"PlexPy Monitor :: Item played for %s seconds which is less than %s seconds, "
- u"so we're not logging it." %
- (str(int(time.time()) - session['started']), plexpy.CONFIG.LOGGING_IGNORE_INTERVAL))
+ logger.debug(u"PlexPy Monitor :: Play duration for ratingKey %s is %s secs which is less than %s "
+ u"seconds, so we're not logging it." %
+ (session['rating_key'], str(int(stopped) - session['started']),
+ plexpy.CONFIG.LOGGING_IGNORE_INTERVAL))
+ elif is_import and import_ignore_interval:
+ if (session['media_type'] == 'movie' or session['media_type'] == 'episode') and \
+ (int(stopped) - session['started'] < int(import_ignore_interval)):
+ logging_enabled = False
+ logger.debug(u"PlexPy Monitor :: Play duration for ratingKey %s is %s secs which is less than %s "
+ u"seconds, so we're not logging it." %
+ (session['rating_key'], str(int(stopped) - session['started']),
+ import_ignore_interval))
if logging_enabled:
- logger.debug(u"PlexPy Monitor :: Attempting to write to session_history table...")
+ # logger.debug(u"PlexPy Monitor :: Attempting to write to session_history table...")
query = 'INSERT INTO session_history (started, stopped, rating_key, parent_rating_key, ' \
'grandparent_rating_key, media_type, user_id, user, ip_address, paused_counter, player, ' \
'platform, machine_id, view_offset) VALUES ' \
'(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)'
- args = [session['started'], int(time.time()), session['rating_key'], session['parent_rating_key'],
+ args = [session['started'], stopped, session['rating_key'], session['parent_rating_key'],
session['grandparent_rating_key'], session['media_type'], session['user_id'], session['user'],
session['ip_address'], session['paused_counter'], session['player'], session['platform'],
session['machine_id'], session['view_offset']]
- logger.debug(u"PlexPy Monitor :: Writing session_history transaction...")
+ # logger.debug(u"PlexPy Monitor :: Writing session_history transaction...")
self.db.action(query=query, args=args)
- # Get the id for the last transaction
- last_id = self.db.select_single('SELECT max(id) FROM session_history')
- logger.debug(u"PlexPy Monitor :: Successfully written history item, last id for session_history is %s"
- % last_id)
+ # logger.debug(u"PlexPy Monitor :: Successfully written history item, last id for session_history is %s"
+ # % last_id)
- logger.debug(u"PlexPy Monitor :: Attempting to write to session_history_media_info table...")
+ # Write the session_history_media_info table
+ # logger.debug(u"PlexPy Monitor :: Attempting to write to session_history_media_info table...")
query = 'INSERT INTO session_history_media_info (id, rating_key, video_decision, audio_decision, ' \
'duration, width, height, container, video_codec, audio_codec, bitrate, video_resolution, ' \
'video_framerate, aspect_ratio, audio_channels, transcode_protocol, transcode_container, ' \
'transcode_video_codec, transcode_audio_codec, transcode_audio_channels, transcode_width, ' \
'transcode_height) VALUES ' \
- '(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)'
+ '(last_insert_rowid(), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)'
- args = [last_id, session['rating_key'], session['video_decision'], session['audio_decision'],
+ args = [session['rating_key'], session['video_decision'], session['audio_decision'],
session['duration'], session['width'], session['height'], session['container'],
session['video_codec'], session['audio_codec'], session['bitrate'],
session['video_resolution'], session['video_framerate'], session['aspect_ratio'],
@@ -359,15 +377,18 @@ class MonitorProcessing(object):
session['transcode_video_codec'], session['transcode_audio_codec'],
session['transcode_audio_channels'], session['transcode_width'], session['transcode_height']]
- logger.debug(u"PlexPy Monitor :: Writing session_history_media_info transaction...")
+ # logger.debug(u"PlexPy Monitor :: Writing session_history_media_info transaction...")
self.db.action(query=query, args=args)
- logger.debug(u"PlexPy Monitor :: Fetching metadata for item ratingKey %s" % session['rating_key'])
- pms_connect = pmsconnect.PmsConnect()
- result = pms_connect.get_metadata_details(rating_key=str(session['rating_key']))
-
- metadata = result['metadata']
+ if not is_import:
+ logger.debug(u"PlexPy Monitor :: Fetching metadata for item ratingKey %s" % session['rating_key'])
+ pms_connect = pmsconnect.PmsConnect()
+ result = pms_connect.get_metadata_details(rating_key=str(session['rating_key']))
+ metadata = result['metadata']
+ else:
+ metadata = import_metadata
+ # Write the session_history_metadata table
directors = ";".join(metadata['directors'])
writers = ";".join(metadata['writers'])
actors = ";".join(metadata['actors'])
@@ -381,15 +402,16 @@ class MonitorProcessing(object):
else:
full_title = metadata['title']
- logger.debug(u"PlexPy Monitor :: Attempting to write to session_history_metadata table...")
+ # logger.debug(u"PlexPy Monitor :: Attempting to write to session_history_metadata table...")
query = 'INSERT INTO session_history_metadata (id, rating_key, parent_rating_key, ' \
'grandparent_rating_key, title, parent_title, grandparent_title, full_title, media_index, ' \
'parent_media_index, thumb, parent_thumb, grandparent_thumb, art, media_type, year, ' \
'originally_available_at, added_at, updated_at, last_viewed_at, content_rating, summary, ' \
'rating, duration, guid, directors, writers, actors, genres, studio) VALUES ' \
- '(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)'
+ '(last_insert_rowid(), ' \
+ '?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)'
- args = [last_id, session['rating_key'], session['parent_rating_key'], session['grandparent_rating_key'],
+ args = [session['rating_key'], session['parent_rating_key'], session['grandparent_rating_key'],
session['title'], session['parent_title'], session['grandparent_title'], full_title,
metadata['index'], metadata['parent_index'], metadata['thumb'], metadata['parent_thumb'],
metadata['grandparent_thumb'], metadata['art'], session['media_type'], metadata['year'],
@@ -397,7 +419,7 @@ class MonitorProcessing(object):
metadata['last_viewed_at'], metadata['content_rating'], metadata['summary'], metadata['rating'],
metadata['duration'], metadata['guid'], directors, writers, actors, genres, metadata['studio']]
- logger.debug(u"PlexPy Monitor :: Writing session_history_metadata transaction...")
+ # logger.debug(u"PlexPy Monitor :: Writing session_history_metadata transaction...")
self.db.action(query=query, args=args)
def find_session_ip(self, rating_key=None, machine_id=None):
diff --git a/plexpy/plextv.py b/plexpy/plextv.py
index f998dd60..62458bc1 100644
--- a/plexpy/plextv.py
+++ b/plexpy/plextv.py
@@ -23,7 +23,6 @@ import plexpy
def refresh_users():
logger.info("Requesting users list refresh...")
result = PlexTV().get_full_users_list()
- pw_db = db.DBConnection()
monitor_db = monitor.MonitorDatabase()
if len(result) > 0:
@@ -38,7 +37,6 @@ def refresh_users():
"is_restricted": item['is_restricted']
}
- pw_db.upsert('plexpy_users', new_value_dict, control_value_dict)
monitor_db.upsert('users', new_value_dict, control_value_dict)
logger.info("Users list refreshed.")
diff --git a/plexpy/plexwatch.py b/plexpy/plexwatch.py
index d39d9ffd..2d5d5af4 100644
--- a/plexpy/plexwatch.py
+++ b/plexpy/plexwatch.py
@@ -972,6 +972,21 @@ class PlexWatch(object):
return None
+ def get_user_id(self, user=None):
+ if user:
+ try:
+ myDB = db.DBConnection()
+ query = 'select user_id FROM plexpy_users WHERE username = ?'
+ result = myDB.select_single(query, args=[user])
+ if result:
+ return result
+ else:
+ return None
+ except:
+ return None
+
+ return None
+
def get_user_details(self, user=None, user_id=None):
try:
myDB = db.DBConnection()
diff --git a/plexpy/plexwatch_import.py b/plexpy/plexwatch_import.py
new file mode 100644
index 00000000..82ac33ce
--- /dev/null
+++ b/plexpy/plexwatch_import.py
@@ -0,0 +1,368 @@
+# This file is part of PlexPy.
+#
+# PlexPy is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# PlexPy is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with PlexPy. If not, see <http://www.gnu.org/licenses/>.
+
+import sqlite3
+
+from plexpy import logger, helpers, monitor, datafactory, plextv
+from xml.dom import minidom
+
+import plexpy
+
+def extract_plexwatch_xml(xml=None):
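+ # PlexWatch stores the raw Plex session XML alongside each play; parse it back into a flat dict of the metadata and stream fields PlexPy needs.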
+ output = {}
+ clean_xml = helpers.latinToAscii(xml)
+ try:
+ xml_parse = minidom.parseString(clean_xml)
+ except:
+ logger.warn("Error parsing XML for Plexwatch database.")
+ return None
+
+ xml_head = xml_parse.getElementsByTagName('opt')
+ if not xml_head:
+ logger.warn("Error parsing XML for Plexwatch database.")
+ return None
+
+ for a in xml_head:
+ added_at = helpers.get_xml_attr(a, 'addedAt')
+ art = helpers.get_xml_attr(a, 'art')
+ duration = helpers.get_xml_attr(a, 'duration')
+ grandparent_thumb = helpers.get_xml_attr(a, 'grandparentThumb')
+ grandparent_title = helpers.get_xml_attr(a, 'grandparentTitle')
+ guid = helpers.get_xml_attr(a, 'guid')
+ media_index = helpers.get_xml_attr(a, 'index')
+ originally_available_at = helpers.get_xml_attr(a, 'originallyAvailableAt')
+ last_viewed_at = helpers.get_xml_attr(a, 'lastViewedAt')
+ parent_media_index = helpers.get_xml_attr(a, 'parentIndex')
+ parent_thumb = helpers.get_xml_attr(a, 'parentThumb')
+ rating = helpers.get_xml_attr(a, 'rating')
+ thumb = helpers.get_xml_attr(a, 'thumb')
+ media_type = helpers.get_xml_attr(a, 'type')
+ updated_at = helpers.get_xml_attr(a, 'updatedAt')
+ view_offset = helpers.get_xml_attr(a, 'viewOffset')
+ year = helpers.get_xml_attr(a, 'year')
+ parent_title = helpers.get_xml_attr(a, 'parentTitle')
+ studio = helpers.get_xml_attr(a, 'studio')
+ title = helpers.get_xml_attr(a, 'title')
+
+ directors = []
+ if a.getElementsByTagName('Director'):
+ director_elem = a.getElementsByTagName('Director')
+ for b in director_elem:
+ directors.append(helpers.get_xml_attr(b, 'tag'))
+
+ aspect_ratio = ''
+ audio_channels = None
+ audio_codec = ''
+ bitrate = None
+ container = ''
+ height = None
+ video_codec = ''
+ video_framerate = ''
+ video_resolution = ''
+ width = None
+
+ if a.getElementsByTagName('Media'):
+ media_elem = a.getElementsByTagName('Media')
+ for c in media_elem:
+ aspect_ratio = helpers.get_xml_attr(c, 'aspectRatio')
+ audio_channels = helpers.get_xml_attr(c, 'audioChannels')
+ audio_codec = helpers.get_xml_attr(c, 'audioCodec')
+ bitrate = helpers.get_xml_attr(c, 'bitrate')
+ container = helpers.get_xml_attr(c, 'container')
+ height = helpers.get_xml_attr(c, 'height')
+ video_codec = helpers.get_xml_attr(c, 'videoCodec')
+ video_framerate = helpers.get_xml_attr(c, 'videoFrameRate')
+ video_resolution = helpers.get_xml_attr(c, 'videoResolution')
+ width = helpers.get_xml_attr(c, 'width')
+
+ machine_id = ''
+ platform = ''
+ player = ''
+
+ if a.getElementsByTagName('Player'):
+ player_elem = a.getElementsByTagName('Player')
+ for d in player_elem:
+ machine_id = helpers.get_xml_attr(d, 'machineIdentifier')
+ platform = helpers.get_xml_attr(d, 'platform')
+ player = helpers.get_xml_attr(d, 'title')
+
+ transcode_audio_channels = None
+ transcode_audio_codec = ''
+ audio_decision = 'direct play'
+ transcode_container = ''
+ transcode_height = None
+ transcode_protocol = ''
+ transcode_video_codec = ''
+ video_decision = 'direct play'
+ transcode_width = None
+
+ if a.getElementsByTagName('TranscodeSession'):
+ transcode_elem = a.getElementsByTagName('TranscodeSession')
+ for e in transcode_elem:
+ transcode_audio_channels = helpers.get_xml_attr(e, 'audioChannels')
+ transcode_audio_codec = helpers.get_xml_attr(e, 'audioCodec')
+ audio_decision = helpers.get_xml_attr(e, 'audioDecision')
+ transcode_container = helpers.get_xml_attr(e, 'container')
+ transcode_height = helpers.get_xml_attr(e, 'height')
+ transcode_protocol = helpers.get_xml_attr(e, 'protocol')
+ transcode_video_codec = helpers.get_xml_attr(e, 'videoCodec')
+ video_decision = helpers.get_xml_attr(e, 'videoDecision')
+ transcode_width = helpers.get_xml_attr(e, 'width')
+
+ user_id = None
+
+ if a.getElementsByTagName('User'):
+ user_elem = a.getElementsByTagName('User')
+ for f in user_elem:
+ user_id = helpers.get_xml_attr(f, 'id')
+
+ writers = []
+ if a.getElementsByTagName('Writer'):
+ writer_elem = a.getElementsByTagName('Writer')
+ for g in writer_elem:
+ writers.append(helpers.get_xml_attr(g, 'tag'))
+
+ actors = []
+ if a.getElementsByTagName('Role'):
+ actor_elem = a.getElementsByTagName('Role')
+ for h in actor_elem:
+ actors.append(helpers.get_xml_attr(h, 'tag'))
+
+ genres = []
+ if a.getElementsByTagName('Genre'):
+ genre_elem = a.getElementsByTagName('Genre')
+ for i in genre_elem:
+ genres.append(helpers.get_xml_attr(i, 'tag'))
+
+ output = {'added_at': added_at,
+ 'art': art,
+ 'duration': duration,
+ 'grandparent_thumb': grandparent_thumb,
+ 'grandparent_title': grandparent_title,
+ 'parent_title': parent_title,
+ 'title': title,
+ 'guid': guid,
+ 'media_index': media_index,
+ 'originally_available_at': originally_available_at,
+ 'last_viewed_at': last_viewed_at,
+ 'parent_media_index': parent_media_index,
+ 'parent_thumb': parent_thumb,
+ 'rating': rating,
+ 'thumb': thumb,
+ 'media_type': media_type,
+ 'updated_at': updated_at,
+ 'view_offset': view_offset,
+ 'year': year,
+ 'directors': directors,
+ 'aspect_ratio': aspect_ratio,
+ 'audio_channels': audio_channels,
+ 'audio_codec': audio_codec,
+ 'bitrate': bitrate,
+ 'container': container,
+ 'height': height,
+ 'video_codec': video_codec,
+ 'video_framerate': video_framerate,
+ 'video_resolution': video_resolution,
+ 'width': width,
+ 'machine_id': machine_id,
+ 'platform': platform,
+ 'player': player,
+ 'transcode_audio_channels': transcode_audio_channels,
+ 'transcode_audio_codec': transcode_audio_codec,
+ 'audio_decision': audio_decision,
+ 'transcode_container': transcode_container,
+ 'transcode_height': transcode_height,
+ 'transcode_protocol': transcode_protocol,
+ 'transcode_video_codec': transcode_video_codec,
+ 'video_decision': video_decision,
+ 'transcode_width': transcode_width,
+ 'user_id': user_id,
+ 'writers': writers,
+ 'actors': actors,
+ 'genres': genres,
+ 'studio': studio
+ }
+
+ return output
+
+def validate_database(database=None, table_name=None):
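+ # Quick sanity check used before starting an import: the file must open as SQLite and the table must expose a ratingKey column.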
+ try:
+ connection = sqlite3.connect(database, timeout=20)
+ except sqlite3.OperationalError:
+ logger.error('PlexPy Importer :: Invalid database specified.')
+ return 'Invalid database specified.'
+ except ValueError:
+ logger.error('PlexPy Importer :: Invalid database specified.')
+ return 'Invalid database specified.'
+
+ try:
+ connection.execute('SELECT ratingKey from %s' % table_name)
+ connection.close()
+ except sqlite3.OperationalError:
+ logger.error('PlexPy Importer :: Invalid database specified.')
+ return 'Invalid database specified.'
+
+ return 'success'
+
+def import_from_plexwatch(database=None, table_name=None, import_ignore_interval=0):
+
+ try:
+ connection = sqlite3.connect(database, timeout=20)
+ except sqlite3.OperationalError:
+ logger.error('PlexPy Importer :: Invalid filename.')
+ return None
+ except ValueError:
+ logger.error('PlexPy Importer :: Invalid filename.')
+ return None
+
+ try:
+ connection.execute('SELECT ratingKey from %s' % table_name)
+ except sqlite3.OperationalError:
+ logger.error('PlexPy Importer :: Database specified does not contain the required fields.')
+ return None
+
+ logger.debug(u"PlexPy Importer :: PlexWatch data import in progress...")
+
+ logger.debug(u"PlexPy Importer :: Disabling monitoring while import in progress.")
+ plexpy.schedule_job(monitor.check_active_sessions, 'Check for active sessions', hours=0, minutes=0, seconds=0)
+
+ monitor_processing = monitor.MonitorProcessing()
+ data_factory = datafactory.DataFactory()
+
+ # Get the latest friends list so we can pull user id's
+ try:
+ plextv.refresh_users()
+ except:
+ logger.debug(u"PlexPy Importer :: Unable to refresh the users list. Aborting import.")
+ return None
+
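+ # Map PlexWatch columns onto PlexPy's session_history names; fields PlexWatch never stored (user_id, platform, machine_id, media_type, view_offset) are selected as NULL and filled in from the parsed XML below.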
+ query = 'SELECT time AS started, ' \
+ 'stopped, ' \
+ 'ratingKey AS rating_key, ' \
+ 'null AS user_id, ' \
+ 'user, ' \
+ 'ip_address, ' \
+ 'paused_counter, ' \
+ 'platform AS player, ' \
+ 'null AS platform, ' \
+ 'null as machine_id, ' \
+ 'parentRatingKey as parent_rating_key, ' \
+ 'grandparentRatingKey as grandparent_rating_key, ' \
+ 'null AS media_type, ' \
+ 'null AS view_offset, ' \
+ 'xml, ' \
+ 'rating as content_rating,' \
+ 'summary,' \
+ 'title AS full_title,' \
+ 'orig_title AS title, ' \
+ 'orig_title_ep AS grandparent_title ' \
+ 'FROM ' + table_name + ' ORDER BY id'
+
+ result = connection.execute(query)
+
+ for row in result:
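+ # Row indices follow the SELECT order above: 0=started, 1=stopped, 2=rating_key, 4=user, 5=ip_address, 6=paused_counter, 7=player, 10=parent_rating_key, 11=grandparent_rating_key, 14=xml, 15=content_rating, 16=summary, 17=full_title.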
+ # Extract the xml from the Plexwatch db xml field.
+ extracted_xml = extract_plexwatch_xml(row[14])
+
+ # If the user_id no longer exists in the friends list, pull it from the xml.
+ user_id = data_factory.get_user_id(user=row[4])
+ if not user_id:
+ user_id = extracted_xml['user_id']
+
+ session_history = {'started': row[0],
+ 'stopped': row[1],
+ 'rating_key': row[2],
+ 'title': extracted_xml['title'],
+ 'parent_title': extracted_xml['parent_title'],
+ 'grandparent_title': extracted_xml['grandparent_title'],
+ 'user_id': user_id,
+ 'user': row[4],
+ 'ip_address': row[5],
+ 'paused_counter': row[6],
+ 'player': row[7],
+ 'platform': extracted_xml['platform'],
+ 'machine_id': extracted_xml['machine_id'],
+ 'parent_rating_key': row[10],
+ 'grandparent_rating_key': row[11],
+ 'media_type': extracted_xml['media_type'],
+ 'view_offset': extracted_xml['view_offset'],
+ 'video_decision': extracted_xml['video_decision'],
+ 'audio_decision': extracted_xml['audio_decision'],
+ 'duration': extracted_xml['duration'],
+ 'width': extracted_xml['width'],
+ 'height': extracted_xml['height'],
+ 'container': extracted_xml['container'],
+ 'video_codec': extracted_xml['video_codec'],
+ 'audio_codec': extracted_xml['audio_codec'],
+ 'bitrate': extracted_xml['bitrate'],
+ 'video_resolution': extracted_xml['video_resolution'],
+ 'video_framerate': extracted_xml['video_framerate'],
+ 'aspect_ratio': extracted_xml['aspect_ratio'],
+ 'audio_channels': extracted_xml['audio_channels'],
+ 'transcode_protocol': extracted_xml['transcode_protocol'],
+ 'transcode_container': extracted_xml['transcode_container'],
+ 'transcode_video_codec': extracted_xml['transcode_video_codec'],
+ 'transcode_audio_codec': extracted_xml['transcode_audio_codec'],
+ 'transcode_audio_channels': extracted_xml['transcode_audio_channels'],
+ 'transcode_width': extracted_xml['transcode_width'],
+ 'transcode_height': extracted_xml['transcode_height']
+ }
+
+ session_history_metadata = {'rating_key': row[2],
+ 'parent_rating_key': row[10],
+ 'grandparent_rating_key': row[11],
+ 'title': extracted_xml['title'],
+ 'parent_title': extracted_xml['parent_title'],
+ 'grandparent_title': extracted_xml['grandparent_title'],
+ 'index': extracted_xml['media_index'],
+ 'parent_index': extracted_xml['parent_media_index'],
+ 'thumb': extracted_xml['thumb'],
+ 'parent_thumb': extracted_xml['parent_thumb'],
+ 'grandparent_thumb': extracted_xml['grandparent_thumb'],
+ 'art': extracted_xml['art'],
+ 'media_type': extracted_xml['media_type'],
+ 'year': extracted_xml['year'],
+ 'originally_available_at': extracted_xml['originally_available_at'],
+ 'added_at': extracted_xml['added_at'],
+ 'updated_at': extracted_xml['updated_at'],
+ 'last_viewed_at': extracted_xml['last_viewed_at'],
+ 'content_rating': row[15],
+ 'summary': row[16],
+ 'rating': extracted_xml['rating'],
+ 'duration': extracted_xml['duration'],
+ 'guid': extracted_xml['guid'],
+ 'directors': extracted_xml['directors'],
+ 'writers': extracted_xml['writers'],
+ 'actors': extracted_xml['actors'],
+ 'genres': extracted_xml['genres'],
+ 'studio': extracted_xml['studio'],
+ 'full_title': row[17]
+ }
+
+ # On older versions of PMS, "clip" items were still classified as "movie" and had bad ratingKey values
+ # Just make sure that the ratingKey is indeed an integer
+ if str(row[2]).isdigit():
+ monitor_processing.write_session_history(session=session_history,
+ import_metadata=session_history_metadata,
+ is_import=True,
+ import_ignore_interval=import_ignore_interval)
+ else:
+ logger.debug(u"PlexPy Importer :: Item has bad rating_key: %s" % str(row[2]))
+
+ logger.debug(u"PlexPy Importer :: PlexWatch data import complete.")
+
+ logger.debug(u"PlexPy Importer :: Re-enabling monitoring.")
+ plexpy.initialize_scheduler()
diff --git a/plexpy/webserve.py b/plexpy/webserve.py
index 8c9efa54..e1b83b3b 100644
--- a/plexpy/webserve.py
+++ b/plexpy/webserve.py
@@ -13,13 +13,14 @@
# You should have received a copy of the GNU General Public License
# along with PlexPy. If not, see .
-from plexpy import logger, notifiers, plextv, pmsconnect, plexwatch, db, common, log_reader, datafactory, monitor
+from plexpy import logger, notifiers, plextv, pmsconnect, plexwatch, db, common, log_reader, datafactory
from plexpy.helpers import checked, radio
from mako.lookup import TemplateLookup
from mako import exceptions
import plexpy
+import threading
import cherrypy
import hashlib
import random
@@ -60,10 +61,7 @@ class WebInterface(object):
@cherrypy.expose
def home(self):
- if plexpy.CONFIG.PLEXWATCH_DATABASE == '':
- raise cherrypy.HTTPRedirect("config")
- else:
- return serve_template(templatename="index.html", title="Home")
+ return serve_template(templatename="index.html", title="Home")
@cherrypy.expose
def get_date_formats(self):
@@ -83,28 +81,27 @@ class WebInterface(object):
return json.dumps(formats)
@cherrypy.expose
- def home_stats(self, time_range='30', **kwargs):
+ def home_stats_old(self, time_range='30', **kwargs):
plex_watch = plexwatch.PlexWatch()
stats_data = plex_watch.get_home_stats(time_range)
return serve_template(templatename="home_stats.html", title="Stats", data=stats_data)
+ @cherrypy.expose
+ def home_stats(self, time_range='30', **kwargs):
+ data_factory = datafactory.DataFactory()
+ stats_data = data_factory.get_home_stats(time_range=time_range)
+
+ return serve_template(templatename="home_stats.html", title="Stats", data=stats_data)
+
@cherrypy.expose
def history(self):
return serve_template(templatename="history.html", title="History")
- @cherrypy.expose
- def history_new(self):
- return serve_template(templatename="history_new.html", title="History")
-
@cherrypy.expose
def users(self):
return serve_template(templatename="users.html", title="Users")
- @cherrypy.expose
- def users_new(self):
- return serve_template(templatename="users_new.html", title="Users")
-
@cherrypy.expose
def graphs(self):
return serve_template(templatename="graphs.html", title="Graphs")
@@ -116,8 +113,8 @@ class WebInterface(object):
@cherrypy.expose
def user(self, user=None):
try:
- plex_watch = plexwatch.PlexWatch()
- user_details = plex_watch.get_user_details(user)
+ data_factory = datafactory.DataFactory()
+ user_details = data_factory.get_user_details(user=user)
except:
logger.warn("Unable to retrieve friendly name for user %s " % user)
@@ -127,9 +124,9 @@ class WebInterface(object):
def edit_user_dialog(self, user=None, **kwargs):
if user:
try:
- plex_watch = plexwatch.PlexWatch()
+ data_factory = datafactory.DataFactory()
result = {'user': user,
- 'friendly_name': plex_watch.get_user_friendly_name(user)
+ 'friendly_name': data_factory.get_user_friendly_name(user)
}
status_message = ""
except:
@@ -146,10 +143,6 @@ class WebInterface(object):
def edit_user(self, user=None, friendly_name=None, **kwargs):
if user:
try:
- plex_watch = plexwatch.PlexWatch()
- plex_watch.set_user_friendly_name(user, friendly_name)
-
- # For the new database too
data_factory = datafactory.DataFactory()
data_factory.set_user_friendly_name(user, friendly_name)
@@ -162,23 +155,14 @@ class WebInterface(object):
@cherrypy.expose
def get_stream_data(self, row_id=None, user=None, **kwargs):
- plex_watch = plexwatch.PlexWatch()
- stream_data = plex_watch.get_stream_details(row_id)
+ data_factory = datafactory.DataFactory()
+ stream_data = data_factory.get_stream_details(row_id)
return serve_template(templatename="stream_data.html", title="Stream Data", data=stream_data, user=user)
@cherrypy.expose
def get_user_list(self, start=0, length=100, **kwargs):
- plex_watch = plexwatch.PlexWatch()
- users = plex_watch.get_user_list(start, length, kwargs)
-
- cherrypy.response.headers['Content-type'] = 'application/json'
- return json.dumps(users)
-
- @cherrypy.expose
- def get_user_list_new(self, start=0, length=100, **kwargs):
-
data_factory = datafactory.DataFactory()
users = data_factory.get_user_list(start, length, kwargs)
@@ -430,25 +414,6 @@ class WebInterface(object):
@cherrypy.expose
def get_history(self, start=0, length=100, custom_where='', **kwargs):
- if 'user' in kwargs:
- user = kwargs.get('user', "")
- custom_where = 'user = "%s"' % user
- if 'rating_key' in kwargs:
- rating_key = kwargs.get('rating_key', "")
- custom_where = 'rating_key = %s' % rating_key
- if 'grandparent_rating_key' in kwargs:
- rating_key = kwargs.get('grandparent_rating_key', "")
- custom_where = 'grandparent_rating_key = %s' % rating_key
-
- plex_watch = plexwatch.PlexWatch()
- history = plex_watch.get_history(start, length, kwargs, custom_where)
-
- cherrypy.response.headers['Content-type'] = 'application/json'
- return json.dumps(history)
-
- @cherrypy.expose
- def get_history_new(self, start=0, length=100, custom_where='', **kwargs):
-
if 'user' in kwargs:
user = kwargs.get('user', "")
custom_where = 'user = "%s"' % user
@@ -466,19 +431,11 @@ class WebInterface(object):
return json.dumps(history)
@cherrypy.expose
- def clear_all_history_new(self, **kwargs):
+ def clear_all_history(self, **kwargs):
+ from plexpy import monitor
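+ # Run the table clear on a background thread so the request returns immediately, then redirect to the config page.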
- monitor.clear_history_tables()
- raise cherrypy.HTTPRedirect("history_new")
-
- @cherrypy.expose
- def get_stream_details(self, rating_key=0, **kwargs):
-
- plex_watch = plexwatch.PlexWatch()
- stream_details = plex_watch.get_stream_details(rating_key)
-
- cherrypy.response.headers['Content-type'] = 'application/json'
- return json.dumps(stream_details)
+ threading.Thread(target=monitor.clear_history_tables).start()
+ raise cherrypy.HTTPRedirect("config")
@cherrypy.expose
def shutdown(self):
@@ -655,8 +612,8 @@ class WebInterface(object):
@cherrypy.expose
def get_user_recently_watched(self, user=None, limit='10', **kwargs):
- plex_watch = plexwatch.PlexWatch()
- result = plex_watch.get_recently_watched(user, limit)
+ data_factory = datafactory.DataFactory()
+ result = data_factory.get_recently_watched(user=user, limit=limit)
if result:
return serve_template(templatename="user_recently_watched.html", data=result,
@@ -669,8 +626,8 @@ class WebInterface(object):
@cherrypy.expose
def get_user_watch_time_stats(self, user=None, **kwargs):
- plex_watch = plexwatch.PlexWatch()
- result = plex_watch.get_user_watch_time_stats(user)
+ data_factory = datafactory.DataFactory()
+ result = data_factory.get_user_watch_time_stats(user=user)
if result:
return serve_template(templatename="user_watch_time_stats.html", data=result, title="Watch Stats")
@@ -681,8 +638,8 @@ class WebInterface(object):
@cherrypy.expose
def get_user_platform_stats(self, user=None, **kwargs):
- plex_watch = plexwatch.PlexWatch()
- result = plex_watch.get_user_platform_stats(user)
+ data_factory = datafactory.DataFactory()
+ result = data_factory.get_user_platform_stats(user=user)
if result:
return serve_template(templatename="user_platform_stats.html", data=result,
@@ -751,18 +708,6 @@ class WebInterface(object):
else:
logger.warn('Unable to retrieve data.')
- @cherrypy.expose
- def get_stream(self, row_id='', **kwargs):
-
- plex_watch = plexwatch.PlexWatch()
- result = plex_watch.get_stream_details('122')
-
- if result:
- cherrypy.response.headers['Content-type'] = 'application/json'
- return result
- else:
- logger.warn('Unable to retrieve data.')
-
@cherrypy.expose
def get_user_ips(self, start=0, length=100, custom_where='', **kwargs):
@@ -770,77 +715,20 @@ class WebInterface(object):
user = kwargs.get('user', "")
custom_where = 'user = "%s"' % user
- plex_watch = plexwatch.PlexWatch()
- history = plex_watch.get_user_unique_ips(start, length, kwargs, custom_where)
+ data_factory = datafactory.DataFactory()
+ history = data_factory.get_user_unique_ips(start=start,
+ length=length,
+ kwargs=kwargs,
+ custom_where=custom_where)
cherrypy.response.headers['Content-type'] = 'application/json'
return json.dumps(history)
- @cherrypy.expose
- def get_watched(self, user=None, limit='10', **kwargs):
-
- plex_watch = plexwatch.PlexWatch()
- result = plex_watch.get_recently_watched(user, limit)
-
- if result:
- cherrypy.response.headers['Content-type'] = 'application/json'
- return json.dumps(result)
- else:
- logger.warn('Unable to retrieve data.')
-
- @cherrypy.expose
- def get_time_stats(self, user=None, **kwargs):
-
- plex_watch = plexwatch.PlexWatch()
- result = plex_watch.get_user_watch_time_stats(user)
-
- if result:
- cherrypy.response.headers['Content-type'] = 'application/json'
- return json.dumps(result)
- else:
- logger.warn('Unable to retrieve data.')
-
- @cherrypy.expose
- def get_platform_stats(self, user=None, **kwargs):
-
- plex_watch = plexwatch.PlexWatch()
- result = plex_watch.get_user_platform_stats(user)
-
- if result:
- cherrypy.response.headers['Content-type'] = 'application/json'
- return json.dumps(result)
- else:
- logger.warn('Unable to retrieve data.')
-
- @cherrypy.expose
- def get_user_gravatar_image(self, user=None, **kwargs):
-
- plex_watch = plexwatch.PlexWatch()
- result = plex_watch.get_user_gravatar_image(user)
-
- if result:
- cherrypy.response.headers['Content-type'] = 'application/json'
- return json.dumps(result)
- else:
- logger.warn('Unable to retrieve data.')
-
- @cherrypy.expose
- def get_home_stats(self, time_range='30', **kwargs):
-
- plex_watch = plexwatch.PlexWatch()
- result = plex_watch.get_home_stats(time_range)
-
- if result:
- cherrypy.response.headers['Content-type'] = 'application/json'
- return json.dumps(result)
- else:
- logger.warn('Unable to retrieve data.')
-
@cherrypy.expose
def get_plays_by_date(self, time_range='30', **kwargs):
- plex_watch = plexwatch.PlexWatch()
- result = plex_watch.get_total_plays_per_day(time_range)
+ data_factory = datafactory.DataFactory()
+ result = data_factory.get_total_plays_per_day(time_range=time_range)
if result:
cherrypy.response.headers['Content-type'] = 'application/json'
@@ -851,8 +739,8 @@ class WebInterface(object):
@cherrypy.expose
def get_plays_by_dayofweek(self, time_range='30', **kwargs):
- plex_watch = plexwatch.PlexWatch()
- result = plex_watch.get_total_plays_per_dayofweek(time_range)
+ data_factory = datafactory.DataFactory()
+ result = data_factory.get_total_plays_per_dayofweek(time_range=time_range)
if result:
cherrypy.response.headers['Content-type'] = 'application/json'
@@ -863,8 +751,8 @@ class WebInterface(object):
@cherrypy.expose
def get_plays_by_hourofday(self, time_range='30', **kwargs):
- plex_watch = plexwatch.PlexWatch()
- result = plex_watch.get_total_plays_per_hourofday(time_range)
+ data_factory = datafactory.DataFactory()
+ result = data_factory.get_total_plays_per_hourofday(time_range=time_range)
if result:
cherrypy.response.headers['Content-type'] = 'application/json'
@@ -1040,3 +928,22 @@ class WebInterface(object):
return result
else:
logger.warn('Unable to retrieve data.')
+
+ @cherrypy.expose
+ def get_plexwatch_export_data(self, database_path=None, table_name=None, import_ignore_interval=0, **kwargs):
+ from plexpy import plexwatch_import
+
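+ # Validate the database up front; the import itself runs on a background thread so this request returns right away with a status message.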
+ db_check_msg = plexwatch_import.validate_database(database=database_path,
+ table_name=table_name)
+ if db_check_msg == 'success':
+ threading.Thread(target=plexwatch_import.import_from_plexwatch,
+ kwargs={'database': database_path,
+ 'table_name': table_name,
+ 'import_ignore_interval': import_ignore_interval}).start()
+ return 'Import has started. Check the PlexPy logs to monitor any problems.'
+ else:
+ return db_check_msg
+
+ @cherrypy.expose
+ def plexwatch_import(self, **kwargs):
+ return serve_template(templatename="plexwatch_import.html", title="Import PlexWatch Database")
\ No newline at end of file