Mirror of https://github.com/Tautulli/Tautulli.git
Add table to list exported items

Commit 5468676811 (parent c102020698)

6 changed files with 357 additions and 0 deletions
@@ -788,6 +788,13 @@ def dbcheck():
        'img_hash TEXT, cloudinary_title TEXT, cloudinary_url TEXT)'
    )

    # exports table :: This table keeps record of the exported files
    c_db.execute(
        'CREATE TABLE IF NOT EXISTS exports (id INTEGER PRIMARY KEY AUTOINCREMENT, '
        'timestamp INTEGER, section_id INTEGER, rating_key INTEGER, media_type TEXT, '
        'filename TEXT, complete INTEGER DEFAULT 0)'
    )

    # Upgrade sessions table from earlier versions
    try:
        c_db.execute('SELECT started FROM sessions')

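The new exports table is pure bookkeeping; the exported data itself is written to files in the cache directory. As a rough sketch of the intended row lifecycle (a standalone sqlite3 example against a throwaway in-memory database, not Tautulli code; the values mirror the sample row shown in the API docstring further down):

```python
import sqlite3

# Throwaway database using the same schema as the new exports table.
conn = sqlite3.connect(':memory:')
conn.execute(
    'CREATE TABLE IF NOT EXISTS exports (id INTEGER PRIMARY KEY AUTOINCREMENT, '
    'timestamp INTEGER, section_id INTEGER, rating_key INTEGER, media_type TEXT, '
    'filename TEXT, complete INTEGER DEFAULT 0)'
)

# A row is inserted when an export starts; complete defaults to 0...
cur = conn.execute(
    'INSERT INTO exports (timestamp, section_id, rating_key, media_type, filename) '
    'VALUES (?, ?, ?, ?, ?)',
    (1596484600, 1, 270716, 'movie', 'Movie - Frozen II [270716].20200803125640.json')
)
export_id = cur.lastrowid

# ...and flipped to 1 once the export file has been written to disk.
conn.execute('UPDATE exports SET complete = 1 WHERE id = ?', (export_id,))
conn.commit()
```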
@@ -27,10 +27,14 @@ from multiprocessing.dummy import Pool as ThreadPool

import plexpy
if plexpy.PYTHON2:
    import database
    import datatables
    import helpers
    import logger
    from plex import Plex
else:
    from plexpy import database
    from plexpy import datatables
    from plexpy import helpers
    from plexpy import logger
    from plexpy.plex import Plex

@@ -874,6 +878,7 @@ def export(section_id=None, rating_key=None, output_format='json'):

        item = plex.get_item(helpers.cast_to_int(rating_key))
        media_type = item.type
        section_id = item.librarySectionID

        if media_type in ('season', 'episode', 'album', 'track'):
            item_title = item._defaultSyncTitle()

@@ -895,6 +900,15 @@ def export(section_id=None, rating_key=None, output_format='json'):
    filepath = os.path.join(plexpy.CONFIG.CACHE_DIR, filename)
    logger.info("Tautulli Exporter :: Starting export for '%s'...", filename)

    export_id = set_export_state(timestamp=timestamp,
                                 section_id=section_id,
                                 rating_key=rating_key,
                                 media_type=media_type,
                                 filename=filename)
    if not export_id:
        logger.error("Tautulli Exporter :: Failed to export '%s'", filename)
        return

    attrs = MEDIA_TYPES[media_type]
    part = partial(helpers.get_attrs_to_dict, attrs=attrs)

@@ -913,4 +927,93 @@ def export(section_id=None, rating_key=None, output_format='json'):
            writer.writeheader()
            writer.writerows(flatten_result)

    set_export_complete(export_id=export_id)
    logger.info("Tautulli Exporter :: Successfully exported to '%s'", filepath)


def set_export_state(timestamp, section_id, rating_key, media_type, filename):
    keys = {'timestamp': timestamp,
            'section_id': section_id,
            'rating_key': rating_key,
            'media_type': media_type}

    values = {'filename': filename}

    db = database.MonitorDatabase()
    try:
        db.upsert(table_name='exports', key_dict=keys, value_dict=values)
        return db.last_insert_id()
    except Exception as e:
        logger.error("Tautulli Exporter :: Unable to save export to database: %s", e)
        return False


def set_export_complete(export_id):
    keys = {'id': export_id}
    values = {'complete': 1}

    db = database.MonitorDatabase()
    db.upsert(table_name='exports', key_dict=keys, value_dict=values)


def get_export_datatable(section_id=None, rating_key=None, kwargs=None):
    default_return = {'recordsFiltered': 0,
                      'recordsTotal': 0,
                      'draw': 0,
                      'data': 'null',
                      'error': 'Unable to execute database query.'}

    data_tables = datatables.DataTables()

    custom_where = []
    if section_id:
        custom_where.append(['exports.section_id', section_id])
    if rating_key:
        custom_where.append(['exports.rating_key', rating_key])

    columns = ['exports.id AS row_id',
               'exports.timestamp',
               'exports.section_id',
               'exports.rating_key',
               'exports.media_type',
               'exports.filename',
               'exports.complete'
               ]
    try:
        query = data_tables.ssp_query(table_name='exports',
                                      columns=columns,
                                      custom_where=custom_where,
                                      group_by=[],
                                      join_types=[],
                                      join_tables=[],
                                      join_evals=[],
                                      kwargs=kwargs)
    except Exception as e:
        logger.warn("Tautulli Exporter :: Unable to execute database query for get_export_datatable: %s." % e)
        return default_return

    result = query['result']

    rows = []
    for item in result:
        media_type_title = item['media_type'].title()

        row = {'row_id': item['row_id'],
               'timestamp': item['timestamp'],
               'section_id': item['section_id'],
               'rating_key': item['rating_key'],
               'media_type': item['media_type'],
               'media_type_title': media_type_title,
               'filename': item['filename'],
               'complete': item['complete']
               }

        rows.append(row)

    result = {'recordsFiltered': query['filteredCount'],
              'recordsTotal': query['totalCount'],
              'data': rows,
              'draw': query['draw']
              }

    return result

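Taken together, the two helpers bracket an export run: set_export_state() records the job as soon as it starts, so it shows up in the table immediately with complete = 0, and set_export_complete() flags the row once the file is on disk. A minimal sketch of that sequence, roughly as export() above uses it (the argument values are made up, and it assumes a running Tautulli environment with the database already initialised):

```python
# Sketch only: the bookkeeping sequence around a single export run.
from plexpy import exporter

export_id = exporter.set_export_state(timestamp=1596484600,
                                      section_id=1,
                                      rating_key=270716,
                                      media_type='movie',
                                      filename='Movie - Frozen II [270716].20200803125640.json')
if export_id:
    # ... build the export and write the JSON/CSV file to the cache directory ...
    exporter.set_export_complete(export_id=export_id)
else:
    # set_export_state() returns False if the row could not be written.
    pass
```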
@@ -870,6 +870,65 @@ class WebInterface(object):

        return {'success': result}

    @cherrypy.expose
    @cherrypy.tools.json_out()
    @requireAuth(member_of("admin"))
    @addtoapi()
    def get_library_export(self, section_id=None, rating_key=None, **kwargs):
        """ Get the data on the Tautulli export tables.

            ```
            Required parameters:
                section_id (str):       The id of the Plex library section, OR
                rating_key (str):       The rating key of the exported item

            Optional parameters:
                order_column (str):     "timestamp", "media_type_title", "rating_key",
                                        "filename", "complete"
                order_dir (str):        "desc" or "asc"
                start (int):            Row to start from, 0
                length (int):           Number of items to return, 25
                search (str):           A string to search for, "Thrones"

            Returns:
                json:
                    {"draw": 1,
                     "recordsTotal": 10,
                     "recordsFiltered": 3,
                     "data":
                        [{"row_id": 2,
                          "timestamp": 1596484600,
                          "section_id": 1,
                          "rating_key": 270716,
                          "media_type": "movie",
                          "media_type_title": "Movie",
                          "filename": "Movie - Frozen II [270716].20200803125640.json",
                          "complete": 1
                          },
                         {...},
                         {...}
                         ]
                     }
            ```
        """
        # Check if datatables json_data was received.
        # If not, then build the minimal amount of json data for a query.
        if not kwargs.get('json_data'):
            # TODO: Find some way to automatically get the columns
            dt_columns = [("timestamp", True, False),
                          ("media_type_title", True, True),
                          ("rating_key", True, True),
                          ("filename", True, True),
                          ("complete", True, False)]
            kwargs['json_data'] = build_datatables_json(kwargs, dt_columns, "timestamp")

        result = exporter.get_export_datatable(section_id=section_id,
                                               rating_key=rating_key,
                                               kwargs=kwargs)

        return result

    @cherrypy.expose
    @cherrypy.tools.json_out()
    @requireAuth(member_of("admin"))
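Since the method is registered with @addtoapi(), the same data is reachable over Tautulli's v2 HTTP API as the get_library_export command. A hedged sketch of calling it with requests (host, port, and API key are placeholders; the payload sits inside the usual {'response': {'data': ...}} envelope of the v2 API):

```python
# Hedged example: fetching the export table through Tautulli's v2 API.
import requests

params = {
    'apikey': 'YOUR_API_KEY',          # placeholder API key
    'cmd': 'get_library_export',       # command exposed by @addtoapi()
    'section_id': 1,                   # or pass rating_key for a single item
    'order_column': 'timestamp',
    'order_dir': 'desc',
    'length': 25,
}

resp = requests.get('http://localhost:8181/api/v2', params=params)
table = resp.json()['response']['data']

for row in table.get('data', []):
    status = 'complete' if row['complete'] else 'in progress'
    print(row['filename'], '-', status)
```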