Add scheduled task to optimize database

This commit is contained in:
JonnyWong16 2020-08-18 10:27:20 -07:00
parent f76bd2af8e
commit 4edd6ce911
No known key found for this signature in database
GPG key ID: 7A649674469E6574
3 changed files with 20 additions and 2 deletions

View file

@ -439,6 +439,8 @@ def initialize_scheduler():
backup_hours = CONFIG.BACKUP_INTERVAL if 1 <= CONFIG.BACKUP_INTERVAL <= 24 else 6
schedule_job(database.optimize, 'Optimize Tautulli database',
hours=24, minutes=0, seconds=0)
schedule_job(database.make_backup, 'Backup Tautulli database',
             hours=backup_hours, minutes=0, seconds=0, args=(True, True))
schedule_job(config.make_backup, 'Backup Tautulli config',

View file

@ -226,6 +226,7 @@ SCHEDULER_LIST = [
('Refresh users list', 'scheduled'),
('Refresh libraries list', 'scheduled'),
('Refresh Plex server URLs', 'scheduled'),
('Optimize Tautulli database', 'scheduled'),
('Backup Tautulli database', 'scheduled'),
('Backup Tautulli config', 'scheduled')
]

View file

@ -180,7 +180,7 @@ def import_tautulli_db(database=None, method=None, backup=False):
for table_name in session_history_tables:
    db.action('DROP TABLE {table}_copy'.format(table=table_name))
vacuum()
logger.info("Tautulli Database :: Tautulli database import complete.")
set_is_importing(False)
@ -199,7 +199,7 @@ def clear_table(table=None):
logger.debug("Tautulli Database :: Clearing database table '%s'." % table)
try:
    monitor_db.action('DELETE FROM %s' % table)
    vacuum()
    return True
except Exception as e:
    logger.error("Tautulli Database :: Failed to clear database table '%s': %s." % (table, e))
@ -232,6 +232,7 @@ def delete_rows_from_table(table, row_ids):
for row_ids_group in helpers.chunk(row_ids, sqlite_max_variable_number):
    query = "DELETE FROM " + table + " WHERE id IN (%s) " % ','.join(['?'] * len(row_ids_group))
    monitor_db.action(query, row_ids_group)
vacuum()
except Exception as e:
    logger.error("Tautulli Database :: Failed to delete rows from %s database table: %s" % (table, e))
    return False
@ -274,6 +275,20 @@ def delete_library_history(section_id=None):
return delete_session_history_rows(row_ids=row_ids)
def vacuum():
    """Reclaim unused space in the Tautulli SQLite database.

    Issues a SQLite ``VACUUM`` statement; any failure is logged rather
    than propagated, so callers may invoke this as a best-effort cleanup.
    """
    db = MonitorDatabase()
    logger.info("Tautulli Database :: Vacuuming database.")
    try:
        db.action('VACUUM')
    except Exception as e:
        logger.error("Tautulli Database :: Failed to vacuum database: %s" % e)
def optimize():
    """Optimize the Tautulli database.

    Entry point for the scheduled 'Optimize Tautulli database' job;
    currently this just runs a VACUUM via :func:`vacuum`.
    """
    vacuum()
def db_filename(filename=FILENAME):
    """ Returns the filepath to the db """