mirror of
https://github.com/Tautulli/Tautulli.git
synced 2025-07-10 15:32:38 -07:00
Fix deleting more than 1000 history entries at the same time
This commit is contained in:
parent
4a8748e322
commit
ad195f0969
2 changed files with 16 additions and 7 deletions
|
@ -27,10 +27,10 @@ import time
|
||||||
import plexpy
|
import plexpy
|
||||||
if plexpy.PYTHON2:
|
if plexpy.PYTHON2:
|
||||||
import logger
|
import logger
|
||||||
from helpers import cast_to_int, bool_true
|
from helpers import cast_to_int, bool_true, chunk
|
||||||
else:
|
else:
|
||||||
from plexpy import logger
|
from plexpy import logger
|
||||||
from plexpy.helpers import cast_to_int, bool_true
|
from plexpy.helpers import cast_to_int, bool_true, chunk
|
||||||
|
|
||||||
|
|
||||||
FILENAME = "tautulli.db"
|
FILENAME = "tautulli.db"
|
||||||
|
@ -218,12 +218,16 @@ def delete_rows_from_table(table, row_ids):
|
||||||
|
|
||||||
if row_ids:
|
if row_ids:
|
||||||
logger.info("Tautulli Database :: Deleting row ids %s from %s database table", row_ids, table)
|
logger.info("Tautulli Database :: Deleting row ids %s from %s database table", row_ids, table)
|
||||||
query = "DELETE FROM " + table + " WHERE id IN (%s) " % ','.join(['?'] * len(row_ids))
|
|
||||||
monitor_db = MonitorDatabase()
|
|
||||||
|
|
||||||
|
# SQLite versions prior to 3.32.0 (2020-05-22) have maximum variable limit of 999
|
||||||
|
# https://sqlite.org/limits.html
|
||||||
|
sqlite_max_variable_number = 999
|
||||||
|
|
||||||
|
monitor_db = MonitorDatabase()
|
||||||
try:
|
try:
|
||||||
monitor_db.action(query, row_ids)
|
for row_ids_group in chunk(row_ids, sqlite_max_variable_number):
|
||||||
return True
|
query = "DELETE FROM " + table + " WHERE id IN (%s) " % ','.join(['?'] * len(row_ids_group))
|
||||||
|
monitor_db.action(query, row_ids_group)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.error("Tautulli Database :: Failed to delete rows from %s database table: %s" % (table, e))
|
logger.error("Tautulli Database :: Failed to delete rows from %s database table: %s" % (table, e))
|
||||||
return False
|
return False
|
||||||
|
|
|
@ -31,7 +31,7 @@ import datetime
|
||||||
from functools import wraps
|
from functools import wraps
|
||||||
import hashlib
|
import hashlib
|
||||||
import imghdr
|
import imghdr
|
||||||
from future.moves.itertools import zip_longest
|
from future.moves.itertools import islice, zip_longest
|
||||||
import ipwhois
|
import ipwhois
|
||||||
import ipwhois.exceptions
|
import ipwhois.exceptions
|
||||||
import ipwhois.utils
|
import ipwhois.utils
|
||||||
|
@ -1068,6 +1068,11 @@ def grouper(iterable, n, fillvalue=None):
|
||||||
return zip_longest(fillvalue=fillvalue, *args)
|
return zip_longest(fillvalue=fillvalue, *args)
|
||||||
|
|
||||||
|
|
||||||
|
def chunk(it, size):
|
||||||
|
it = iter(it)
|
||||||
|
return iter(lambda: tuple(islice(it, size)), ())
|
||||||
|
|
||||||
|
|
||||||
def traverse_map(obj, func):
|
def traverse_map(obj, func):
|
||||||
if isinstance(obj, list):
|
if isinstance(obj, list):
|
||||||
new_obj = []
|
new_obj = []
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue