Mirror of https://github.com/Tautulli/Tautulli.git (synced 2025-08-21 05:43:22 -07:00)
Update apscheduler to 3.8.0
parent ec3702c358
commit dd32f1e5b4
16 changed files with 81 additions and 50 deletions
@@ -54,7 +54,7 @@ class MongoDBJobStore(BaseJobStore):
     def start(self, scheduler, alias):
         super(MongoDBJobStore, self).start(scheduler, alias)
-        self.collection.ensure_index('next_run_time', sparse=True)
+        self.collection.create_index('next_run_time', sparse=True)

     @property
     def connection(self):
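Note: PyMongo 3 deprecates Collection.ensure_index() in favour of create_index(), which is what this hunk switches to. A minimal sketch of the new call, assuming a hypothetical local MongoDB server and made-up database/collection names (not part of this commit):

    from pymongo import MongoClient

    # Placeholder connection and collection; adjust for a real deployment.
    client = MongoClient("mongodb://localhost:27017")
    collection = client["apscheduler"]["jobs"]

    # PyMongo 3.x: create_index() replaces the deprecated ensure_index().
    # sparse=True indexes only documents that actually contain the field.
    collection.create_index("next_run_time", sparse=True)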
@@ -83,7 +83,7 @@ class MongoDBJobStore(BaseJobStore):
     def add_job(self, job):
         try:
-            self.collection.insert({
+            self.collection.insert_one({
                 '_id': job.id,
                 'next_run_time': datetime_to_utc_timestamp(job.next_run_time),
                 'job_state': Binary(pickle.dumps(job.__getstate__(), self.pickle_protocol))

@@ -96,13 +96,13 @@ class MongoDBJobStore(BaseJobStore):
             'next_run_time': datetime_to_utc_timestamp(job.next_run_time),
             'job_state': Binary(pickle.dumps(job.__getstate__(), self.pickle_protocol))
         }
-        result = self.collection.update({'_id': job.id}, {'$set': changes})
-        if result and result['n'] == 0:
+        result = self.collection.update_one({'_id': job.id}, {'$set': changes})
+        if result and result.matched_count == 0:
             raise JobLookupError(job.id)

     def remove_job(self, job_id):
-        result = self.collection.remove(job_id)
-        if result and result['n'] == 0:
+        result = self.collection.delete_one({'_id': job_id})
+        if result and result.deleted_count == 0:
             raise JobLookupError(job_id)

     def remove_all_jobs(self):
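The legacy insert()/update()/remove() methods return a write-result dict (hence result['n']); they are deprecated in PyMongo 3 and removed in PyMongo 4. The insert_one()/update_one()/delete_one() replacements return result objects whose matched_count and deleted_count attributes are what the updated error checks read. Note also that remove() accepted a bare id, while delete_one() requires an explicit filter document. A minimal sketch of the new result handling, again with placeholder connection details:

    from pymongo import MongoClient

    client = MongoClient("mongodb://localhost:27017")  # placeholder URI
    jobs = client["apscheduler"]["jobs"]

    # insert_one() returns an InsertOneResult.
    jobs.insert_one({"_id": "job-1", "next_run_time": 1700000000.0})

    # update_one() returns an UpdateResult; matched_count replaces result['n'].
    result = jobs.update_one({"_id": "job-1"}, {"$set": {"next_run_time": None}})
    if result.matched_count == 0:
        raise LookupError("job-1")

    # delete_one() returns a DeleteResult; deleted_count replaces result['n'].
    result = jobs.delete_one({"_id": "job-1"})
    if result.deleted_count == 0:
        raise LookupError("job-1")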
@@ -11,7 +11,7 @@ except ImportError:  # pragma: nocover

 try:
     from sqlalchemy import (
-        create_engine, Table, Column, MetaData, Unicode, Float, LargeBinary, select)
+        create_engine, Table, Column, MetaData, Unicode, Float, LargeBinary, select, and_)
     from sqlalchemy.exc import IntegrityError
     from sqlalchemy.sql.expression import null
 except ImportError:  # pragma: nocover

@@ -134,7 +134,7 @@ class SQLAlchemyJobStore(BaseJobStore):
         jobs = []
         selectable = select([self.jobs_t.c.id, self.jobs_t.c.job_state]).\
             order_by(self.jobs_t.c.next_run_time)
-        selectable = selectable.where(*conditions) if conditions else selectable
+        selectable = selectable.where(and_(*conditions)) if conditions else selectable
         failed_job_ids = set()
         for row in self.engine.execute(selectable):
             try:
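Wrapping the criteria in and_() makes the conjunction explicit and keeps a single positional argument to .where(), which is accepted uniformly across SQLAlchemy versions; this is also why and_ joins the import list above. A small self-contained sketch of the pattern against an in-memory SQLite database, written in the legacy 1.x select([...]) calling style this file uses; the table and column definitions below are a simplified, illustrative stand-in for the real jobs table:

    from datetime import datetime, timezone

    from sqlalchemy import (
        Column, Float, MetaData, Table, Unicode, and_, create_engine, select)

    # Simplified stand-in for the SQLAlchemyJobStore jobs table.
    metadata = MetaData()
    jobs_t = Table(
        'example_jobs', metadata,
        Column('id', Unicode(191), primary_key=True),
        Column('next_run_time', Float(25), index=True))

    engine = create_engine('sqlite://')  # in-memory database for the sketch
    metadata.create_all(engine)

    now = datetime.now(timezone.utc).timestamp()
    conditions = [jobs_t.c.next_run_time != None,  # noqa: E711
                  jobs_t.c.next_run_time <= now]

    # and_() folds any number of column expressions into a single clause,
    # so .where() always receives exactly one argument.
    selectable = select([jobs_t.c.id, jobs_t.c.next_run_time]).\
        order_by(jobs_t.c.next_run_time)
    selectable = selectable.where(and_(*conditions)) if conditions else selectable

    with engine.connect() as conn:
        for row in conn.execute(selectable):
            print(row)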
@@ -1,6 +1,5 @@
 from __future__ import absolute_import

-import os
 from datetime import datetime

 from pytz import utc

@@ -65,7 +64,7 @@ class ZooKeeperJobStore(BaseJobStore):

     def lookup_job(self, job_id):
         self._ensure_paths()
-        node_path = os.path.join(self.path, job_id)
+        node_path = self.path + "/" + str(job_id)
         try:
             content, _ = self.client.get(node_path)
             doc = pickle.loads(content)

@@ -92,7 +91,7 @@ class ZooKeeperJobStore(BaseJobStore):

     def add_job(self, job):
         self._ensure_paths()
-        node_path = os.path.join(self.path, str(job.id))
+        node_path = self.path + "/" + str(job.id)
         value = {
             'next_run_time': datetime_to_utc_timestamp(job.next_run_time),
             'job_state': job.__getstate__()

@@ -105,7 +104,7 @@ class ZooKeeperJobStore(BaseJobStore):

     def update_job(self, job):
         self._ensure_paths()
-        node_path = os.path.join(self.path, str(job.id))
+        node_path = self.path + "/" + str(job.id)
         changes = {
             'next_run_time': datetime_to_utc_timestamp(job.next_run_time),
             'job_state': job.__getstate__()

@@ -118,7 +117,7 @@ class ZooKeeperJobStore(BaseJobStore):

     def remove_job(self, job_id):
         self._ensure_paths()
-        node_path = os.path.join(self.path, str(job_id))
+        node_path = self.path + "/" + str(job_id)
         try:
             self.client.delete(node_path)
         except NoNodeError:

@@ -151,7 +150,7 @@ class ZooKeeperJobStore(BaseJobStore):
         all_ids = self.client.get_children(self.path)
         for node_name in all_ids:
             try:
-                node_path = os.path.join(self.path, node_name)
+                node_path = self.path + "/" + node_name
                 content, _ = self.client.get(node_path)
                 doc = pickle.loads(content)
                 job_def = {
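All of the os.path.join() calls are replaced with plain '/' concatenation (and the now-unused import os is dropped at the top of the module) because ZooKeeper znode paths always use forward slashes, while os.path.join() uses the host OS separator and therefore produces invalid paths on Windows. A small illustration using only string handling, no ZooKeeper connection needed; ntpath stands in for os.path as it behaves on Windows, and the root path and job id are made up:

    import ntpath  # os.path as it behaves on Windows

    base_path = '/apscheduler'  # hypothetical jobstore root znode
    job_id = 'my_job'

    # os.path.join() on Windows mixes in a backslash, which ZooKeeper rejects:
    print(ntpath.join(base_path, job_id))   # /apscheduler\my_job

    # Plain concatenation always yields a valid znode path:
    print(base_path + '/' + str(job_id))    # /apscheduler/my_job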