This commit is contained in:
Hellowlol 2015-12-16 00:28:15 +01:00
parent 9359567a8a
commit 2fcd55eb60
10 changed files with 1671 additions and 156 deletions

5
.gitignore vendored
View file

@ -23,6 +23,9 @@ cache/*
*.csr
*.pem
# Mergetool
*.orig
# OS generated files #
######################
.DS_Store?
@ -32,7 +35,7 @@ Icon?
Thumbs.db
#Ignore files generated by PyCharm
.idea/*
*.idea/*
#Ignore files generated by vi
*.swp

732
lib/profilehooks.py Normal file
View file

@ -0,0 +1,732 @@
"""
Profiling hooks
This module contains a couple of decorators (`profile` and `coverage`) that
can be used to wrap functions and/or methods to produce profiles and line
coverage reports. There's a third convenient decorator (`timecall`) that
measures the duration of function execution without the extra profiling
overhead.
Usage example (Python 2.4 or newer)::
from profilehooks import profile, coverage
@profile # or @coverage
def fn(n):
if n < 2: return 1
else: return n * fn(n-1)
print fn(42)
Usage example (Python 2.3 or older)::
from profilehooks import profile, coverage
def fn(n):
if n < 2: return 1
else: return n * fn(n-1)
# Now wrap that function in a decorator
fn = profile(fn) # or coverage(fn)
print fn(42)
Reports for all thusly decorated functions will be printed to sys.stdout
on program termination. You can alternatively request immediate
reports for each call by passing immediate=True to the profile decorator.
There's also a @timecall decorator for printing the time to sys.stderr
every time a function is called, when you just want to get a rough measure
instead of a detailed (but costly) profile.
Caveats
A thread on python-dev convinced me that hotshot produces bogus numbers.
See http://mail.python.org/pipermail/python-dev/2005-November/058264.html
I don't know what will happen if a decorated function will try to call
another decorated function. All decorators probably need to explicitly
support nested profiling (currently TraceFuncCoverage is the only one
that supports this, while HotShotFuncProfile has support for recursive
functions.)
Profiling with hotshot creates temporary files (*.prof for profiling,
*.cprof for coverage) in the current directory. These files are not
cleaned up. Exception: when you specify a filename to the profile
decorator (to store the pstats.Stats object for later inspection),
the temporary file will be the filename you specified with '.raw'
appended at the end.
Coverage analysis with hotshot seems to miss some executions resulting
in lower line counts and some lines erroneously marked as never
executed. For this reason coverage analysis now uses trace.py which is
slower, but more accurate.
Copyright (c) 2004--2008 Marius Gedminas <marius@pov.lt>
Copyright (c) 2007 Hanno Schlichting
Copyright (c) 2008 Florian Schulze
Released under the MIT licence since December 2006:
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
(Previously it was distributed under the GNU General Public Licence.)
"""
# $Id: profilehooks.py 29 2010-08-13 16:29:20Z mg $
__author__ = "Marius Gedminas (marius@gedmin.as)"
__copyright__ = "Copyright 2004-2009 Marius Gedminas"
__license__ = "MIT"
__version__ = "1.4"
__date__ = "2009-03-31"
import atexit
import inspect
import sys
import re
# For profiling
from profile import Profile
import pstats
# For hotshot profiling (inaccurate!)
try:
import hotshot
import hotshot.stats
except ImportError:
hotshot = None
# For trace.py coverage
import trace
# For hotshot coverage (inaccurate!; uses undocumented APIs; might break)
if hotshot is not None:
import _hotshot
import hotshot.log
# For cProfile profiling (best)
try:
import cProfile
except ImportError:
cProfile = None
# For timecall
import time
# registry of available profilers
AVAILABLE_PROFILERS = {}
def profile(fn=None, skip=0, filename=None, immediate=False, dirs=False,
sort=None, entries=40,
profiler=('cProfile', 'profile', 'hotshot')):
"""Mark `fn` for profiling.
If `skip` is > 0, first `skip` calls to `fn` will not be profiled.
If `immediate` is False, profiling results will be printed to
sys.stdout on program termination. Otherwise results will be printed
after each call.
If `dirs` is False only the name of the file will be printed.
Otherwise the full path is used.
`sort` can be a list of sort keys (defaulting to ['cumulative',
'time', 'calls']). The following ones are recognized::
'calls' -- call count
'cumulative' -- cumulative time
'file' -- file name
'line' -- line number
'module' -- file name
'name' -- function name
'nfl' -- name/file/line
'pcalls' -- call count
'stdname' -- standard name
'time' -- internal time
`entries` limits the output to the first N entries.
`profiler` can be used to select the preferred profiler, or specify a
sequence of them, in order of preference. The default is ('cProfile',
'profile', 'hotshot').
If `filename` is specified, the profile stats will be stored in the
named file. You can load them with pstats.Stats(filename).
Usage::
def fn(...):
...
fn = profile(fn, skip=1)
If you are using Python 2.4, you should be able to use the decorator
syntax::
@profile(skip=3)
def fn(...):
...
or just ::
@profile
def fn(...):
...
"""
if fn is None: # @profile() syntax -- we are a decorator maker
def decorator(fn):
return profile(fn, skip=skip, filename=filename,
immediate=immediate, dirs=dirs,
sort=sort, entries=entries,
profiler=profiler)
return decorator
# @profile syntax -- we are a decorator.
if isinstance(profiler, str):
profiler = [profiler]
for p in profiler:
if p in AVAILABLE_PROFILERS:
profiler_class = AVAILABLE_PROFILERS[p]
break
else:
raise ValueError('only these profilers are available: %s'
% ', '.join(AVAILABLE_PROFILERS))
fp = profiler_class(fn, skip=skip, filename=filename,
immediate=immediate, dirs=dirs,
sort=sort, entries=entries)
# fp = HotShotFuncProfile(fn, skip=skip, filename=filename, ...)
# or HotShotFuncProfile
# We cannot return fp or fp.__call__ directly as that would break method
# definitions, instead we need to return a plain function.
def new_fn(*args, **kw):
return fp(*args, **kw)
new_fn.__doc__ = fn.__doc__
new_fn.__name__ = fn.__name__
new_fn.__dict__ = fn.__dict__
new_fn.__module__ = fn.__module__
return new_fn
def coverage(fn):
"""Mark `fn` for line coverage analysis.
Results will be printed to sys.stdout on program termination.
Usage::
def fn(...):
...
fn = coverage(fn)
If you are using Python 2.4, you should be able to use the decorator
syntax::
@coverage
def fn(...):
...
"""
fp = TraceFuncCoverage(fn) # or HotShotFuncCoverage
# We cannot return fp or fp.__call__ directly as that would break method
# definitions, instead we need to return a plain function.
def new_fn(*args, **kw):
return fp(*args, **kw)
new_fn.__doc__ = fn.__doc__
new_fn.__name__ = fn.__name__
new_fn.__dict__ = fn.__dict__
new_fn.__module__ = fn.__module__
return new_fn
def coverage_with_hotshot(fn):
"""Mark `fn` for line coverage analysis.
Uses the 'hotshot' module for fast coverage analysis.
BUG: Produces inaccurate results.
See the docstring of `coverage` for usage examples.
"""
fp = HotShotFuncCoverage(fn)
# We cannot return fp or fp.__call__ directly as that would break method
# definitions, instead we need to return a plain function.
def new_fn(*args, **kw):
return fp(*args, **kw)
new_fn.__doc__ = fn.__doc__
new_fn.__name__ = fn.__name__
new_fn.__dict__ = fn.__dict__
new_fn.__module__ = fn.__module__
return new_fn
class FuncProfile(object):
"""Profiler for a function (uses profile)."""
# This flag is shared between all instances
in_profiler = False
Profile = Profile
def __init__(self, fn, skip=0, filename=None, immediate=False, dirs=False,
sort=None, entries=40):
"""Creates a profiler for a function.
Every profiler has its own log file (the name of which is derived
from the function name).
FuncProfile registers an atexit handler that prints profiling
information to sys.stderr when the program terminates.
"""
self.fn = fn
self.skip = skip
self.filename = filename
self.immediate = immediate
self.dirs = dirs
self.sort = sort or ('cumulative', 'time', 'calls')
if isinstance(self.sort, str):
self.sort = (self.sort, )
self.entries = entries
self.reset_stats()
atexit.register(self.atexit)
def __call__(self, *args, **kw):
"""Profile a singe call to the function."""
self.ncalls += 1
if self.skip > 0:
self.skip -= 1
self.skipped += 1
return self.fn(*args, **kw)
if FuncProfile.in_profiler:
# handle recursive calls
return self.fn(*args, **kw)
# You cannot reuse the same profiler for many calls and accumulate
# stats that way. :-/
profiler = self.Profile()
try:
FuncProfile.in_profiler = True
return profiler.runcall(self.fn, *args, **kw)
finally:
FuncProfile.in_profiler = False
self.stats.add(profiler)
if self.immediate:
self.print_stats()
self.reset_stats()
def print_stats(self):
"""Print profile information to sys.stdout."""
funcname = self.fn.__name__
filename = self.fn.func_code.co_filename
lineno = self.fn.func_code.co_firstlineno
print
print "*** PROFILER RESULTS ***"
print "%s (%s:%s)" % (funcname, filename, lineno)
print "function called %d times" % self.ncalls,
if self.skipped:
print "(%d calls not profiled)" % self.skipped
else:
print
print
stats = self.stats
if self.filename:
stats.dump_stats(self.filename)
if not self.dirs:
stats.strip_dirs()
stats.sort_stats(*self.sort)
stats.print_stats(self.entries)
def reset_stats(self):
"""Reset accumulated profiler statistics."""
# Note: not using self.Profile, since pstats.Stats() fails then
self.stats = pstats.Stats(Profile())
self.ncalls = 0
self.skipped = 0
def atexit(self):
"""Stop profiling and print profile information to sys.stdout.
This function is registered as an atexit hook.
"""
if not self.immediate:
self.print_stats()
AVAILABLE_PROFILERS['profile'] = FuncProfile
if cProfile is not None:
class CProfileFuncProfile(FuncProfile):
"""Profiler for a function (uses cProfile)."""
Profile = cProfile.Profile
AVAILABLE_PROFILERS['cProfile'] = CProfileFuncProfile
if hotshot is not None:
class HotShotFuncProfile(object):
"""Profiler for a function (uses hotshot)."""
# This flag is shared between all instances
in_profiler = False
def __init__(self, fn, skip=0, filename=None):
"""Creates a profiler for a function.
Every profiler has its own log file (the name of which is derived
from the function name).
HotShotFuncProfile registers an atexit handler that prints
profiling information to sys.stderr when the program terminates.
The log file is not removed and remains there to clutter the
current working directory.
"""
self.fn = fn
self.filename = filename
if self.filename:
self.logfilename = filename + ".raw"
else:
self.logfilename = fn.__name__ + ".prof"
self.profiler = hotshot.Profile(self.logfilename)
self.ncalls = 0
self.skip = skip
self.skipped = 0
atexit.register(self.atexit)
def __call__(self, *args, **kw):
"""Profile a singe call to the function."""
self.ncalls += 1
if self.skip > 0:
self.skip -= 1
self.skipped += 1
return self.fn(*args, **kw)
if HotShotFuncProfile.in_profiler:
# handle recursive calls
return self.fn(*args, **kw)
try:
HotShotFuncProfile.in_profiler = True
return self.profiler.runcall(self.fn, *args, **kw)
finally:
HotShotFuncProfile.in_profiler = False
def atexit(self):
"""Stop profiling and print profile information to sys.stderr.
This function is registered as an atexit hook.
"""
self.profiler.close()
funcname = self.fn.__name__
filename = self.fn.func_code.co_filename
lineno = self.fn.func_code.co_firstlineno
print
print "*** PROFILER RESULTS ***"
print "%s (%s:%s)" % (funcname, filename, lineno)
print "function called %d times" % self.ncalls,
if self.skipped:
print "(%d calls not profiled)" % self.skipped
else:
print
print
stats = hotshot.stats.load(self.logfilename)
# hotshot.stats.load takes ages, and the .prof file eats megabytes, but
# a saved stats object is small and fast
if self.filename:
stats.dump_stats(self.filename)
# it is best to save before strip_dirs
stats.strip_dirs()
stats.sort_stats('cumulative', 'time', 'calls')
stats.print_stats(40)
AVAILABLE_PROFILERS['hotshot'] = HotShotFuncProfile
class HotShotFuncCoverage:
"""Coverage analysis for a function (uses _hotshot).
HotShot coverage is reportedly faster than trace.py, but it appears to
have problems with exceptions; also line counts in coverage reports
are generally lower than line counts produced by TraceFuncCoverage.
Is this my bug, or is it a problem with _hotshot?
"""
def __init__(self, fn):
"""Creates a profiler for a function.
Every profiler has its own log file (the name of which is derived
from the function name).
HotShotFuncCoverage registers an atexit handler that prints
profiling information to sys.stderr when the program terminates.
The log file is not removed and remains there to clutter the
current working directory.
"""
self.fn = fn
self.logfilename = fn.__name__ + ".cprof"
self.profiler = _hotshot.coverage(self.logfilename)
self.ncalls = 0
atexit.register(self.atexit)
def __call__(self, *args, **kw):
"""Profile a singe call to the function."""
self.ncalls += 1
return self.profiler.runcall(self.fn, args, kw)
def atexit(self):
"""Stop profiling and print profile information to sys.stderr.
This function is registered as an atexit hook.
"""
self.profiler.close()
funcname = self.fn.__name__
filename = self.fn.func_code.co_filename
lineno = self.fn.func_code.co_firstlineno
print
print "*** COVERAGE RESULTS ***"
print "%s (%s:%s)" % (funcname, filename, lineno)
print "function called %d times" % self.ncalls
print
fs = FuncSource(self.fn)
reader = hotshot.log.LogReader(self.logfilename)
for what, (filename, lineno, funcname), tdelta in reader:
if filename != fs.filename:
continue
if what == hotshot.log.LINE:
fs.mark(lineno)
if what == hotshot.log.ENTER:
# hotshot gives us the line number of the function definition
# and never gives us a LINE event for the first statement in
# a function, so if we didn't perform this mapping, the first
# statement would be marked as never executed
if lineno == fs.firstlineno:
lineno = fs.firstcodelineno
fs.mark(lineno)
reader.close()
print fs
class TraceFuncCoverage:
"""Coverage analysis for a function (uses trace module).
HotShot coverage analysis is reportedly faster, but it appears to have
problems with exceptions.
"""
# Shared between all instances so that nested calls work
tracer = trace.Trace(count=True, trace=False,
ignoredirs=[sys.prefix, sys.exec_prefix])
# This flag is also shared between all instances
tracing = False
def __init__(self, fn):
"""Creates a profiler for a function.
Every profiler has its own log file (the name of which is derived
from the function name).
TraceFuncCoverage registers an atexit handler that prints
profiling information to sys.stderr when the program terminates.
The log file is not removed and remains there to clutter the
current working directory.
"""
self.fn = fn
self.logfilename = fn.__name__ + ".cprof"
self.ncalls = 0
atexit.register(self.atexit)
def __call__(self, *args, **kw):
"""Profile a singe call to the function."""
self.ncalls += 1
if TraceFuncCoverage.tracing:
return self.fn(*args, **kw)
try:
TraceFuncCoverage.tracing = True
return self.tracer.runfunc(self.fn, *args, **kw)
finally:
TraceFuncCoverage.tracing = False
def atexit(self):
"""Stop profiling and print profile information to sys.stderr.
This function is registered as an atexit hook.
"""
funcname = self.fn.__name__
filename = self.fn.func_code.co_filename
lineno = self.fn.func_code.co_firstlineno
print
print "*** COVERAGE RESULTS ***"
print "%s (%s:%s)" % (funcname, filename, lineno)
print "function called %d times" % self.ncalls
print
fs = FuncSource(self.fn)
for (filename, lineno), count in self.tracer.counts.items():
if filename != fs.filename:
continue
fs.mark(lineno, count)
print fs
never_executed = fs.count_never_executed()
if never_executed:
print "%d lines were not executed." % never_executed
class FuncSource:
"""Source code annotator for a function."""
blank_rx = re.compile(r"^\s*finally:\s*(#.*)?$")
def __init__(self, fn):
self.fn = fn
self.filename = inspect.getsourcefile(fn)
self.source, self.firstlineno = inspect.getsourcelines(fn)
self.sourcelines = {}
self.firstcodelineno = self.firstlineno
self.find_source_lines()
def find_source_lines(self):
"""Mark all executable source lines in fn as executed 0 times."""
strs = trace.find_strings(self.filename)
lines = trace.find_lines_from_code(self.fn.func_code, strs)
self.firstcodelineno = sys.maxint
for lineno in lines:
self.firstcodelineno = min(self.firstcodelineno, lineno)
self.sourcelines.setdefault(lineno, 0)
if self.firstcodelineno == sys.maxint:
self.firstcodelineno = self.firstlineno
def mark(self, lineno, count=1):
"""Mark a given source line as executed count times.
Multiple calls to mark for the same lineno add up.
"""
self.sourcelines[lineno] = self.sourcelines.get(lineno, 0) + count
def count_never_executed(self):
"""Count statements that were never executed."""
lineno = self.firstlineno
counter = 0
for line in self.source:
if self.sourcelines.get(lineno) == 0:
if not self.blank_rx.match(line):
counter += 1
lineno += 1
return counter
def __str__(self):
"""Return annotated source code for the function."""
lines = []
lineno = self.firstlineno
for line in self.source:
counter = self.sourcelines.get(lineno)
if counter is None:
prefix = ' ' * 7
elif counter == 0:
if self.blank_rx.match(line):
prefix = ' ' * 7
else:
prefix = '>' * 6 + ' '
else:
prefix = '%5d: ' % counter
lines.append(prefix + line)
lineno += 1
return ''.join(lines)
def timecall(fn=None, immediate=True, timer=time.time):
"""Wrap `fn` and print its execution time.
Example::
@timecall
def somefunc(x, y):
time.sleep(x * y)
somefunc(2, 3)
will print the time taken by somefunc on every call. If you want just
a summary at program termination, use
@timecall(immediate=False)
You can also choose a timing method other than the default ``time.time()``,
e.g.:
@timecall(timer=time.clock)
"""
if fn is None: # @timecall() syntax -- we are a decorator maker
def decorator(fn):
return timecall(fn, immediate=immediate, timer=timer)
return decorator
# @timecall syntax -- we are a decorator.
fp = FuncTimer(fn, immediate=immediate, timer=timer)
# We cannot return fp or fp.__call__ directly as that would break method
# definitions, instead we need to return a plain function.
def new_fn(*args, **kw):
return fp(*args, **kw)
new_fn.__doc__ = fn.__doc__
new_fn.__name__ = fn.__name__
new_fn.__dict__ = fn.__dict__
new_fn.__module__ = fn.__module__
return new_fn
class FuncTimer(object):
def __init__(self, fn, immediate, timer):
self.fn = fn
self.ncalls = 0
self.totaltime = 0
self.immediate = immediate
self.timer = timer
if not immediate:
atexit.register(self.atexit)
def __call__(self, *args, **kw):
"""Profile a singe call to the function."""
fn = self.fn
timer = self.timer
self.ncalls += 1
try:
start = timer()
return fn(*args, **kw)
finally:
duration = timer() - start
self.totaltime += duration
if self.immediate:
funcname = fn.__name__
filename = fn.func_code.co_filename
lineno = fn.func_code.co_firstlineno
print >> sys.stderr, "\n %s (%s:%s):\n %.3f seconds\n" % (
funcname, filename, lineno, duration)
def atexit(self):
if not self.ncalls:
return
funcname = self.fn.__name__
filename = self.fn.func_code.co_filename
lineno = self.fn.func_code.co_firstlineno
print ("\n %s (%s:%s):\n"
" %d calls, %.3f seconds (%.3f seconds per call)\n" % (
funcname, filename, lineno, self.ncalls,
self.totaltime, self.totaltime / self.ncalls))
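A minimal usage sketch of the vendored decorators (not part of this commit), based only on the module docstring and the profile() signature above; the function names and the 'fib.stats' filename are made up for illustration:
```
import pstats
from profilehooks import profile, timecall

@profile(immediate=True, filename='fib.stats', sort=['cumulative'], entries=10)
def fib(n):
    # naive recursion, just to give the profiler something to chew on
    if n < 2:
        return n
    return fib(n - 1) + fib(n - 2)

@timecall  # prints the elapsed time to sys.stderr after every call
def slow_sum(n):
    return sum(range(n))

fib(20)
slow_sum(1000000)

# The stats dumped via filename= can be loaded later with pstats:
pstats.Stats('fib.stats').sort_stats('cumulative').print_stats(10)
```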

View file

@ -57,6 +57,7 @@ _INITIALIZED = False
started = False
DATA_DIR = None
BACKUP_DIR = None
CONFIG = None
@ -73,6 +74,7 @@ UMASK = None
POLLING_FAILOVER = False
def initialize(config_file):
with INIT_LOCK:
@ -82,7 +84,6 @@ def initialize(config_file):
global LATEST_VERSION
global UMASK
global POLLING_FAILOVER
CONFIG = plexpy.config.Config(config_file)
assert CONFIG is not None
@ -126,6 +127,12 @@ def initialize(config_file):
except OSError as e:
logger.error("Could not create cache dir '%s': %s", DATA_DIR, e)
plexpy.BACKUP_DIR = os.path.join(plexpy.PROG_DIR, 'backups')
try:
os.makedirs(plexpy.BACKUP_DIR)
except OSError:
pass
# Initialize the database
logger.info('Checking to see if the database has all tables....')
try:
@ -186,7 +193,6 @@ def initialize(config_file):
_INITIALIZED = True
return True
def daemonize():
if threading.activeCount() != 1:
logger.warn(
@ -801,6 +807,7 @@ def dbcheck():
conn_db.commit()
c_db.close()
def shutdown(restart=False, update=False):
cherrypy.engine.exit()
SCHED.shutdown(wait=False)
@ -833,6 +840,7 @@ def shutdown(restart=False, update=False):
os._exit(0)
def generate_uuid():
logger.debug(u"Generating UUID...")
return uuid.uuid4().hex

495
plexpy/api2.py Normal file
View file

@ -0,0 +1,495 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# This file is part of PlexPy.
#
# PlexPy is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PlexPy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with PlexPy. If not, see <http://www.gnu.org/licenses/>.
import hashlib
import inspect
import json
import os
import random
import re
import time
import traceback
import cherrypy
import xmltodict
import database
import logger
import plexpy
class API2:
def __init__(self, **kwargs):
self._api_valid_methods = self._api_docs().keys()
self._api_authenticated = False
self._api_out_type = 'json' # default
self._api_msg = None
self._api_debug = None
self._api_cmd = None
self._api_apikey = None
self._api_callback = None # JSONP
self._api_result_type = 'failed'
self._api_profileme = None # For profiling the api call
self._api_kwargs = None # Cleaned kwargs
def _api_docs(self, md=False):
""" Makes the api docs """
docs = {}
for f, _ in inspect.getmembers(self, predicate=inspect.ismethod):
if not f.startswith('_') and not f.startswith('_api'):
if md is True:
docs[f] = inspect.getdoc(getattr(self, f)) if inspect.getdoc(getattr(self, f)) else None
else:
docs[f] = ' '.join(inspect.getdoc(getattr(self, f)).split()) if inspect.getdoc(getattr(self, f)) else None
return docs
def docs_md(self):
""" Return a API.md to simplify api docs because of the decorator. """
return self._api_make_md()
def docs(self):
""" Returns a dict where commands are keys, docstring are value. """
return self._api_docs()
def _api_validate(self, *args, **kwargs):
""" sets class vars and remove unneeded parameters. """
if not plexpy.CONFIG.API_ENABLED:
self._api_msg = 'API not enabled'
elif not plexpy.CONFIG.API_KEY:
self._api_msg = 'API key not generated'
elif len(plexpy.CONFIG.API_KEY) != 32:
self._api_msg = 'API key not generated correctly'
elif 'apikey' not in kwargs:
self._api_msg = 'Parameter apikey is required'
elif kwargs.get('apikey', '') != plexpy.CONFIG.API_KEY:
self._api_msg = 'Invalid apikey'
elif 'cmd' not in kwargs:
self._api_msg = 'Parameter cmd is required. Possible commands are: %s' % ', '.join(self._api_valid_methods)
elif 'cmd' in kwargs and kwargs.get('cmd') not in self._api_valid_methods:
self._api_msg = 'Unknown command: %s. Possible commands are: %s' % (kwargs.get('cmd', ''), ', '.join(self._api_valid_methods))
self._api_callback = kwargs.pop('callback', None)
self._api_apikey = kwargs.pop('apikey', None)
self._api_cmd = kwargs.pop('cmd', None)
self._api_debug = kwargs.pop('debug', False)
self._api_profileme = kwargs.pop('profileme', None)
# Allow override for the api.
self._api_out_type = kwargs.pop('out_type', 'json')
if self._api_apikey == plexpy.CONFIG.API_KEY and plexpy.CONFIG.API_ENABLED and self._api_cmd in self._api_valid_methods:
self._api_authenticated = True
self._api_msg = None
self._api_kwargs = kwargs
elif self._api_cmd in ('get_apikey', 'docs', 'docs_md') and plexpy.CONFIG.API_ENABLED:
self._api_authenticated = True
# Remove the old error msg
self._api_msg = None
self._api_kwargs = kwargs
logger.debug(u'PlexPy APIv2 :: Cleaned kwargs %s' % self._api_kwargs)
return self._api_kwargs
def get_logs(self, sort='', search='', order='desc', regex='', start=0, end=0, **kwargs):
"""
Returns the log
Args:
sort(string, optional): time, thread, msg, loglevel
search(string, optional): 'string'
order(string, optional): desc, asc
regex(string, optional): 'regexstring'
start(int, optional): int
end(int, optional): int
Returns:
```{"response":
{"msg": "Hey",
"result": "success"},
"data": [
{"time": "29-sept.2015",
"thread: "MainThread",
"msg: "Called x from y",
"loglevel": "DEBUG"
}
]
}
```
"""
logfile = os.path.join(plexpy.CONFIG.LOG_DIR, 'plexpy.log')
templog = []
start = int(start)
end = int(end)
if regex:
logger.debug(u'PlexPy APIv2 :: Filtering log using regex %s' % regex)
reg = re.compile(regex, flags=re.I)
for line in open(logfile, 'r').readlines():
temp_loglevel_and_time = None
try:
temp_loglevel_and_time = line.split('- ')
loglvl = temp_loglevel_and_time[1].split(' :')[0].strip()
tl_tread = line.split(' :: ')
if loglvl is None:
msg = line.replace('\n', '')
else:
msg = line.split(' : ')[1].replace('\n', '')
thread = tl_tread[1].split(' : ')[0]
except IndexError:
# We assume this is a traceback
tl = (len(templog) - 1)
templog[tl]['msg'] += line.replace('\n', '')
continue
if len(line) > 1 and temp_loglevel_and_time is not None and loglvl in line:
d = {
'time': temp_loglevel_and_time[0],
'loglevel': loglvl,
'msg': msg.replace('\n', ''),
'thread': thread
}
templog.append(d)
if end > 0 or start > 0:
logger.debug(u'PlexPy APIv2 :: Slicing the log from %s to %s' % (start, end))
templog = templog[start:end]
if sort:
logger.debug(u'PlexPy APIv2 :: Sorting log based on %s' % sort)
templog = sorted(templog, key=lambda k: k[sort])
if search:
logger.debug(u'PlexPy APIv2 :: Searching log values for %s' % search)
tt = [d for d in templog for k, v in d.items() if search.lower() in v.lower()]
if len(tt):
templog = tt
if regex:
tt = []
for l in templog:
stringdict = ' '.join('{}{}'.format(k, v) for k, v in l.items())
if reg.search(stringdict):
tt.append(l)
if len(tt):
templog = tt
if order == 'desc':
templog = templog[::-1]
self.data = templog
return templog
def get_settings(self, key=''):
""" Fetches all settings from the config file
Args:
key(string, optional): 'Run it without args to see all settings'
Returns:
json:
```
{General: {api_enabled: true, ...},
Advanced: {cache_sizemb: "32", ...}}
```
"""
interface_dir = os.path.join(plexpy.PROG_DIR, 'data/interfaces/')
interface_list = [name for name in os.listdir(interface_dir) if
os.path.isdir(os.path.join(interface_dir, name))]
conf = plexpy.CONFIG._config
config = {}
# Truthify the dict
for k, v in conf.iteritems():
if isinstance(v, dict):
d = {}
for kk, vv in v.iteritems():
if vv == '0' or vv == '1':
d[kk] = bool(int(vv))
else:
d[kk] = vv
config[k] = d
if k == 'General':
config[k]['interface'] = interface_dir
config[k]['interface_list'] = interface_list
if key:
return config.get(key, None)
return config
def sql(self, query=''):
""" Query the db with raw sql, makes backup of
the db if the backup is older then 24h
"""
if not query:
return
# allow the user to shoot themselves
# in the foot, but not in the head..
if not len(os.listdir(plexpy.BACKUP_DIR)):
self.backupdb()
else:
# If any existing backup is more than 24 h old, make a new backup
if any([os.path.getctime(os.path.join(plexpy.BACKUP_DIR, file_)) <
(time.time() - 86400) for file_ in os.listdir(plexpy.BACKUP_DIR)]):
self.backupdb()
db = database.MonitorDatabase()
rows = db.select(query)
self.data = rows
return rows
def backupdb(self, cleanup=False):
""" Makes a backup of the db, removes all but the 3 last backups
Args:
cleanup: (bool, optional)
"""
data = database.make_backup(cleanup=cleanup)
if data:
self.result_type = 'success'
else:
self.result_type = 'failed'
return data
def restart(self, **kwargs):
""" Restarts plexpy """
plexpy.SIGNAL = 'restart'
self.msg = 'Restarting plexpy'
self.result_type = 'success'
def update(self, **kwargs):
""" Check for updates on Github """
plexpy.SIGNAL = 'update'
self.msg = 'Updating plexpy'
self.result_type = 'success'
def _api_make_md(self):
""" Tries to make a API.md to simplify the api docs """
head = '''# API Reference\n
The API is still pretty new and needs some serious cleaning up on the backend but should be reasonably functional. There are no error codes yet.
## General structure
The API endpoint is `http://ip:port + HTTP_ROOT + /api?apikey=$apikey&cmd=$command`
Response example
```
{
"response": {
"data": [
{
"loglevel": "INFO",
"msg": "Signal 2 caught, saving and exiting...",
"thread": "MainThread",
"time": "22-sep-2015 01:42:56 "
}
],
"message": null,
"result": "success"
}
}
```
General parameters:
out_type: 'xml',
callback: 'pong',
debug: 1
## API methods'''
body = ''
doc = self._api_docs(md=True)
for k in sorted(doc):
v = doc.get(k)
body += '### %s\n' % k
body += '' if not v else v + '\n'
body += '\n\n'
result = head + '\n\n' + body
return '<div style="white-space: pre-wrap">' + result + '</div>'
def get_apikey(self, username='', password=''):
""" Fetches apikey
Args:
username(string, optional): Your username
password(string, optional): Your password
Returns:
string: Apikey. Args are required if auth is enabled.
Makes and saves the apikey if it does not exist.
"""
apikey = hashlib.sha224(str(random.getrandbits(256))).hexdigest()[0:32]
if plexpy.CONFIG.HTTP_USERNAME and plexpy.CONFIG.HTTP_PASSWORD:
if username == plexpy.CONFIG.HTTP_USERNAME and password == plexpy.CONFIG.HTTP_PASSWORD:
if plexpy.CONFIG.API_KEY:
self.data = plexpy.CONFIG.API_KEY
else:
self.data = apikey
plexpy.CONFIG.API_KEY = apikey
plexpy.CONFIG.write()
else:
self.msg = 'Authentication is enabled, please add the correct username and password to the parameters'
else:
if plexpy.CONFIG.API_KEY:
self.data = plexpy.CONFIG.API_KEY
else:
# Make an apikey if one doesn't exist
self.data = apikey
plexpy.CONFIG.API_KEY = apikey
plexpy.CONFIG.write()
return self.data
def _api_responds(self, result_type='success', data=None, msg=''):
""" Formats the result to a predefined dict so we can hange it the to
the desired output by _api_out_as """
if data is None:
data = {}
return {"response": {"result": result_type, "message": msg, "data": data}}
def _api_out_as(self, out):
""" Formats the response to the desired output """
if self._api_cmd == 'docs_md':
return out['response']['data']
if self._api_out_type == 'json':
cherrypy.response.headers['Content-Type'] = 'application/json;charset=UTF-8'
try:
if self._api_debug:
out = json.dumps(out, indent=4, sort_keys=True)
else:
out = json.dumps(out)
if self._api_callback is not None:
cherrypy.response.headers['Content-Type'] = 'application/javascript'
# wrap with JSONP call if requested
out = self._api_callback + '(' + out + ');'
# if we fail to generate the output fake an error
except Exception as e:
logger.info(u'PlexPy APIv2 :: ' + traceback.format_exc())
out['message'] = traceback.format_exc()
out['result'] = 'error'
elif self._api_out_type == 'xml':
cherrypy.response.headers['Content-Type'] = 'application/xml'
try:
out = xmltodict.unparse(out, pretty=True)
except Exception as e:
logger.error(u'PlexPy APIv2 :: Failed to parse xml result')
try:
out['message'] = e
out['result'] = 'error'
out = xmltodict.unparse(out, pretty=True)
except Exception as e:
logger.error(u'PlexPy APIv2 :: Failed to parse xml result error message %s' % e)
out = '''<?xml version="1.0" encoding="utf-8"?>
<response>
<message>%s</message>
<data></data>
<result>error</result>
</response>
''' % e
return out
def _api_run(self, *args, **kwargs):
""" handles the stuff from the handler """
result = {}
logger.debug(u'PlexPy APIv2 :: Original kwargs was %s' % kwargs)
self._api_validate(**kwargs)
if self._api_cmd and self._api_authenticated:
call = getattr(self, self._api_cmd)
# Profile is written to console.
if self._api_profileme:
from profilehooks import profile
call = profile(call, immediate=True)
# We allow this to fail so we get a
# traceback in the browser
if self._api_debug:
result = call(**self._api_kwargs)
else:
try:
result = call(**self._api_kwargs)
except Exception as e:
logger.error(u'PlexPy APIv2 :: Failed to run %s %s %s' % (self._api_cmd, self._api_kwargs, e))
ret = None
# The api decorated function can return different result types.
# Convert it to a list/dict before we change it to the user's
# wanted output
try:
if isinstance(result, (dict, list)):
ret = result
else:
raise
except:
try:
ret = json.loads(result)
except (ValueError, TypeError):
try:
ret = xmltodict.parse(result, attr_prefix='')
except:
pass
# Fallback if we can't parse the response
if ret is None:
ret = result
if ret or self._api_result_type == 'success':
# To allow override for restart etc
# if the call returns some data we are gonna assume it's a success
self._api_result_type = 'success'
else:
self._api_result_type = 'error'
return self._api_out_as(self._api_responds(result_type=self._api_result_type, msg=self._api_msg, data=ret))
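For illustration, a hedged client-side sketch (not part of this commit) of what a v2 api call looks like from the outside, following the endpoint layout documented in _api_make_md; the host, port and apikey below are placeholders:
```
import json
import urllib
import urllib2

# http://ip:port + HTTP_ROOT + /api?apikey=$apikey&cmd=$command
base_url = 'http://localhost:8181/api'
params = {'apikey': 'your-32-char-apikey',
          'cmd': 'get_logs',
          'search': 'error',
          'order': 'desc'}

raw = urllib2.urlopen(base_url + '?' + urllib.urlencode(params)).read()
payload = json.loads(raw)

# _api_responds() wraps everything in {"response": {"result", "message", "data"}}
if payload['response']['result'] == 'success':
    for entry in payload['response']['data']:
        print entry['time'], entry['loglevel'], entry['msg']
else:
    print payload['response']['message']
```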

View file

@ -13,20 +13,24 @@
# You should have received a copy of the GNU General Public License
# along with PlexPy. If not, see <http://www.gnu.org/licenses/>.
from plexpy import logger
import sqlite3
import os
import plexpy
import time
import sqlite3
import shutil
import threading
import time
import logger
import plexpy
db_lock = threading.Lock()
def drop_session_db():
monitor_db = MonitorDatabase()
monitor_db.action('DROP TABLE sessions')
def clear_history_tables():
logger.debug(u"PlexPy Database :: Deleting all session_history records... No turning back now bub.")
monitor_db = MonitorDatabase()
@ -35,10 +39,49 @@ def clear_history_tables():
monitor_db.action('DELETE FROM session_history_metadata')
monitor_db.action('VACUUM;')
def db_filename(filename="plexpy.db"):
""" Returns the filepath to the db """
return os.path.join(plexpy.DATA_DIR, filename)
def make_backup(cleanup=False):
""" Makes a backup of db, removes all but the last 3 backups """
backupfolder = plexpy.BACKUP_DIR
backup_file = 'plexpy.backup-%s.db' % int(time.time())
backup_file_fp = os.path.join(backupfolder, backup_file)
# In case the user has deleted it manually
if not os.path.exists(backupfolder):
os.makedirs(backupfolder)
db = MonitorDatabase()
db.connection.execute('begin immediate')
shutil.copyfile(db_filename(), backup_file_fp)
db.connection.rollback()
if cleanup:
# Delete all backup files except the last 3.
for root, dirs, files in os.walk(backupfolder):
if len(files) > 3:
all_files = [os.path.join(root, f) for f in files]
backups_sorted_on_age = sorted(all_files, key=os.path.getctime, reverse=True)
for file_ in backups_sorted_on_age[3:]:
try:
os.remove(file_)
except OSError as e:
logger.error('Failed to delete %s from the backup folder: %s' % (file_, e))
if backup_file in os.listdir(backupfolder):
logger.debug('Successfully made a backup of %s to %s in %s' % (db_filename(), backup_file, backupfolder))
return True
else:
logger.debug('Failed to make backup of %s to %s in %s' % (db_filename(), backup_file, backupfolder))
return False
def get_cache_size():
# This will protect against typecasting problems produced by empty string and None settings
if not plexpy.CONFIG.CACHE_SIZEMB:
@ -46,6 +89,7 @@ def get_cache_size():
return 0
return int(plexpy.CONFIG.CACHE_SIZEMB)
def dict_factory(cursor, row):
d = {}
for idx, col in enumerate(cursor.description):
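As a usage note, a hedged sketch (not part of this commit) of calling the new backup helper from code; it assumes plexpy.initialize() has already run so BACKUP_DIR and the database path are set:
```
import os

import plexpy
from plexpy import database

# make_backup() copies the live plexpy.db into BACKUP_DIR; with
# cleanup=True only the three newest backups are kept.
if database.make_backup(cleanup=True):
    print 'Backups on disk:', sorted(os.listdir(plexpy.BACKUP_DIR))
else:
    print 'Backup failed, see the logs.'
```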

View file

@ -13,23 +13,59 @@
# You should have received a copy of the GNU General Public License
# along with PlexPy. If not, see <http://www.gnu.org/licenses/>.
from operator import itemgetter
from xml.dom import minidom
from IPy import IP
import unicodedata
import plexpy
import datetime
import fnmatch
import shutil
import time
import sys
import re
import os
from functools import wraps
import json
import xmltodict
import os
import math
from operator import itemgetter
import re
import shutil
import socket
import sys
import time
from xml.dom import minidom
import unicodedata
import xmltodict
import plexpy
from api2 import API2
def addtoapi(*dargs, **dkwargs):
""" Helper decorator that adds function to the API class.
is used to reuse as much code as possible
args:
dargs: (string, optional) Used to rename a function
Example:
@addtoapi("i_was_renamed", "im_a_second_alias")
@addtoapi()
"""
def rd(function):
@wraps(function)
def wrapper(*args, **kwargs):
return function(*args, **kwargs)
if dargs:
# To rename the function if it sucks.. and
# allow compat with old api.
for n in dargs:
if function.__doc__ and len(function.__doc__):
function.__doc__ = function.__doc__.strip()
setattr(API2, n, function)
return wrapper
if function.__doc__ and len(function.__doc__):
function.__doc__ = function.__doc__.strip()
setattr(API2, function.__name__, function)
return wrapper
return rd
def multikeysort(items, columns):
comparers = [((itemgetter(col[1:].strip()), -1) if col.startswith('-') else (itemgetter(col.strip()), 1)) for col in columns]
@ -174,7 +210,7 @@ def human_duration(s, sig='dhms'):
if sig >= 'dh' and h > 0:
h = h + 1 if sig == 'dh' and m >= 30 else h
hd_list.append(str(h) + ' hrs')
if sig >= 'dhm' and m > 0:
m = m + 1 if sig == 'dhm' and s >= 30 else m
hd_list.append(str(m) + ' mins')
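To show the intent of the decorator, a hedged sketch (not part of this commit) of the @addtoapi pattern used in webserve.py; WebInterface here is a trimmed stand-in and ping/echo are made-up handlers:
```
from plexpy.api2 import API2
from plexpy.helpers import addtoapi

class WebInterface(object):

    @addtoapi('ping_legacy')   # registered on API2 only under the alias
    def ping(self, **kwargs):
        """ Returns pong, handy for checking the api plumbing. """
        return 'pong'

    @addtoapi()                # registered on API2 under its own name
    def echo(self, msg='', **kwargs):
        """ Returns whatever msg was passed in. """
        return msg

# Both commands now show up in the generated api docs:
docs = API2()._api_docs()
assert 'ping_legacy' in docs and 'echo' in docs
```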

View file

@ -16,10 +16,10 @@
# You should have received a copy of the GNU General Public License
# along with PlexPy. If not, see <http://www.gnu.org/licenses/>.
from plexpy import logger, helpers
from httplib import HTTPSConnection
from httplib import HTTPConnection
import ssl
from plexpy import logger, helpers
class HTTPHandler(object):

View file

@ -19,10 +19,11 @@ from urlparse import urlparse
import plexpy
import urllib2
def get_server_friendly_name():
logger.info(u"PlexPy Pmsconnect :: Requesting name from server...")
server_name = PmsConnect().get_server_pref(pref='FriendlyName')
# If friendly name is blank
if not server_name:
servers_info = PmsConnect().get_servers_info()
@ -30,7 +31,7 @@ def get_server_friendly_name():
if server['machine_identifier'] == plexpy.CONFIG.PMS_IDENTIFIER:
server_name = server['name']
break
if server_name and server_name != plexpy.CONFIG.PMS_NAME:
plexpy.CONFIG.__setattr__('PMS_NAME', server_name)
plexpy.CONFIG.write()
@ -38,6 +39,7 @@ def get_server_friendly_name():
return server_name
def refresh_libraries():
logger.info(u"PlexPy Pmsconnect :: Requesting libraries list refresh...")
@ -71,7 +73,6 @@ def refresh_libraries():
library_keys.append(section['section_id'])
if plexpy.CONFIG.HOME_LIBRARY_CARDS == ['first_run_wizard']:
plexpy.CONFIG.__setattr__('HOME_LIBRARY_CARDS', library_keys)
plexpy.CONFIG.write()
@ -206,7 +207,7 @@ class PmsConnect(object):
proto=self.protocol,
request_type='GET',
output_format=output_format)
return request
def get_childrens_list(self, rating_key='', output_format=''):
@ -223,7 +224,7 @@ class PmsConnect(object):
proto=self.protocol,
request_type='GET',
output_format=output_format)
return request
def get_server_list(self, output_format=''):
@ -300,7 +301,7 @@ class PmsConnect(object):
"""
count = '&X-Plex-Container-Size=' + count if count else ''
uri = '/library/sections/' + section_id + '/' + list_type +'?X-Plex-Container-Start=0' + count + sort_type
uri = '/library/sections/' + section_id + '/' + list_type + '?X-Plex-Container-Start=0' + count + sort_type
request = self.request_handler.make_request(uri=uri,
proto=self.protocol,
request_type='GET',
@ -835,7 +836,7 @@ class PmsConnect(object):
metadata = self.get_metadata_details(str(child_rating_key), get_media_info)
if metadata:
metadata_list.append(metadata['metadata'])
elif get_children and a.getElementsByTagName('Directory'):
dir_main = a.getElementsByTagName('Directory')
metadata_main = [d for d in dir_main if helpers.get_xml_attr(d, 'ratingKey')]
@ -844,7 +845,7 @@ class PmsConnect(object):
metadata = self.get_metadata_children_details(str(child_rating_key), get_children, get_media_info)
if metadata:
metadata_list.extend(metadata['metadata'])
output = {'metadata': metadata_list}
return output
@ -892,7 +893,7 @@ class PmsConnect(object):
metadata['section_type'] = 'track'
metadata_list = {'metadata': metadata}
return metadata_list
def get_current_activity(self):
@ -995,7 +996,7 @@ class PmsConnect(object):
machine_id = helpers.get_xml_attr(session.getElementsByTagName('Player')[0], 'machineIdentifier')
session_output = {'session_key': helpers.get_xml_attr(session, 'sessionKey'),
'section_id': helpers.get_xml_attr(session, 'librarySectionID'),
'section_id': helpers.get_xml_attr(session, 'librarySectionID'),
'media_index': helpers.get_xml_attr(session, 'index'),
'parent_media_index': helpers.get_xml_attr(session, 'parentIndex'),
'art': helpers.get_xml_attr(session, 'art'),
@ -1117,7 +1118,7 @@ class PmsConnect(object):
if helpers.get_xml_attr(session, 'type') == 'episode':
session_output = {'session_key': helpers.get_xml_attr(session, 'sessionKey'),
'section_id': helpers.get_xml_attr(session, 'librarySectionID'),
'section_id': helpers.get_xml_attr(session, 'librarySectionID'),
'media_index': helpers.get_xml_attr(session, 'index'),
'parent_media_index': helpers.get_xml_attr(session, 'parentIndex'),
'art': helpers.get_xml_attr(session, 'art'),
@ -1175,7 +1176,7 @@ class PmsConnect(object):
elif helpers.get_xml_attr(session, 'type') == 'movie':
session_output = {'session_key': helpers.get_xml_attr(session, 'sessionKey'),
'section_id': helpers.get_xml_attr(session, 'librarySectionID'),
'section_id': helpers.get_xml_attr(session, 'librarySectionID'),
'media_index': helpers.get_xml_attr(session, 'index'),
'parent_media_index': helpers.get_xml_attr(session, 'parentIndex'),
'art': helpers.get_xml_attr(session, 'art'),
@ -1233,7 +1234,7 @@ class PmsConnect(object):
elif helpers.get_xml_attr(session, 'type') == 'clip':
session_output = {'session_key': helpers.get_xml_attr(session, 'sessionKey'),
'section_id': helpers.get_xml_attr(session, 'librarySectionID'),
'section_id': helpers.get_xml_attr(session, 'librarySectionID'),
'media_index': helpers.get_xml_attr(session, 'index'),
'parent_media_index': helpers.get_xml_attr(session, 'parentIndex'),
'art': helpers.get_xml_attr(session, 'art'),
@ -1324,7 +1325,7 @@ class PmsConnect(object):
machine_id = helpers.get_xml_attr(session.getElementsByTagName('Player')[0], 'machineIdentifier')
session_output = {'session_key': helpers.get_xml_attr(session, 'sessionKey'),
'section_id': helpers.get_xml_attr(session, 'librarySectionID'),
'section_id': helpers.get_xml_attr(session, 'librarySectionID'),
'media_index': helpers.get_xml_attr(session, 'index'),
'parent_media_index': helpers.get_xml_attr(session, 'parentIndex'),
'art': helpers.get_xml_attr(session, 'art'),
@ -1409,7 +1410,7 @@ class PmsConnect(object):
children_list = {'children_count': '0',
'children_list': []
}
return parent_list
return children_list
result_data = []
@ -1556,7 +1557,7 @@ class PmsConnect(object):
'title': helpers.get_xml_attr(xml_head[0], 'title1'),
'libraries_list': libraries_list
}
return output
def get_library_children_details(self, section_id='', section_type='', list_type='all', count='', rating_key='', get_media_info=False):
@ -1613,15 +1614,15 @@ class PmsConnect(object):
if a.getAttribute('size') == '0':
logger.debug(u"PlexPy Pmsconnect :: No library data.")
childern_list = {'library_count': '0',
'childern_list': []
}
'childern_list': []
}
return childern_list
if rating_key:
library_count = helpers.get_xml_attr(xml_head[0], 'size')
else:
library_count = helpers.get_xml_attr(xml_head[0], 'totalSize')
# Get show/season info from xml_head
item_main = []
@ -1673,7 +1674,7 @@ class PmsConnect(object):
output = {'library_count': library_count,
'childern_list': childern_list
}
return output
def get_library_details(self):
@ -1788,7 +1789,7 @@ class PmsConnect(object):
except Exception as e:
logger.warn(u"PlexPy Pmsconnect :: Unable to parse XML for get_search_result_details: %s." % e)
return []
search_results_count = 0
search_results_list = {'movie': [],
'show': [],
@ -1806,8 +1807,8 @@ class PmsConnect(object):
if totalSize == 0:
logger.debug(u"PlexPy Pmsconnect :: No search results.")
search_results_list = {'results_count': search_results_count,
'results_list': []
}
'results_list': []
}
return search_results_list
for a in xml_head:
@ -1912,7 +1913,7 @@ class PmsConnect(object):
if a.getAttribute('size'):
if a.getAttribute('size') == '0':
return {}
title = helpers.get_xml_attr(a, 'title2')
if a.getElementsByTagName('Directory'):
@ -1957,34 +1958,33 @@ class PmsConnect(object):
if child_rating_key:
key = int(child_index)
children.update({key: {'rating_key': int(child_rating_key)}})
key = int(parent_index) if match_type == 'index' else parent_title
parents.update({key:
parents.update({key:
{'rating_key': int(parent_rating_key),
'children': children}
})
key = 0 if match_type == 'index' else title
key_list = {key:
{'rating_key': int(rating_key),
'children': parents },
'section_id': section_id,
'library_name': library_name
}
key_list = {key: {'rating_key': int(rating_key),
'children': parents},
'section_id': section_id,
'library_name': library_name
}
return key_list
def get_server_response(self):
# Refresh Plex remote access port mapping first
self.put_refresh_reachability()
account_data = self.get_account(output_format='xml')
try:
xml_head = account_data.getElementsByTagName('MyPlex')
except Exception as e:
logger.warn(u"PlexPy Pmsconnect :: Unable to parse XML for get_server_response: %s." % e)
return None
server_response = {}
for a in xml_head:
@ -1993,5 +1993,5 @@ class PmsConnect(object):
'public_address': helpers.get_xml_attr(a, 'publicAddress'),
'public_port': helpers.get_xml_attr(a, 'publicPort')
}
return server_response
return server_response

View file

@ -14,7 +14,7 @@
# along with PlexPy. If not, see <http://www.gnu.org/licenses/>.
from plexpy import logger, notifiers, plextv, pmsconnect, common, log_reader, datafactory, graphs, users, libraries
from plexpy.helpers import checked, radio, get_ip
from plexpy.helpers import checked, addtoapi, get_ip
from mako.lookup import TemplateLookup
from mako import exceptions
@ -26,6 +26,7 @@ import hashlib
import random
import json
import os
from api2 import API2
try:
# pylint:disable=E0611
@ -101,10 +102,29 @@ class WebInterface(object):
return serve_template(templatename="welcome.html", title="Welcome", config=config)
@cherrypy.expose
def discover(self, token=''):
"""
Returns the servers that you own as a
list of dicts (formatted for selectize)
@addtoapi()
def discover(self, token):
""" Gets all your servers that are published to plextv
Returns:
json:
```
[{"httpsRequired": "0",
"ip": "10.0.0.97",
"value": "10.0.0.97",
"label": "dude-PC",
"clientIdentifier": "1234",
"local": "1", "port": "32400"},
{"httpsRequired": "0",
"ip": "85.167.100.100",
"value": "85.167.100.100",
"label": "dude-PC",
"clientIdentifier": "1234",
"local": "0",
"port": "10294"}
]
```
"""
# Need to set token so result doesn't return http 401
plexpy.CONFIG.__setattr__('PMS_TOKEN', token)
@ -132,7 +152,10 @@ class WebInterface(object):
return serve_template(templatename="index.html", title="Home", config=config)
@cherrypy.expose
@addtoapi()
def get_date_formats(self):
""" Get the date and time formats used by plexpy """
if plexpy.CONFIG.DATE_FORMAT:
date_format = plexpy.CONFIG.DATE_FORMAT
else:
@ -212,7 +235,7 @@ class WebInterface(object):
library_cards = plexpy.CONFIG.HOME_LIBRARY_CARDS
stats_data = data_factory.get_library_stats(library_cards=library_cards)
return serve_template(templatename="library_stats.html", title="Library Stats", data=stats_data)
@cherrypy.expose
@ -242,6 +265,7 @@ class WebInterface(object):
return serve_template(templatename="libraries.html", title="Libraries", config=config)
@cherrypy.expose
@addtoapi()
def get_library_list(self, **kwargs):
library_data = libraries.Libraries()
@ -251,10 +275,37 @@ class WebInterface(object):
return json.dumps(library_list)
@cherrypy.expose
@addtoapi()
def get_library_sections(self, **kwargs):
""" Get the library sections from pms
Returns:
json:
```
[{"section_id": 1, "section_name": "Movies"},
{"section_id": 7, "section_name": "Music"},
{"section_id": 2, "section_name": "TV Shows"}
]
```
"""
library_data = libraries.Libraries()
result = library_data.get_sections()
if result:
cherrypy.response.headers['Content-type'] = 'application/json'
return json.dumps(result)
else:
logger.warn(u"Unable to retrieve data for get_library_sections.")
@cherrypy.expose
@addtoapi() # should be added manually
def refresh_libraries_list(self, **kwargs):
threading.Thread(target=pmsconnect.refresh_libraries).start()
logger.info(u"Manual libraries list refresh requested.")
@cherrypy.expose
def library(self, section_id=None):
config = {
@ -284,10 +335,11 @@ class WebInterface(object):
else:
result = None
status_message = 'An error occurred.'
return serve_template(templatename="edit_library.html", title="Edit Library", data=result, status_message=status_message)
@cherrypy.expose
@addtoapi()
def edit_library(self, section_id=None, **kwargs):
custom_thumb = kwargs.get('custom_thumb', '')
do_notify = kwargs.get('do_notify', 0)
@ -323,10 +375,10 @@ class WebInterface(object):
@cherrypy.expose
def get_library_user_stats(self, section_id=None, **kwargs):
library_data = libraries.Libraries()
result = library_data.get_user_stats(section_id=section_id)
if result:
return serve_template(templatename="library_user_stats.html", data=result, title="Player Stats")
else:
@ -358,12 +410,13 @@ class WebInterface(object):
return serve_template(templatename="library_recently_added.html", data=None, title="Recently Added")
@cherrypy.expose
@addtoapi()
def get_library_media_info(self, section_id=None, section_type=None, rating_key=None, refresh='', **kwargs):
if refresh == 'true':
refresh = True
refresh = True
else:
refresh = False
refresh = False
library_data = libraries.Libraries()
result = library_data.get_datatables_media_info(section_id=section_id,
@ -371,16 +424,17 @@ class WebInterface(object):
rating_key=rating_key,
refresh=refresh,
kwargs=kwargs)
cherrypy.response.headers['Content-type'] = 'application/json'
return json.dumps(result)
@cherrypy.expose
@addtoapi()
def get_media_info_file_sizes(self, section_id=None, rating_key=None):
get_file_sizes_hold = plexpy.CONFIG.GET_FILE_SIZES_HOLD
section_ids = set(get_file_sizes_hold['section_ids'])
rating_keys = set(get_file_sizes_hold['rating_keys'])
if (section_id and section_id not in section_ids) or (rating_key and rating_key not in rating_keys):
if section_id:
section_ids.add(section_id)
@ -399,23 +453,25 @@ class WebInterface(object):
plexpy.CONFIG.GET_FILE_SIZES_HOLD = {'section_ids': list(section_ids), 'rating_keys': list(rating_keys)}
else:
result = False
cherrypy.response.headers['Content-type'] = 'application/json'
return json.dumps({'success': result})
@cherrypy.expose
@addtoapi()
def get_library_unwatched(self, section_id=None, section_type=None, **kwargs):
pms_connect = pmsconnect.PmsConnect()
result = pms_connect.get_library_children_details(section_id=section_id,
section_type=section_type,
get_media_info=True,
kwargs=kwargs)
section_type=section_type,
get_media_info=True)
# fixed a bug in this one, is this even used?
cherrypy.response.headers['Content-type'] = 'application/json'
return json.dumps(result)
@cherrypy.expose
@addtoapi()
def delete_all_library_history(self, section_id, **kwargs):
library_data = libraries.Libraries()
@ -430,6 +486,7 @@ class WebInterface(object):
return json.dumps({'message': 'no data received'})
@cherrypy.expose
@addtoapi()
def delete_library(self, section_id, **kwargs):
library_data = libraries.Libraries()
@ -444,6 +501,7 @@ class WebInterface(object):
return json.dumps({'message': 'no data received'})
@cherrypy.expose
@addtoapi()
def undelete_library(self, section_id=None, section_name=None, **kwargs):
library_data = libraries.Libraries()
@ -464,6 +522,7 @@ class WebInterface(object):
return json.dumps({'message': 'no data received'})
@cherrypy.expose
@addtoapi()
def update_section_ids(self, **kwargs):
logger.debug(u"Manual database section_id update called.")
@ -476,6 +535,7 @@ class WebInterface(object):
return "Unable to update section_id's in database. See logs for details."
@cherrypy.expose
@addtoapi()
def delete_datatable_media_info_cache(self, section_id, **kwargs):
get_file_sizes_hold = plexpy.CONFIG.GET_FILE_SIZES_HOLD
section_ids = set(get_file_sizes_hold['section_ids'])
@ -514,6 +574,7 @@ class WebInterface(object):
return serve_template(templatename="users.html", title="Users")
@cherrypy.expose
@addtoapi()
def get_user_list(self, **kwargs):
user_data = users.Users()
@ -523,7 +584,9 @@ class WebInterface(object):
return json.dumps(user_list)
@cherrypy.expose
@addtoapi()
def refresh_users_list(self, **kwargs):
""" Refresh a users list in a own thread """
threading.Thread(target=plextv.refresh_users).start()
logger.info(u"Manual users list refresh requested.")
@ -781,6 +844,7 @@ class WebInterface(object):
return "Updated graphs config values."
@cherrypy.expose
@addtoapi()
def get_plays_by_date(self, time_range='30', y_axis='plays', **kwargs):
graph = graphs.Graphs()
@ -793,6 +857,7 @@ class WebInterface(object):
logger.warn(u"Unable to retrieve data for get_plays_by_date.")
@cherrypy.expose
@addtoapi()
def get_plays_by_dayofweek(self, time_range='30', y_axis='plays', **kwargs):
graph = graphs.Graphs()
@ -805,6 +870,7 @@ class WebInterface(object):
logger.warn(u"Unable to retrieve data for get_plays_by_dayofweek.")
@cherrypy.expose
@addtoapi()
def get_plays_by_hourofday(self, time_range='30', y_axis='plays', **kwargs):
graph = graphs.Graphs()
@ -817,6 +883,7 @@ class WebInterface(object):
logger.warn(u"Unable to retrieve data for get_plays_by_hourofday.")
@cherrypy.expose
@addtoapi()
def get_plays_per_month(self, y_axis='plays', **kwargs):
graph = graphs.Graphs()
@ -829,6 +896,7 @@ class WebInterface(object):
logger.warn(u"Unable to retrieve data for get_plays_per_month.")
@cherrypy.expose
@addtoapi()
def get_plays_by_top_10_platforms(self, time_range='30', y_axis='plays', **kwargs):
graph = graphs.Graphs()
@ -841,6 +909,7 @@ class WebInterface(object):
logger.warn(u"Unable to retrieve data for get_plays_by_top_10_platforms.")
@cherrypy.expose
@addtoapi()
def get_plays_by_top_10_users(self, time_range='30', y_axis='plays', **kwargs):
graph = graphs.Graphs()
@ -853,6 +922,7 @@ class WebInterface(object):
logger.warn(u"Unable to retrieve data for get_plays_by_top_10_users.")
@cherrypy.expose
@addtoapi()
def get_plays_by_stream_type(self, time_range='30', y_axis='plays', **kwargs):
graph = graphs.Graphs()
@ -865,6 +935,7 @@ class WebInterface(object):
logger.warn(u"Unable to retrieve data for get_plays_by_stream_type.")
@cherrypy.expose
@addtoapi()
def get_plays_by_source_resolution(self, time_range='30', y_axis='plays', **kwargs):
graph = graphs.Graphs()
@ -877,6 +948,7 @@ class WebInterface(object):
logger.warn(u"Unable to retrieve data for get_plays_by_source_resolution.")
@cherrypy.expose
@addtoapi()
def get_plays_by_stream_resolution(self, time_range='30', y_axis='plays', **kwargs):
graph = graphs.Graphs()
@ -889,6 +961,7 @@ class WebInterface(object):
logger.warn(u"Unable to retrieve data for get_plays_by_stream_resolution.")
@cherrypy.expose
@addtoapi()
def get_stream_type_by_top_10_users(self, time_range='30', y_axis='plays', **kwargs):
graph = graphs.Graphs()
@ -901,6 +974,7 @@ class WebInterface(object):
logger.warn(u"Unable to retrieve data for get_stream_type_by_top_10_users.")
@cherrypy.expose
@addtoapi()
def get_stream_type_by_top_10_platforms(self, time_range='30', y_axis='plays', **kwargs):
graph = graphs.Graphs()
@ -993,6 +1067,7 @@ class WebInterface(object):
})
@cherrypy.expose
@addtoapi()
def get_plex_log(self, window=1000, **kwargs):
log_lines = []
try:
@ -1129,7 +1204,7 @@ class WebInterface(object):
"buffer_wait": plexpy.CONFIG.BUFFER_WAIT,
"group_history_tables": checked(plexpy.CONFIG.GROUP_HISTORY_TABLES)
}
return serve_template(templatename="settings.html", title="Settings", config=config)
@cherrypy.expose
@ -1137,15 +1212,15 @@ class WebInterface(object):
# Handle the variable config options. Note - keys with False values aren't getting passed
checked_configs = [
"launch_browser", "enable_https", "api_enabled", "freeze_db", "check_github", "get_file_sizes",
"launch_browser", "enable_https", "api_enabled", "freeze_db", "check_github", "get_file_sizes",
"grouping_global_history", "grouping_user_history", "grouping_charts", "pms_use_bif", "pms_ssl",
"movie_notify_enable", "tv_notify_enable", "music_notify_enable", "monitoring_use_websocket",
"tv_notify_on_start", "movie_notify_on_start", "music_notify_on_start",
"tv_notify_on_stop", "movie_notify_on_stop", "music_notify_on_stop",
"tv_notify_on_pause", "movie_notify_on_pause", "music_notify_on_pause",
"tv_notify_on_pause", "movie_notify_on_pause", "music_notify_on_pause",
"refresh_libraries_on_startup", "refresh_users_on_startup",
"ip_logging_enable", "movie_logging_enable", "tv_logging_enable", "music_logging_enable",
"pms_is_remote", "home_stats_type", "group_history_tables", "notify_consecutive",
"ip_logging_enable", "movie_logging_enable", "tv_logging_enable", "music_logging_enable",
"pms_is_remote", "home_stats_type", "group_history_tables", "notify_consecutive",
"notify_recently_added", "notify_recently_added_grandparent", "monitor_remote_access"
]
for checked_config in checked_configs:
@ -1231,11 +1306,11 @@ class WebInterface(object):
# Get new server URLs for SSL communications.
if server_changed:
plextv.get_real_pms_url()
# Get new server friendly name.
if server_changed:
pmsconnect.get_server_friendly_name()
# Reconfigure scheduler if intervals changed
if reschedule:
plexpy.initialize_scheduler()
@ -1286,6 +1361,7 @@ class WebInterface(object):
data=this_agent)
@cherrypy.expose
@addtoapi('notify')
def test_notifier(self, config_id=None, subject='PlexPy', body='Test notification', **kwargs):
cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store"
@ -1297,7 +1373,7 @@ class WebInterface(object):
break
else:
this_agent = None
if this_agent:
logger.debug(u"Sending test %s notification." % this_agent['name'])
notifiers.send_notification(this_agent['id'], subject, body, **kwargs)
@ -1308,7 +1384,8 @@ class WebInterface(object):
else:
logger.debug(u"Unable to send test notification, no notification agent ID received.")
return "No notification agent ID received."
@cherrypy.expose
def twitterStep1(self):
cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store"
@ -1373,6 +1450,7 @@ class WebInterface(object):
cherrypy.response.status = 200
@cherrypy.expose
@addtoapi()
def get_plexwatch_export_data(self, database_path=None, table_name=None, import_ignore_interval=0, **kwargs):
from plexpy import plexwatch_import
@ -1404,6 +1482,7 @@ class WebInterface(object):
return False
@cherrypy.expose
@addtoapi()
def get_server_id(self, hostname=None, port=None, identifier=None, ssl=0, remote=0, **kwargs):
from plexpy import http_handler
@ -1444,7 +1523,17 @@ class WebInterface(object):
return None
@cherrypy.expose
@addtoapi()
def get_server_pref(self, pref=None, **kwargs):
""" Return a specified server preference.
Args:
pref(string): 'name of preference'
Returns:
String: ''
"""
pms_connect = pmsconnect.PmsConnect()
result = pms_connect.get_server_pref(pref=pref)
@ -1507,7 +1596,7 @@ class WebInterface(object):
result = pms_connect.get_metadata_details(rating_key=rating_key, get_media_info=True)
if result:
metadata = result['metadata']
if metadata:
return serve_template(templatename="info.html", data=metadata, title="Info", config=config, source=source)
else:
@ -1553,15 +1642,8 @@ class WebInterface(object):
return None
##### Search #####
@cherrypy.expose
def search(self, query=''):
return serve_template(templatename="search.html", title="Search", query=query)
@cherrypy.expose
@addtoapi('search')
def search_results(self, query, **kwargs):
pms_connect = pmsconnect.PmsConnect()
@ -1573,6 +1655,10 @@ class WebInterface(object):
else:
logger.warn(u"Unable to retrieve data for search_results.")
@cherrypy.expose
def search(self, query=''):
return serve_template(templatename="search.html", title="Search", query=query)
@cherrypy.expose
def get_search_results_children(self, query, media_type=None, season_index=None, **kwargs):
@ -1582,7 +1668,7 @@ class WebInterface(object):
if media_type:
result['results_list'] = {media_type: result['results_list'][media_type]}
if media_type == 'season' and season_index:
result['results_list']['season'] = [season for season in result['results_list']['season']
if season['media_index'] == season_index]
if result:
@ -1591,10 +1677,6 @@ class WebInterface(object):
logger.warn(u"Unable to retrieve data for get_search_results_children.")
return serve_template(templatename="info_search_results_list.html", data=None, title="Search Result List")
##### Update Metadata #####
@cherrypy.expose
def update_metadata(self, rating_key=None, query=None, update=False, **kwargs):
query_string = query
@ -1612,6 +1694,7 @@ class WebInterface(object):
return serve_template(templatename="update_metadata.html", query=query, update=update, title="Info")
@cherrypy.expose
@addtoapi()
def update_metadata_details(self, old_rating_key, new_rating_key, media_type, **kwargs):
data_factory = datafactory.DataFactory()
pms_connect = pmsconnect.PmsConnect()
@ -1631,11 +1714,21 @@ class WebInterface(object):
cherrypy.response.headers['Content-type'] = 'application/json'
return json.dumps({'message': 'no data received'})
# test code
@cherrypy.expose
@addtoapi()
def get_new_rating_keys(self, rating_key='', media_type='', **kwargs):
"""
Grab the new rating keys.
Args:
rating_key(string): '',
media_type(string): ''
Returns:
json: ''
"""
pms_connect = pmsconnect.PmsConnect()
result = pms_connect.get_rating_keys_list(rating_key=rating_key, media_type=media_type)
@ -1647,7 +1740,17 @@ class WebInterface(object):
logger.warn(u"Unable to retrieve data for get_new_rating_keys.")
@cherrypy.expose
@addtoapi()
def get_old_rating_keys(self, rating_key='', media_type='', **kwargs):
"""
Grab the old rating keys.
Args:
rating_key(string): '',
media_type(string): ''
Returns:
json: ''
"""
data_factory = datafactory.DataFactory()
result = data_factory.get_rating_keys_list(rating_key=rating_key, media_type=media_type)
@ -1659,18 +1762,8 @@ class WebInterface(object):
logger.warn(u"Unable to retrieve data for get_old_rating_keys.")
##### API #####
@cherrypy.expose
def api(self, *args, **kwargs):
from plexpy.api import Api
a = Api()
a.checkParams(*args, **kwargs)
return a.fetchData()
@cherrypy.expose
@addtoapi()
def get_pms_sessions_json(self, **kwargs):
pms_connect = pmsconnect.PmsConnect()
@ -1696,6 +1789,7 @@ class WebInterface(object):
logger.warn(u"Unable to retrieve data for get_metadata_json.")
@cherrypy.expose
@addtoapi('get_metadata')
def get_metadata_xml(self, rating_key='', **kwargs):
pms_connect = pmsconnect.PmsConnect()
@ -1708,7 +1802,17 @@ class WebInterface(object):
logger.warn(u"Unable to retrieve data for get_metadata_xml.")
@cherrypy.expose
@addtoapi('get_recently_added')
def get_recently_added_json(self, count='0', **kwargs):
""" Get all items that where recelty added to plex
Args:
count(string): Number of items
Returns:
dict: of all added items
"""
pms_connect = pmsconnect.PmsConnect()
result = pms_connect.get_recently_added(count, 'json')
@ -1720,19 +1824,9 @@ class WebInterface(object):
logger.warn(u"Unable to retrieve data for get_recently_added_json.")
@cherrypy.expose
def get_episode_list_json(self, rating_key='', **kwargs):
pms_connect = pmsconnect.PmsConnect()
result = pms_connect.get_episode_list(rating_key, 'json')
if result:
cherrypy.response.headers['Content-type'] = 'application/json'
return result
else:
logger.warn(u"Unable to retrieve data for get_episode_list_json.")
@cherrypy.expose
@addtoapi()
def get_friends_list(self, **kwargs):
""" Gets the friends list of the server owner for plex.tv """
plex_tv = plextv.PlexTV()
result = plex_tv.get_plextv_friends('json')
@ -1744,7 +1838,9 @@ class WebInterface(object):
logger.warn(u"Unable to retrieve data for get_friends_list.")
@cherrypy.expose
@addtoapi()
def get_user_details(self, **kwargs):
""" Get all details about a user from plextv """
plex_tv = plextv.PlexTV()
result = plex_tv.get_plextv_user_details('json')
@ -1756,7 +1852,9 @@ class WebInterface(object):
logger.warn(u"Unable to retrieve data for get_user_details.")
@cherrypy.expose
@addtoapi()
def get_server_list(self, **kwargs):
""" Find all servers published on plextv"""
plex_tv = plextv.PlexTV()
result = plex_tv.get_plextv_server_list('json')
@ -1768,6 +1866,7 @@ class WebInterface(object):
logger.warn(u"Unable to retrieve data for get_server_list.")
@cherrypy.expose
@addtoapi()
def get_sync_lists(self, machine_id='', **kwargs):
plex_tv = plextv.PlexTV()
@ -1780,7 +1879,22 @@ class WebInterface(object):
logger.warn(u"Unable to retrieve data for get_sync_lists.")
@cherrypy.expose
@addtoapi()
def get_servers(self, **kwargs):
""" All servers
Returns:
json:
```
{"MediaContainer": {"@size": "1", "Server":
{"@name": "dude-PC",
"@host": "10.0.0.97",
"@address": "10.0.0.97",
"@port": "32400",
"@machineIdentifier": "1234",
"@version": "0.9.15.2.1663-7efd046"}}}
```
"""
pms_connect = pmsconnect.PmsConnect()
result = pms_connect.get_server_list(output_format='json')
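The docstring above pins the JSON shape of the response. A minimal client-side sketch for unpacking it; the dict-vs-list normalization is an assumption about how a single-server response may come back:

```python
import json

def list_servers(raw_json):
    # Parse the shape documented in the get_servers docstring above.
    payload = json.loads(raw_json)
    servers = payload['MediaContainer']['Server']
    if isinstance(servers, dict):  # assumption: one server may be a dict, not a list
        servers = [servers]
    return [(s['@name'], s['@address'], s['@port']) for s in servers]

# e.g. list_servers(result) -> [('dude-PC', '10.0.0.97', '32400')]
```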
@ -1792,7 +1906,24 @@ class WebInterface(object):
logger.warn(u"Unable to retrieve data for get_servers.")
@cherrypy.expose
@addtoapi()
def get_servers_info(self, **kwargs):
""" Graps info about the server
Returns:
json:
```
[{"port": "32400",
"host": "10.0.0.97",
"version": "0.9.15.2.1663-7efd046",
"name": "dude-PC",
"machine_identifier": "1234"
}
]
```
"""
pms_connect = pmsconnect.PmsConnect()
result = pms_connect.get_servers_info()
@ -1804,6 +1935,7 @@ class WebInterface(object):
logger.warn(u"Unable to retrieve data for get_servers_info.")
@cherrypy.expose
@addtoapi()
def get_server_friendly_name(self, **kwargs):
result = pmsconnect.get_server_friendly_name()
@ -1815,6 +1947,7 @@ class WebInterface(object):
logger.warn(u"Unable to retrieve data for get_server_friendly_name.")
@cherrypy.expose
@addtoapi()
def get_server_prefs(self, pref=None, **kwargs):
if pref:
@ -1830,19 +1963,18 @@ class WebInterface(object):
logger.warn(u"Unable to retrieve data for get_server_prefs.")
@cherrypy.expose
def get_library_sections(self, **kwargs):
library_data = libraries.Libraries()
result = library_data.get_sections()
if result:
cherrypy.response.headers['Content-type'] = 'application/json'
return json.dumps(result)
else:
logger.warn(u"Unable to retrieve data for get_library_sections.")
@cherrypy.expose
@addtoapi()
def get_activity(self, **kwargs):
""" Return processed and validated session list.
Returns:
json:
```
{stream_count: 1,
session: [{dict}]
}
```
"""
pms_connect = pmsconnect.PmsConnect()
result = pms_connect.get_current_activity()
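A hedged sketch of consuming the documented return shape, for example to poll the concurrent stream count. `fetch_activity` is a placeholder callable, not part of PlexPy:

```python
import json
import time

def watch_streams(fetch_activity, interval=30):
    # fetch_activity is a placeholder callable returning the JSON string
    # documented above: {"stream_count": ..., "session": [...]}.
    while True:
        payload = json.loads(fetch_activity())
        print('%s active stream(s)' % payload.get('stream_count', 0))
        time.sleep(interval)
```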
@ -1854,7 +1986,22 @@ class WebInterface(object):
logger.warn(u"Unable to retrieve data for get_activity.")
@cherrypy.expose
@addtoapi()
def get_full_users_list(self, **kwargs):
""" Get a list all users that has access to your server
Returns:
json:
```
[{"username": "Hellowlol", "user_id": "1345",
"thumb": "https://plex.tv/users/123aa/avatar",
"is_allow_sync": null,
"is_restricted": "0",
"is_home_user": "0",
"email": "John.Doe@email.com"}]
```
"""
plex_tv = plextv.PlexTV()
result = plex_tv.get_full_users_list()
@ -1866,7 +2013,43 @@ class WebInterface(object):
logger.warn(u"Unable to retrieve data for get_full_users_list.")
@cherrypy.expose
@addtoapi()
def get_sync_item(self, sync_id, **kwargs):
""" Return sync item details.
Args:
sync_id(string): unique sync id for item
output_format(string, optional): 'xml/json'
Returns:
List:
```
{"data": [
{"username": "username",
"item_downloaded_percent_complete": 100,
"user_id": "134",
"failure": "",
"title": "Some Movie",
"total_size": "747195119",
"root_title": "Movies",
"music_bitrate": "128",
"photo_quality": "49",
"friendly_name": "username",
"device_name": "Username iPad",
"platform": "iOS",
"state": "complete",
"item_downloaded_count": "1",
"content_type": "video",
"metadata_type": "movie",
"video_quality": "49",
"item_count": "1",
"rating_key": "59207",
"item_complete_count": "1",
"sync_id": "1234"}
]
}
```
"""
pms_connect = pmsconnect.PmsConnect()
result = pms_connect.get_sync_item(sync_id, output_format='json')
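A small sketch of walking the documented "data" list, assuming only the field names shown in the docstring above:

```python
import json

def summarize_sync_items(raw_json):
    # Report per-device sync progress from the documented response.
    payload = json.loads(raw_json)
    for item in payload.get('data', []):
        done = item.get('state') == 'complete'
        pct = item.get('item_downloaded_percent_complete', 0)
        print('%s: %s (%s)' % (item.get('device_name'),
                               item.get('title'),
                               'complete' if done else '%s%%' % pct))
```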
@ -1878,6 +2061,7 @@ class WebInterface(object):
logger.warn(u"Unable to retrieve data for get_sync_item.")
@cherrypy.expose
@addtoapi()
def get_sync_transcode_queue(self, **kwargs):
pms_connect = pmsconnect.PmsConnect()
@ -1889,10 +2073,8 @@ class WebInterface(object):
else:
logger.warn(u"Unable to retrieve data for get_sync_transcode_queue.")
@cherrypy.expose
@addtoapi()
def random_arnold_quotes(self, **kwargs):
from random import randint
quote_list = ['To crush your enemies, see them driven before you, and to hear the lamentation of their women!',
@ -1921,4 +2103,16 @@ class WebInterface(object):
]
random_number = randint(0, len(quote_list) - 1)
return quote_list[int(random_number)]
### API ###
@cherrypy.expose
def api(self, *args, **kwargs):
if args and 'v2' in args[0]:
return API2()._api_run(**kwargs)
else:
from plexpy.api import Api
a = Api()
a.checkParams(*args, **kwargs)
return a.fetchData()
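The route above hands anything under /api/v2 to the new API2 handler and everything else to the legacy Api class. A hedged client-side sketch; the apikey/cmd parameter names, the command names, and the default 8181 port are assumptions for illustration only:

```python
import requests  # third-party; not used by PlexPy itself

BASE = 'http://localhost:8181'  # assumed default host/port
API_KEY = 'XXXXXXXX'            # placeholder

# Legacy API: every command goes through /api.
legacy = requests.get(BASE + '/api',
                      params={'apikey': API_KEY, 'cmd': 'getHistory'})

# v2 API: a path starting with /api/v2 is handed to API2()._api_run(**kwargs).
v2 = requests.get(BASE + '/api/v2',
                  params={'apikey': API_KEY, 'cmd': 'get_activity'})

print(legacy.status_code, v2.status_code)
```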
@ -15,12 +15,13 @@
import os
import sys
import cherrypy

import plexpy
from plexpy import logger
from plexpy.helpers import create_https_certificates
from plexpy.webserve import WebInterface
def initialize(options):
@ -35,13 +36,11 @@ def initialize(options):
# self-signed ones.
if not (https_cert and os.path.exists(https_cert)) or not (https_key and os.path.exists(https_key)):
if not create_https_certificates(https_cert, https_key):
logger.warn("Unable to create certificate and key. Disabling " \
"HTTPS")
logger.warn("Unable to create certificate and key. Disabling HTTPS")
enable_https = False
if not (os.path.exists(https_cert) and os.path.exists(https_key)):
logger.warn("Disabled HTTPS because of missing certificate and " \
"key.")
logger.warn("Disabled HTTPS because of missing certificate and key.")
enable_https = False
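When the certificate or key is missing, initialize() tries to generate them via create_https_certificates and falls back to plain HTTP if that fails. For reference, a generic pyOpenSSL sketch of what such a helper typically does; this is not the PlexPy implementation, and the 10-year lifetime and common name are assumptions:

```python
from OpenSSL import crypto  # pyOpenSSL

def make_self_signed_cert(cert_path, key_path, common_name='PlexPy'):
    key = crypto.PKey()
    key.generate_key(crypto.TYPE_RSA, 2048)

    cert = crypto.X509()
    cert.get_subject().CN = common_name
    cert.set_serial_number(1000)
    cert.gmtime_adj_notBefore(0)
    cert.gmtime_adj_notAfter(10 * 365 * 24 * 60 * 60)  # ~10 years
    cert.set_issuer(cert.get_subject())                # self-signed
    cert.set_pubkey(key)
    cert.sign(key, 'sha256')

    with open(cert_path, 'wb') as f:
        f.write(crypto.dump_certificate(crypto.FILETYPE_PEM, cert))
    with open(key_path, 'wb') as f:
        f.write(crypto.dump_privatekey(crypto.FILETYPE_PEM, key))
    return True
```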
options_dict = {
@ -63,13 +62,17 @@ def initialize(options):
protocol = "http"
logger.info("Starting PlexPy web server on %s://%s:%d/", protocol,
options['http_host'], options['http_port'])
cherrypy.config.update(options_dict)
conf = {
'/': {
'tools.staticdir.root': os.path.join(plexpy.PROG_DIR, 'data'),
'tools.proxy.on': options['http_proxy'] # pay attention to X-Forwarded-Proto header
'tools.proxy.on': options['http_proxy'], # pay attention to X-Forwarded-Proto header
'tools.gzip.on': True,
'tools.gzip.mime_types': ['text/html', 'text/plain', 'text/css',
'text/javascript', 'application/json',
'application/javascript']
},
'/interfaces': {
'tools.staticdir.on': True,
@ -87,15 +90,15 @@ def initialize(options):
'tools.staticdir.on': True,
'tools.staticdir.dir': "js"
},
'/favicon.ico': {
'tools.staticfile.on': True,
'tools.staticfile.filename': os.path.join(os.path.abspath(
os.curdir), "images" + os.sep + "favicon.ico")
},
'/cache': {
'tools.staticdir.on': True,
'tools.staticdir.dir': plexpy.CONFIG.CACHE_DIR
},
'/favicon.ico': {
'tools.staticfile.on': True,
'tools.staticfile.filename': os.path.abspath(os.path.join(plexpy.PROG_DIR, 'data/interfaces/default/images/favicon.ico'))
}
}
if options['http_password']: