Merge pull request #1426 from clinton-hall/feature/Python3

Add Python 3 compatibility

Commit 563a6e1ecb
9 changed files with 60 additions and 49 deletions

@@ -200,7 +200,7 @@ class autoProcessMovie(object):
         release_status_old = None
         if release:
             try:
-                release_id = release.keys()[0]
+                release_id = list(release.keys())[0]
                 media_id = release[release_id]['media_id']
                 download_id = release[release_id]['download_info']['id']
                 downloader = release[release_id]['download_info']['downloader']

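Note: the change above is the core pattern of this PR. In Python 3, dict.keys() returns a view that cannot be indexed, so the code builds a list before taking the first element. A minimal standalone sketch (the release dict below is made up for illustration):

    # Standalone sketch; this release dict is not the project's real data.
    release = {'abc123': {'media_id': 42}}

    # Python 2: release.keys()[0] works because keys() returns a list.
    # Python 3: dict_keys is not subscriptable, so build a list first.
    release_id = list(release.keys())[0]

    # Equivalent without building a list:
    first_key = next(iter(release))

    print(release_id, first_key)  # -> abc123 abc123
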
@@ -418,7 +418,7 @@ class autoProcessMovie(object):
         release = None
         if release:
             try:
-                release_id = release.keys()[0]
+                release_id = list(release.keys())[0]
                 title = release[release_id]['title']
                 release_status_new = release[release_id]['status']
                 if release_status_old is None:  # we didn't have a release before, but now we do.

@@ -316,6 +316,7 @@ class autoProcessTV(object):
         else:
             for line in r.iter_lines():
                 if line:
+                    line = line.decode('utf-8')
                     logger.postprocess("{0}".format(line), section)
                     if "Moving file from" in line:
                         inputName = os.path.split(line)[1]

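Note: requests' iter_lines() yields bytes, so on Python 3 the later substring test "Moving file from" in line would raise TypeError without the added decode. A standalone sketch with made-up log lines in place of a real response:

    import os

    # The byte strings stand in for what requests' iter_lines() yields.
    raw_lines = [b"Processing episode...", b"Moving file from /downloads/show.s01e01.mkv"]

    for line in raw_lines:
        if line:
            line = line.decode('utf-8')        # bytes -> text, as added above
            if "Moving file from" in line:     # a str-in-bytes test would raise TypeError on Python 3
                inputName = os.path.split(line)[1]
                print(inputName)               # -> show.s01e01.mkv
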
@@ -23,8 +23,8 @@ def autoFork(section, inputCategory):
     replace = {'sickrage':'SickRage', 'sickchill':'SickChill', 'sickgear':'SickGear', 'medusa':'Medusa', 'sickbeard-api':'SickBeard-api'}
     f1 = replace[cfg.get("fork", "auto")] if cfg.get("fork", "auto") in replace else cfg.get("fork", "auto")
     try:
-        fork = core.FORKS.items()[core.FORKS.keys().index(f1)]
-    except:
+        fork = f1, core.FORKS[f1]
+    except KeyError:
         fork = "auto"
     protocol = "https://" if ssl else "http://"

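Note: the old lookup indexed core.FORKS.items() by the position of the key, which breaks on Python 3 where items() and keys() return views. The replacement looks the key up directly and treats a missing key as "auto". A sketch with a hypothetical FORKS mapping, not the project's real one:

    # Hypothetical FORKS mapping, used only for illustration.
    FORKS = {'SickBeard': {'process': None}, 'SickChill': {'proc_dir': None}}

    def pick_fork(f1):
        try:
            return f1, FORKS[f1]   # (name, params) tuple, as in the new code above
        except KeyError:
            return "auto"

    print(pick_fork('SickChill'))  # -> ('SickChill', {'proc_dir': None})
    print(pick_fork('unknown'))    # -> auto
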
@@ -6,6 +6,8 @@ import re
 import sqlite3
 import time
 
+from six import text_type
+
 import core
 from core import logger
 

@@ -171,14 +173,16 @@ class DBConnection(object):
 
         genParams = lambda myDict: ["{key} = ?".format(key=k) for k in myDict.keys()]
 
+        items = list(valueDict.values()) + list(keyDict.values())
         self.action(
             "UPDATE {table} "
             "SET {params} "
             "WHERE {conditions}".format(
                 table=tableName,
                 params=", ".join(genParams(valueDict)),
-                conditions=" AND ".join(genParams(keyDict))),
-            valueDict.values() + keyDict.values()
+                conditions=" AND ".join(genParams(keyDict))
+            ),
+            items
         )
 
         if self.connection.total_changes == changesBefore:

@@ -186,10 +190,10 @@ class DBConnection(object):
                 "INSERT OR IGNORE INTO {table} ({columns}) "
                 "VALUES ({values})".format(
                     table=tableName,
-                    columns=", ".join(valueDict.keys() + keyDict.keys()),
-                    values=", ".join(["?"] * len(valueDict.keys() + keyDict.keys()))
-                )
-                , valueDict.values() + keyDict.values()
+                    columns=", ".join(map(text_type, valueDict.keys())),
+                    values=", ".join(["?"] * len(valueDict.values()))
+                ),
+                list(valueDict.values())
             )
 
     def tableInfo(self, tableName):

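Note: both database hunks above deal with the same Python 3 change: dict.keys() and dict.values() now return views, so two views can no longer be joined with +, and a view is not the plain sequence sqlite3 expects as query parameters. A self-contained sketch against an in-memory database, with made-up table and column names:

    import sqlite3

    conn = sqlite3.connect(':memory:')
    conn.execute("CREATE TABLE downloads (input_name TEXT PRIMARY KEY, status INTEGER)")

    valueDict = {'status': 1}
    keyDict = {'input_name': 'show.mkv'}

    def gen_params(d):
        # same role as the genParams lambda in the diff
        return ["{key} = ?".format(key=k) for k in d.keys()]

    # dict views cannot be concatenated with + on Python 3, so build plain lists first.
    items = list(valueDict.values()) + list(keyDict.values())
    conn.execute(
        "UPDATE downloads SET {params} WHERE {conditions}".format(
            params=", ".join(gen_params(valueDict)),
            conditions=" AND ".join(gen_params(keyDict)),
        ),
        items,
    )

    # The fallback INSERT likewise needs a real list, not a dict_values view.
    conn.execute(
        "INSERT OR IGNORE INTO downloads ({columns}) VALUES ({values})".format(
            columns=", ".join(keyDict.keys()),
            values=", ".join(["?"] * len(keyDict)),
        ),
        list(keyDict.values()),
    )
    print(conn.execute("SELECT * FROM downloads").fetchall())
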
@@ -202,8 +202,10 @@ def category_search(inputDirectory, inputName, inputCategory, root, categories):
 def getDirSize(inputPath):
     from functools import partial
     prepend = partial(os.path.join, inputPath)
-    return sum(
-        [(os.path.getsize(f) if os.path.isfile(f) else getDirSize(f)) for f in map(prepend, os.listdir(unicode(inputPath)))])
+    return sum([
+        (os.path.getsize(f) if os.path.isfile(f) else getDirSize(f))
+        for f in map(prepend, os.listdir(text_type(inputPath)))
+    ])
 
 
 def is_minSize(inputName, minSize):

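Note: the many unicode(...) to text_type(...) swaps in this and the following hunks rely on six.text_type, which is unicode on Python 2 and str on Python 3, so os.listdir keeps returning text filenames on both. A minimal sketch:

    import os
    from six import text_type

    # Passing a text path asks os.listdir for text filenames on Python 2 as well,
    # matching what the old unicode(...) calls achieved.
    for name in os.listdir(text_type('.')):
        print(type(name), name)
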
@@ -330,7 +332,7 @@ def removeEmptyFolders(path, removeRoot=True):
 
     # remove empty subfolders
     logger.debug("Checking for empty folders in:{0}".format(path))
-    files = os.listdir(unicode(path))
+    files = os.listdir(text_type(path))
     if len(files):
         for f in files:
             fullpath = os.path.join(path, f)

@@ -338,7 +340,7 @@ def removeEmptyFolders(path, removeRoot=True):
                 removeEmptyFolders(fullpath)
 
     # if folder empty, delete it
-    files = os.listdir(unicode(path))
+    files = os.listdir(text_type(path))
     if len(files) == 0 and removeRoot:
         logger.debug("Removing empty folder:{}".format(path))
         os.rmdir(path)

@@ -417,7 +419,7 @@ def CharReplace(Name):
     # If there is special character, detects if it is a UTF-8, CP850 or ISO-8859-15 encoding
     encoded = False
     encoding = None
-    if isinstance(Name, unicode):
+    if isinstance(Name, text_type):
         return encoded, Name.encode(core.SYS_ENCODING)
     for Idx in range(len(Name)):
         # /!\ detection is done 2char by 2char for UTF-8 special character

@@ -644,9 +646,9 @@ def getDirs(section, subsection, link='hard'):
         folders = []
 
         logger.info("Searching {0} for mediafiles to post-process ...".format(path))
-        sync = [o for o in os.listdir(unicode(path)) if os.path.splitext(o)[1] in ['.!sync', '.bts']]
+        sync = [o for o in os.listdir(text_type(path)) if os.path.splitext(o)[1] in ['.!sync', '.bts']]
         # search for single files and move them into their own folder for post-processing
-        for mediafile in [os.path.join(path, o) for o in os.listdir(unicode(path)) if
+        for mediafile in [os.path.join(path, o) for o in os.listdir(text_type(path)) if
                           os.path.isfile(os.path.join(path, o))]:
             if len(sync) > 0:
                 break

@@ -710,11 +712,11 @@ def getDirs(section, subsection, link='hard'):
 
         # removeEmptyFolders(path, removeRoot=False)
 
-        if os.listdir(unicode(path)):
-            for dir in [os.path.join(path, o) for o in os.listdir(unicode(path)) if
+        if os.listdir(text_type(path)):
+            for dir in [os.path.join(path, o) for o in os.listdir(text_type(path)) if
                         os.path.isdir(os.path.join(path, o))]:
-                sync = [o for o in os.listdir(unicode(dir)) if os.path.splitext(o)[1] in ['.!sync', '.bts']]
-                if len(sync) > 0 or len(os.listdir(unicode(dir))) == 0:
+                sync = [o for o in os.listdir(text_type(dir)) if os.path.splitext(o)[1] in ['.!sync', '.bts']]
+                if len(sync) > 0 or len(os.listdir(text_type(dir))) == 0:
                     continue
                 folders.extend([dir])
         return folders

@@ -765,7 +767,7 @@ def onerror(func, path, exc_info):
 def rmDir(dirName):
     logger.info("Deleting {0}".format(dirName))
     try:
-        shutil.rmtree(unicode(dirName), onerror=onerror)
+        shutil.rmtree(text_type(dirName), onerror=onerror)
     except:
         logger.error("Unable to delete folder {0}".format(dirName))
 

@@ -1050,7 +1052,7 @@ def listMediaFiles(path, minSize=0, delete_ignored=0, media=True, audio=True, me
 
         return files
 
-    for curFile in os.listdir(unicode(path)):
+    for curFile in os.listdir(text_type(path)):
         fullCurFile = os.path.join(path, curFile)
 
         # if it's a folder do it recursively

@@ -1087,7 +1089,7 @@ def find_imdbid(dirName, inputName, omdbApiKey):
         logger.info("Found imdbID [{0}]".format(imdbid))
         return imdbid
     if os.path.isdir(dirName):
-        for file in os.listdir(unicode(dirName)):
+        for file in os.listdir(text_type(dirName)):
             m = re.search('(tt\d{7})', file)
             if m:
                 imdbid = m.group(1)

@@ -9,7 +9,7 @@ import shutil
 import subprocess
 
 from babelfish import Language
-from six import iteritems
+from six import iteritems, text_type, string_types
 
 import core
 from core import logger

@@ -117,7 +117,7 @@ def getVideoDetails(videofile, img=None, bitbucket=None):
 
 
 def buildCommands(file, newDir, movieName, bitbucket):
-    if isinstance(file, basestring):
+    if isinstance(file, string_types):
         inputFile = file
         if 'concat:' in file:
             file = file.split('|')[0].replace('concat:', '')

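Note: basestring no longer exists in Python 3; six.string_types is (basestring,) on Python 2 and (str,) on Python 3, so the isinstance checks in this and the later transcoder hunks keep their old meaning. A sketch with made-up values:

    from six import string_types

    def classify(item):
        if isinstance(item, string_types):
            return "plain path or concat spec"
        return "dict of image -> data"   # the non-string case buildCommands handles

    print(classify("movie.mkv"))
    print(classify({"disc.iso": {"name": "DVD"}}))
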
@@ -134,7 +134,7 @@ def buildCommands(file, newDir, movieName, bitbucket):
         if ext == core.VEXTENSION and newDir == dir: # we need to change the name to prevent overwriting itself.
             core.VEXTENSION = '-transcoded{ext}'.format(ext=core.VEXTENSION) # adds '-transcoded.ext'
     else:
-        img, data = iteritems(file).next()
+        img, data = next(iteritems(file))
         name = data['name']
         video_details, result = getVideoDetails(data['files'][0], img, bitbucket)
         inputFile = '-'

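Note: iterator objects lost their .next() method in Python 3; the next() builtin works on both versions, including the iterator returned by six.iteritems(). A sketch with a made-up single-entry mapping shaped loosely like the file dict above:

    from six import iteritems

    file_map = {"disc.iso": {"name": "DVD", "files": ["VTS_01_1.VOB"]}}

    # Python 2 only: iteritems(file_map).next()
    # Both versions: the next() builtin drives any iterator.
    img, data = next(iteritems(file_map))
    print(img, data["name"])
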
@@ -480,13 +480,15 @@ def buildCommands(file, newDir, movieName, bitbucket):
                 continue
             command.extend(['-i', subfile])
             lan = os.path.splitext(os.path.splitext(subfile)[0])[1][1:].split('-')[0]
+            lan = text_type(lan)
             metlan = None
             try:
                 if len(lan) == 3:
                     metlan = Language(lan)
                 if len(lan) == 2:
                     metlan = Language.fromalpha2(lan)
-            except: pass
+            except:
+                pass
             if metlan:
                 meta_cmd.extend(['-metadata:s:s:{x}'.format(x=len(s_mapped) + n),
                                  'language={lang}'.format(lang=metlan.alpha3)])

@@ -616,7 +618,7 @@ def processList(List, newDir, bitbucket):
     if combine:
         newList.extend(combineCD(combine))
     for file in newList:
-        if isinstance(file, basestring) and 'concat:' not in file and not os.path.isfile(file):
+        if isinstance(file, string_types) and 'concat:' not in file and not os.path.isfile(file):
             success = False
             break
     if success and newList:

@@ -751,13 +753,13 @@ def Transcode_directory(dirName):
         return 1, dirName
 
     for file in List:
-        if isinstance(file, basestring) and os.path.splitext(file)[1] in core.IGNOREEXTENSIONS:
+        if isinstance(file, string_types) and os.path.splitext(file)[1] in core.IGNOREEXTENSIONS:
             continue
         command = buildCommands(file, newDir, movieName, bitbucket)
         newfilePath = command[-1]
 
         # transcoding files may remove the original file, so make sure to extract subtitles first
-        if core.SEXTRACT and isinstance(file, basestring):
+        if core.SEXTRACT and isinstance(file, string_types):
             extract_subs(file, newfilePath, bitbucket)
 
         try: # Try to remove the file that we're transcoding to just in case. (ffmpeg will return an error if it already exists for some reason)

@@ -772,10 +774,10 @@ def Transcode_directory(dirName):
         print_cmd(command)
         result = 1 # set result to failed in case call fails.
         try:
-            if isinstance(file, basestring):
+            if isinstance(file, string_types):
                 proc = subprocess.Popen(command, stdout=bitbucket, stderr=bitbucket)
             else:
-                img, data = iteritems(file).next()
+                img, data = next(iteritems(file))
                 proc = subprocess.Popen(command, stdout=bitbucket, stderr=bitbucket, stdin=subprocess.PIPE)
                 for vob in data['files']:
                     procin = zip_out(vob, img, bitbucket)

@@ -787,7 +789,7 @@ def Transcode_directory(dirName):
         except:
             logger.error("Transcoding of video {0} has failed".format(newfilePath))
 
-        if core.SUBSDIR and result == 0 and isinstance(file, basestring):
+        if core.SUBSDIR and result == 0 and isinstance(file, string_types):
             for sub in get_subs(file):
                 name = os.path.splitext(os.path.split(file)[1])[0]
                 subname = os.path.split(sub)[1]

@@ -817,7 +819,7 @@ def Transcode_directory(dirName):
                 os.unlink(file)
             except:
                 pass
-    if not os.listdir(unicode(newDir)): # this is an empty directory and we didn't transcode into it.
+    if not os.listdir(text_type(newDir)): # this is an empty directory and we didn't transcode into it.
         os.rmdir(newDir)
         newDir = dirName
     if not core.PROCESSOUTPUT and core.DUPLICATE: # We postprocess the original files to CP/SB

@@ -10,7 +10,8 @@ import stat
 import subprocess
 import tarfile
 import traceback
-import urllib
 
+from six.moves.urllib.request import urlretrieve
+
 import core
 from core import gh_api as github, logger

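Note: urlretrieve moved from urllib to urllib.request in Python 3; six.moves.urllib.request exposes it under one name for both. A sketch with a hypothetical URL and target path (the download itself is commented out so the snippet stays offline):

    from six.moves.urllib.request import urlretrieve

    # Hypothetical URL and path; the call signature is unchanged across versions.
    tar_download_url = "https://example.com/nzbtomedia-update.tar"
    tar_download_path = "/tmp/nzbtomedia-update.tar"

    print(urlretrieve)  # same callable whether it came from urllib (py2) or urllib.request (py3)
    # urlretrieve(tar_download_url, tar_download_path)  # would hit the network
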
@@ -451,7 +452,7 @@ class SourceUpdateManager(UpdateManager):
         # retrieve file
         logger.log(u"Downloading update from {url!r}".format(url=tar_download_url))
         tar_download_path = os.path.join(sb_update_dir, u'nzbtomedia-update.tar')
-        urllib.urlretrieve(tar_download_url, tar_download_path)
+        urlretrieve(tar_download_url, tar_download_path)
 
         if not os.path.isfile(tar_download_path):
             logger.log(u"Unable to retrieve new version from {url}, can't update".format

@@ -28,12 +28,14 @@ class DelugeClient(object):
         if platform.system() in ('Windows', 'Microsoft'):
             appDataPath = os.environ.get("APPDATA")
             if not appDataPath:
-                import _winreg
-                hkey = _winreg.OpenKey(_winreg.HKEY_CURRENT_USER,
-                                       "Software\\Microsoft\\Windows\\CurrentVersion\\Explorer\\Shell Folders")
-                appDataReg = _winreg.QueryValueEx(hkey, "AppData")
+                from six.moves import winreg
+                hkey = winreg.OpenKey(
+                    winreg.HKEY_CURRENT_USER,
+                    "Software\\Microsoft\\Windows\\CurrentVersion\\Explorer\\Shell Folders",
+                )
+                appDataReg = winreg.QueryValueEx(hkey, "AppData")
                 appDataPath = appDataReg[0]
-                _winreg.CloseKey(hkey)
+                winreg.CloseKey(hkey)
 
             auth_file = os.path.join(appDataPath, "deluge", "auth")
         else:

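Note: the _winreg module was renamed to winreg in Python 3; six.moves.winreg resolves to whichever one is available. A sketch guarded so it is a no-op off Windows:

    import platform

    if platform.system() in ("Windows", "Microsoft"):
        from six.moves import winreg  # _winreg on Python 2, winreg on Python 3

        hkey = winreg.OpenKey(
            winreg.HKEY_CURRENT_USER,
            "Software\\Microsoft\\Windows\\CurrentVersion\\Explorer\\Shell Folders",
        )
        print(winreg.QueryValueEx(hkey, "AppData")[0])
        winreg.CloseKey(hkey)
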
@@ -1,8 +1,7 @@
 # coding=utf8
 import re
-import urllib
 
-from six import StringIO
+from six import StringIO, iteritems
 from six.moves.http_cookiejar import CookieJar
 from six.moves.urllib.request import (
     HTTPBasicAuthHandler,

@@ -11,7 +10,7 @@ from six.moves.urllib.request import (
     build_opener,
     install_opener,
 )
-from six.moves.urllib_parse import urljoin
+from six.moves.urllib_parse import urlencode, urljoin
 
 from .upload import MultiPartForm
 

@@ -94,7 +93,7 @@ class UTorrentClient(object):
 
     def setprops(self, hash, **kvpairs):
         params = [('action', 'setprops'), ('hash', hash)]
-        for k, v in kvpairs.iteritems():
+        for k, v in iteritems(kvpairs):
            params.append( ("s", k) )
            params.append( ("v", v) )
 

@@ -114,7 +113,7 @@ class UTorrentClient(object):
         if filepath is not None:
             file_handler = open(filepath,'rb')
         else:
-            file_handler = StringIO.StringIO(bytes)
+            file_handler = StringIO(bytes)
 
         form.add_file('torrent_file', filename.encode('utf-8'), file_handler)
 

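Note: six.StringIO is StringIO.StringIO on Python 2 and io.StringIO on Python 3, which is why the module-qualified StringIO.StringIO(...) call becomes a plain StringIO(...). A sketch with a made-up torrent-ish text payload:

    from six import StringIO

    file_handler = StringIO(u"d8:announce35:udp://tracker.example.com/announce...e")
    print(file_handler.read()[:11])  # -> d8:announce
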
@@ -138,11 +137,11 @@ class UTorrentClient(object):
 
     def _action(self, params, body=None, content_type=None):
         #about token, see https://github.com/bittorrent/webui/wiki/TokenSystem
-        url = self.base_url + '?token=' + self.token + '&' + urllib.urlencode(params)
+        url = self.base_url + '?token=' + self.token + '&' + urlencode(params)
         request = Request(url)
 
         if body:
-            request.add_data(body)
+            request.data = body
             request.add_header('Content-length', len(body))
         if content_type:
             request.add_header('Content-type', content_type)

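Note: two Python 3 moves meet in this last hunk: urllib.urlencode now lives in urllib.parse (reached here via six.moves.urllib_parse), and Request.add_data() was removed, so the code assigns request.data directly, which works on both versions. A sketch with a hypothetical WebUI host and token:

    from six.moves.urllib.request import Request
    from six.moves.urllib_parse import urlencode

    # Hypothetical host and token, for illustration only.
    params = [("action", "setprops"), ("hash", "abc123"), ("s", "label"), ("v", "tv")]
    url = "http://localhost:8080/gui/" + "?token=TOKEN&" + urlencode(params)

    request = Request(url)
    body = b"payload"
    request.data = body   # add_data() is gone in Python 3; plain assignment works on 2 and 3
    request.add_header("Content-length", len(body))
    print(request.get_full_url())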