mirror of
https://github.com/qbittorrent/qBittorrent
synced 2025-08-19 12:59:56 -07:00
Merge pull request #2550 from DoumanAsh/multiprocessor_search
[search engine] Replace threading with multiprocessing
This commit is contained in:
commit
3d40834c57
22 changed files with 1650 additions and 1470 deletions
|
@ -1,4 +1,4 @@
|
||||||
#VERSION: 1.2
|
#VERSION: 2.0
|
||||||
#AUTHORS: Christophe Dumez (chris@qbittorrent.org)
|
#AUTHORS: Christophe Dumez (chris@qbittorrent.org)
|
||||||
|
|
||||||
# Redistribution and use in source and binary forms, with or without
|
# Redistribution and use in source and binary forms, with or without
|
||||||
|
@ -25,92 +25,139 @@
|
||||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||||
# POSSIBILITY OF SUCH DAMAGE.
|
# POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
from HTMLParser import HTMLParser
|
||||||
|
from httplib import HTTPConnection as http
|
||||||
|
#qBt
|
||||||
from novaprinter import prettyPrinter
|
from novaprinter import prettyPrinter
|
||||||
from helpers import retrieve_url, download_file
|
from helpers import download_file
|
||||||
import sgmllib
|
|
||||||
import re
|
|
||||||
|
|
||||||
class extratorrent(object):
|
class extratorrent(object):
|
||||||
url = 'http://extratorrent.cc'
|
""" Search engine class """
|
||||||
name = 'extratorrent'
|
url = 'http://extratorrent.cc'
|
||||||
supported_categories = {'all': '', 'movies': '4', 'tv': '8', 'music': '5', 'games': '3', 'anime': '1', 'software': '7', 'books': '2', 'pictures': '6'}
|
name = 'ExtraTorrent'
|
||||||
|
supported_categories = {'all' : '0',
|
||||||
|
'movies' : '4',
|
||||||
|
'tv' : '8',
|
||||||
|
'music' : '5',
|
||||||
|
'games' : '3',
|
||||||
|
'anime' : '1',
|
||||||
|
'software' : '7',
|
||||||
|
'books' : '2',
|
||||||
|
'pictures' : '6'}
|
||||||
|
|
||||||
def __init__(self):
|
def download_torrent(self, info):
|
||||||
self.results = []
|
""" Downloader """
|
||||||
self.parser = self.SimpleSGMLParser(self.results, self.url)
|
print(download_file(info))
|
||||||
|
|
||||||
def download_torrent(self, info):
|
class MyHtmlParseWithBlackJack(HTMLParser):
|
||||||
print download_file(info)
|
""" Parser class """
|
||||||
|
def __init__(self, list_searches, url):
|
||||||
|
HTMLParser.__init__(self)
|
||||||
|
self.url = url
|
||||||
|
self.list_searches = list_searches
|
||||||
|
self.current_item = None
|
||||||
|
self.cur_item_name = None
|
||||||
|
self.pending_size = False
|
||||||
|
self.next_queries = True
|
||||||
|
self.pending_next_queries = False
|
||||||
|
self.next_queries_set = set()
|
||||||
|
|
||||||
class SimpleSGMLParser(sgmllib.SGMLParser):
|
def handle_starttag(self, tag, attrs):
|
||||||
def __init__(self, results, url, *args):
|
|
||||||
sgmllib.SGMLParser.__init__(self)
|
|
||||||
self.url = url
|
|
||||||
self.td_counter = None
|
|
||||||
self.current_item = None
|
|
||||||
self.start_name = False
|
|
||||||
self.results = results
|
|
||||||
|
|
||||||
def start_a(self, attr):
|
|
||||||
params = dict(attr)
|
|
||||||
#print params
|
|
||||||
if params.has_key('href') and params['href'].startswith("/torrent_download/"):
|
|
||||||
self.current_item = {}
|
|
||||||
self.td_counter = 0
|
|
||||||
self.start_name = False
|
|
||||||
torrent_id = '/'.join(params['href'].split('/')[2:])
|
|
||||||
self.current_item['link']=self.url+'/download/'+torrent_id
|
|
||||||
elif params.has_key('href') and params['href'].startswith("/torrent/") and params['href'].endswith(".html"):
|
|
||||||
self.current_item['desc_link'] = self.url + params['href'].strip()
|
|
||||||
self.start_name = True
|
|
||||||
|
|
||||||
def handle_data(self, data):
|
|
||||||
if self.td_counter == 2:
|
|
||||||
if not self.current_item.has_key('name') and self.start_name:
|
|
||||||
self.current_item['name'] = data.strip()
|
|
||||||
elif self.td_counter == 3:
|
|
||||||
if not self.current_item.has_key('size'):
|
|
||||||
self.current_item['size'] = ''
|
|
||||||
self.current_item['size']+= data.replace(" ", " ").strip()
|
|
||||||
elif self.td_counter == 4:
|
|
||||||
if not self.current_item.has_key('seeds'):
|
|
||||||
self.current_item['seeds'] = ''
|
|
||||||
self.current_item['seeds']+= data.strip()
|
|
||||||
elif self.td_counter == 5:
|
|
||||||
if not self.current_item.has_key('leech'):
|
|
||||||
self.current_item['leech'] = ''
|
|
||||||
self.current_item['leech']+= data.strip()
|
|
||||||
|
|
||||||
def start_td(self,attr):
|
|
||||||
if isinstance(self.td_counter,int):
|
|
||||||
self.td_counter += 1
|
|
||||||
if self.td_counter > 5:
|
|
||||||
self.td_counter = None
|
|
||||||
# Display item
|
|
||||||
if self.current_item:
|
if self.current_item:
|
||||||
self.current_item['engine_url'] = self.url
|
if tag == "a":
|
||||||
if not self.current_item['seeds'].isdigit():
|
params = dict(attrs)
|
||||||
self.current_item['seeds'] = 0
|
link = params['href']
|
||||||
if not self.current_item['leech'].isdigit():
|
|
||||||
self.current_item['leech'] = 0
|
|
||||||
prettyPrinter(self.current_item)
|
|
||||||
self.results.append('a')
|
|
||||||
|
|
||||||
def search(self, what, cat='all'):
|
if not link.startswith("/torrent"):
|
||||||
ret = []
|
return
|
||||||
i = 1
|
|
||||||
while True and i<11:
|
if link[8] == "/":
|
||||||
results = []
|
#description
|
||||||
parser = self.SimpleSGMLParser(results, self.url)
|
self.current_item["desc_link"] = "".join((self.url, link))
|
||||||
dat = retrieve_url(self.url+'/advanced_search/?with=%s&s_cat=%s&page=%d'%(what, self.supported_categories[cat], i))
|
#remove view at the beginning
|
||||||
results_re = re.compile('(?s)<table class="tl"><thead>.*')
|
self.current_item["name"] = params["title"][5:].replace("&", "&")
|
||||||
for match in results_re.finditer(dat):
|
self.pending_size = True
|
||||||
res_tab = match.group(0)
|
elif link[8] == "_":
|
||||||
parser.feed(res_tab)
|
#download link
|
||||||
|
link = link.replace("torrent_", "", 1)
|
||||||
|
self.current_item["link"] = "".join((self.url, link))
|
||||||
|
|
||||||
|
elif tag == "td":
|
||||||
|
if self.pending_size:
|
||||||
|
self.cur_item_name = "size"
|
||||||
|
self.current_item["size"] = ""
|
||||||
|
self.pending_size = False
|
||||||
|
|
||||||
|
for attr in attrs:
|
||||||
|
if attr[0] == "class":
|
||||||
|
if attr[1][0] == "s":
|
||||||
|
self.cur_item_name = "seeds"
|
||||||
|
self.current_item["seeds"] = ""
|
||||||
|
elif attr[1][0] == "l":
|
||||||
|
self.cur_item_name = "leech"
|
||||||
|
self.current_item["leech"] = ""
|
||||||
|
break
|
||||||
|
|
||||||
|
|
||||||
|
elif tag == "tr":
|
||||||
|
for attr in attrs:
|
||||||
|
if attr[0] == "class" and attr[1].startswith("tl"):
|
||||||
|
self.current_item = dict()
|
||||||
|
self.current_item["engine_url"] = self.url
|
||||||
|
break
|
||||||
|
|
||||||
|
elif self.pending_next_queries:
|
||||||
|
if tag == "a":
|
||||||
|
params = dict(attrs)
|
||||||
|
if params["title"] in self.next_queries_set:
|
||||||
|
return
|
||||||
|
self.list_searches.append(params['href'])
|
||||||
|
self.next_queries_set.add(params["title"])
|
||||||
|
if params["title"] == "10":
|
||||||
|
self.pending_next_queries = False
|
||||||
|
else:
|
||||||
|
self.pending_next_queries = False
|
||||||
|
|
||||||
|
elif self.next_queries:
|
||||||
|
if tag == "b" and ("class", "pager_no_link") in attrs:
|
||||||
|
self.next_queries = False
|
||||||
|
self.pending_next_queries = True
|
||||||
|
|
||||||
|
def handle_data(self, data):
|
||||||
|
if self.cur_item_name:
|
||||||
|
temp = self.current_item[self.cur_item_name]
|
||||||
|
self.current_item[self.cur_item_name] = " ".join((temp, data))
|
||||||
|
#Due to utf-8 we need to handle data two times if there is space
|
||||||
|
if not self.cur_item_name == "size":
|
||||||
|
self.cur_item_name = None
|
||||||
|
|
||||||
|
def handle_endtag(self, tag):
|
||||||
|
if self.current_item:
|
||||||
|
if tag == "tr":
|
||||||
|
prettyPrinter(self.current_item)
|
||||||
|
self.current_item = None
|
||||||
|
|
||||||
|
def search(self, what, cat="all"):
|
||||||
|
""" Performs search """
|
||||||
|
connection = http("extratorrent.cc")
|
||||||
|
|
||||||
|
query = "".join(("/search/?new=1&search=", what, "&s_cat=", self.supported_categories[cat]))
|
||||||
|
|
||||||
|
connection.request("GET", query)
|
||||||
|
response = connection.getresponse()
|
||||||
|
if response.status != 200:
|
||||||
|
return
|
||||||
|
|
||||||
|
list_searches = []
|
||||||
|
parser = self.MyHtmlParseWithBlackJack(list_searches, self.url)
|
||||||
|
parser.feed(response.read().decode('utf-8'))
|
||||||
parser.close()
|
parser.close()
|
||||||
break
|
|
||||||
if len(results) <= 0:
|
|
||||||
break
|
|
||||||
i += 1
|
|
||||||
|
|
||||||
|
for search_query in list_searches:
|
||||||
|
connection.request("GET", search_query)
|
||||||
|
response = connection.getresponse()
|
||||||
|
parser.feed(response.read().decode('utf-8'))
|
||||||
|
parser.close()
|
||||||
|
|
||||||
|
connection.close()
|
||||||
|
return
|
||||||
|
|
|
@ -1,4 +1,4 @@
|
||||||
#VERSION: 1.02
|
#VERSION: 1.03
|
||||||
#AUTHORS: Christophe Dumez (chris@qbittorrent.org)
|
#AUTHORS: Christophe Dumez (chris@qbittorrent.org)
|
||||||
|
|
||||||
# Redistribution and use in source and binary forms, with or without
|
# Redistribution and use in source and binary forms, with or without
|
||||||
|
@ -36,10 +36,6 @@ class legittorrents(object):
|
||||||
name = 'legittorrents'
|
name = 'legittorrents'
|
||||||
supported_categories = {'all': '', 'movies': '1', 'tv': '13', 'music': '2', 'games': '3', 'anime': '5', 'books': '6'}
|
supported_categories = {'all': '', 'movies': '1', 'tv': '13', 'music': '2', 'games': '3', 'anime': '5', 'books': '6'}
|
||||||
|
|
||||||
def __init__(self):
|
|
||||||
self.results = []
|
|
||||||
self.parser = self.SimpleSGMLParser(self.results, self.url)
|
|
||||||
|
|
||||||
def download_torrent(self, info):
|
def download_torrent(self, info):
|
||||||
print download_file(info)
|
print download_file(info)
|
||||||
|
|
||||||
|
|
|
@ -1,4 +1,4 @@
|
||||||
#VERSION: 1.51
|
#VERSION: 2.00
|
||||||
#AUTHORS: Christophe Dumez (chris@qbittorrent.org)
|
#AUTHORS: Christophe Dumez (chris@qbittorrent.org)
|
||||||
#CONTRIBUTORS: Diego de las Heras (diegodelasheras@gmail.com)
|
#CONTRIBUTORS: Diego de las Heras (diegodelasheras@gmail.com)
|
||||||
|
|
||||||
|
@ -26,90 +26,123 @@
|
||||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||||
# POSSIBILITY OF SUCH DAMAGE.
|
# POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
from HTMLParser import HTMLParser
|
||||||
|
from httplib import HTTPConnection as http
|
||||||
from novaprinter import prettyPrinter
|
from novaprinter import prettyPrinter
|
||||||
from helpers import retrieve_url, download_file
|
from helpers import download_file
|
||||||
import sgmllib
|
|
||||||
import re
|
|
||||||
|
|
||||||
class mininova(object):
|
class mininova(object):
|
||||||
# Mandatory properties
|
""" Search engine class """
|
||||||
url = 'http://www.mininova.org'
|
url = 'http://www.mininova.org'
|
||||||
name = 'Mininova'
|
name = 'Mininova'
|
||||||
supported_categories = {'all': '0', 'movies': '4', 'tv': '8', 'music': '5', 'games': '3', 'anime': '1', 'software': '7', 'pictures': '6', 'books': '2'}
|
supported_categories = {'all' : '0',
|
||||||
|
'movies' : '4',
|
||||||
|
'tv' : '8',
|
||||||
|
'music' : '5',
|
||||||
|
'games' : '3',
|
||||||
|
'anime' : '1',
|
||||||
|
'software' : '7',
|
||||||
|
'pictures' : '6',
|
||||||
|
'books' : '2'}
|
||||||
|
|
||||||
def __init__(self):
|
def download_torrent(self, info):
|
||||||
self.results = []
|
print(download_file(info))
|
||||||
self.parser = self.SimpleSGMLParser(self.results, self.url)
|
|
||||||
|
|
||||||
def download_torrent(self, info):
|
class MyHtmlParseWithBlackJack(HTMLParser):
|
||||||
print download_file(info)
|
""" Parser class """
|
||||||
|
def __init__(self, list_searches, url):
|
||||||
|
HTMLParser.__init__(self)
|
||||||
|
self.list_searches = list_searches
|
||||||
|
self.url = url
|
||||||
|
self.table_results = False
|
||||||
|
self.current_item = None
|
||||||
|
self.cur_item_name = None
|
||||||
|
self.next_queries = True
|
||||||
|
|
||||||
class SimpleSGMLParser(sgmllib.SGMLParser):
|
def handle_starttag_tr(self, _):
|
||||||
def __init__(self, results, url, *args):
|
""" Handler of tr start tag """
|
||||||
sgmllib.SGMLParser.__init__(self)
|
self.current_item = dict()
|
||||||
self.url = url
|
|
||||||
self.td_counter = None
|
|
||||||
self.current_item = None
|
|
||||||
self.results = results
|
|
||||||
|
|
||||||
def start_a(self, attr):
|
def handle_starttag_a(self, attrs):
|
||||||
params = dict(attr)
|
""" Handler of a start tag """
|
||||||
#print params
|
params = dict(attrs)
|
||||||
if params.has_key('href'):
|
link = params["href"]
|
||||||
if params['href'].startswith("/get/"):
|
|
||||||
self.current_item = {}
|
|
||||||
self.td_counter = 0
|
|
||||||
self.current_item['link']=self.url+params['href'].strip()
|
|
||||||
elif params['href'].startswith("/tor/") and self.current_item is not None:
|
|
||||||
self.current_item['desc_link']=self.url+params['href'].strip()
|
|
||||||
|
|
||||||
def handle_data(self, data):
|
if link.startswith("/tor/"):
|
||||||
if self.td_counter == 0:
|
#description
|
||||||
if not self.current_item.has_key('name'):
|
self.current_item["desc_link"] = "".join((self.url, link))
|
||||||
self.current_item['name'] = ''
|
#get download link from description by id
|
||||||
self.current_item['name']+= data
|
self.current_item["link"] = "".join((self.url, "/get/", link[5:-2]))
|
||||||
elif self.td_counter == 1:
|
self.cur_item_name = "name"
|
||||||
if not self.current_item.has_key('size'):
|
self.current_item["name"] = ""
|
||||||
self.current_item['size'] = ''
|
elif self.next_queries and link.startswith("/search"):
|
||||||
self.current_item['size']+= data.strip()
|
if params["title"].startswith("Page"):
|
||||||
elif self.td_counter == 2:
|
self.list_searches.append(link)
|
||||||
if not self.current_item.has_key('seeds'):
|
|
||||||
self.current_item['seeds'] = ''
|
|
||||||
self.current_item['seeds']+= data.strip()
|
|
||||||
elif self.td_counter == 3:
|
|
||||||
if not self.current_item.has_key('leech'):
|
|
||||||
self.current_item['leech'] = ''
|
|
||||||
self.current_item['leech']+= data.strip()
|
|
||||||
|
|
||||||
def start_td(self,attr):
|
def handle_starttag_td(self, attrs):
|
||||||
if isinstance(self.td_counter,int):
|
""" Handler of td start tag """
|
||||||
self.td_counter += 1
|
if ("align", "right") in attrs:
|
||||||
if self.td_counter > 4:
|
if not "size" in self.current_item:
|
||||||
self.td_counter = None
|
self.cur_item_name = "size"
|
||||||
# Display item
|
self.current_item["size"] = ""
|
||||||
if self.current_item:
|
|
||||||
self.current_item['engine_url'] = self.url
|
|
||||||
if not self.current_item['seeds'].isdigit():
|
|
||||||
self.current_item['seeds'] = 0
|
|
||||||
if not self.current_item['leech'].isdigit():
|
|
||||||
self.current_item['leech'] = 0
|
|
||||||
prettyPrinter(self.current_item)
|
|
||||||
self.results.append('a')
|
|
||||||
|
|
||||||
def search(self, what, cat='all'):
|
def handle_starttag_span(self, attrs):
|
||||||
ret = []
|
""" Handler of span start tag """
|
||||||
i = 1
|
if ("class", "g") in attrs:
|
||||||
while True and i<11:
|
self.cur_item_name = "seeds"
|
||||||
results = []
|
self.current_item["seeds"] = ""
|
||||||
parser = self.SimpleSGMLParser(results, self.url)
|
elif ("class", "b") in attrs:
|
||||||
dat = retrieve_url(self.url+'/search/%s/%s/seeds/%d'%(what, self.supported_categories[cat], i))
|
self.cur_item_name = "leech"
|
||||||
results_re = re.compile('(?s)<h1>Search results for.*')
|
self.current_item["leech"] = ""
|
||||||
for match in results_re.finditer(dat):
|
|
||||||
res_tab = match.group(0)
|
def handle_starttag(self, tag, attrs):
|
||||||
parser.feed(res_tab)
|
""" Parser's start tag handler """
|
||||||
|
if self.table_results:
|
||||||
|
dispatcher = getattr(self, "_".join(("handle_starttag", tag)), None)
|
||||||
|
if dispatcher:
|
||||||
|
dispatcher(attrs)
|
||||||
|
|
||||||
|
elif tag == "table":
|
||||||
|
self.table_results = ("class", "maintable") in attrs
|
||||||
|
|
||||||
|
def handle_endtag(self, tag):
|
||||||
|
""" Parser's end tag handler """
|
||||||
|
if tag == "tr" and self.current_item:
|
||||||
|
self.current_item["engine_url"] = self.url
|
||||||
|
prettyPrinter(self.current_item)
|
||||||
|
self.current_item = None
|
||||||
|
elif self.cur_item_name:
|
||||||
|
if tag == "a" or tag == "td":
|
||||||
|
self.cur_item_name = None
|
||||||
|
|
||||||
|
def handle_data(self, data):
|
||||||
|
""" Parser's data handler """
|
||||||
|
if self.cur_item_name:
|
||||||
|
temp = self.current_item[self.cur_item_name]
|
||||||
|
self.current_item[self.cur_item_name] = " ".join((temp, data))
|
||||||
|
|
||||||
|
def search(self, what, cat="all"):
|
||||||
|
""" Performs search """
|
||||||
|
connection = http("www.mininova.org")
|
||||||
|
|
||||||
|
query = "/".join(("/search", what, self.supported_categories[cat], "seeds"))
|
||||||
|
|
||||||
|
connection.request("GET", query)
|
||||||
|
response = connection.getresponse()
|
||||||
|
if response.status != 200:
|
||||||
|
return
|
||||||
|
|
||||||
|
list_searches = []
|
||||||
|
parser = self.MyHtmlParseWithBlackJack(list_searches, self.url)
|
||||||
|
parser.feed(response.read().decode('utf-8'))
|
||||||
parser.close()
|
parser.close()
|
||||||
break
|
|
||||||
if len(results) <= 0:
|
|
||||||
break
|
|
||||||
i += 1
|
|
||||||
|
|
||||||
|
parser.next_queries = False
|
||||||
|
for search_query in list_searches:
|
||||||
|
connection.request("GET", search_query)
|
||||||
|
response = connection.getresponse()
|
||||||
|
parser.feed(response.read().decode('utf-8'))
|
||||||
|
parser.close()
|
||||||
|
|
||||||
|
connection.close()
|
||||||
|
return
|
||||||
|
|
Binary file not shown.
Before Width: | Height: | Size: 252 B After Width: | Height: | Size: 951 B |
|
@ -1,4 +1,4 @@
|
||||||
#VERSION: 1.33
|
#VERSION: 1.36
|
||||||
#AUTHORS: Gekko Dam Beer (gekko04@users.sourceforge.net)
|
#AUTHORS: Gekko Dam Beer (gekko04@users.sourceforge.net)
|
||||||
#CONTRIBUTORS: Christophe Dumez (chris@qbittorrent.org)
|
#CONTRIBUTORS: Christophe Dumez (chris@qbittorrent.org)
|
||||||
# Bruno Barbieri (brunorex@gmail.com)
|
# Bruno Barbieri (brunorex@gmail.com)
|
||||||
|
@ -28,92 +28,84 @@
|
||||||
# POSSIBILITY OF SUCH DAMAGE.
|
# POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
from novaprinter import prettyPrinter
|
from novaprinter import prettyPrinter
|
||||||
from helpers import retrieve_url, download_file
|
from helpers import download_file, retrieve_url
|
||||||
from urllib2 import HTTPError
|
|
||||||
from HTMLParser import HTMLParser
|
|
||||||
import urllib
|
import urllib
|
||||||
import re
|
from HTMLParser import HTMLParser
|
||||||
|
from re import compile as re_compile
|
||||||
|
|
||||||
class torrentreactor(object):
|
class torrentreactor(object):
|
||||||
url = 'http://www.torrentreactor.net'
|
url = 'http://www.torrentreactor.net'
|
||||||
name = 'TorrentReactor.Net'
|
name = 'TorrentReactor'
|
||||||
supported_categories = {'all': '', 'movies': '5', 'tv': '8', 'music': '6', 'games': '3', 'anime': '1', 'software': '2'}
|
supported_categories = {'all': '', 'movies': '5', 'tv': '8', 'music': '6', 'games': '3', 'anime': '1', 'software': '2'}
|
||||||
|
|
||||||
def download_torrent(self, info):
|
def download_torrent(self, info):
|
||||||
print download_file(info)
|
print(download_file(info))
|
||||||
|
|
||||||
class SimpleHTMLParser(HTMLParser):
|
class SimpleHTMLParser(HTMLParser):
|
||||||
def __init__(self, results, url, *args):
|
def __init__(self, results, url, *args):
|
||||||
HTMLParser.__init__(self)
|
HTMLParser.__init__(self)
|
||||||
self.td_counter = None
|
self.td_counter = None
|
||||||
self.current_item = None
|
self.current_item = None
|
||||||
self.results = results
|
self.results = results
|
||||||
self.id = None
|
self.id = None
|
||||||
self.url = url
|
self.url = url
|
||||||
self.dispatcher = { 'a' : self.start_a, 'td' : self.start_td }
|
self.torrents_matcher = re_compile("/torrents/\d+.*")
|
||||||
|
self.dispatcher = { 'a' : self.start_a, 'td' : self.start_td }
|
||||||
|
|
||||||
def handle_starttag(self, tag, attrs):
|
def handle_starttag(self, tag, attrs):
|
||||||
if tag in self.dispatcher:
|
if tag in self.dispatcher:
|
||||||
self.dispatcher[tag](attrs)
|
self.dispatcher[tag](attrs)
|
||||||
|
|
||||||
def start_a(self, attr):
|
def start_a(self, attr):
|
||||||
params = dict(attr)
|
params = dict(attr)
|
||||||
if re.match("/torrents/\d+.*", params['href']):
|
if self.torrents_matcher.match(params['href']):
|
||||||
self.current_item = {}
|
self.current_item = {}
|
||||||
self.current_item['desc_link'] = self.url+params['href'].strip()
|
self.current_item['desc_link'] = self.url+params['href'].strip()
|
||||||
elif 'torrentreactor.net/download.php' in params['href']:
|
elif 'torrentreactor.net/download.php' in params['href']:
|
||||||
self.td_counter = 0
|
self.td_counter = 0
|
||||||
self.current_item['link'] = params['href'].strip()
|
self.current_item['link'] = params['href'].strip()
|
||||||
self.current_item['name'] = urllib.unquote_plus(params['href'].split('&')[1].split('name=')[1])
|
self.current_item['name'] = urllib.unquote_plus(params['href'].split('&')[1].split('name=')[1])
|
||||||
|
|
||||||
def handle_data(self, data):
|
def handle_data(self, data):
|
||||||
if self.td_counter == 1:
|
if self.td_counter == 1:
|
||||||
if not self.current_item.has_key('size'):
|
if 'size' not in self.current_item:
|
||||||
self.current_item['size'] = ''
|
self.current_item['size'] = ''
|
||||||
self.current_item['size']+= data.strip()
|
self.current_item['size']+= data.strip()
|
||||||
elif self.td_counter == 2:
|
elif self.td_counter == 2:
|
||||||
if not self.current_item.has_key('seeds'):
|
if 'seeds' not in self.current_item:
|
||||||
self.current_item['seeds'] = ''
|
self.current_item['seeds'] = ''
|
||||||
self.current_item['seeds']+= data.strip()
|
self.current_item['seeds']+= data.strip()
|
||||||
elif self.td_counter == 3:
|
elif self.td_counter == 3:
|
||||||
if not self.current_item.has_key('leech'):
|
if 'leech' not in self.current_item:
|
||||||
self.current_item['leech'] = ''
|
self.current_item['leech'] = ''
|
||||||
self.current_item['leech']+= data.strip()
|
self.current_item['leech']+= data.strip()
|
||||||
|
|
||||||
def start_td(self,attr):
|
def start_td(self,attr):
|
||||||
if isinstance(self.td_counter,int):
|
if isinstance(self.td_counter,int):
|
||||||
self.td_counter += 1
|
self.td_counter += 1
|
||||||
if self.td_counter > 3:
|
if self.td_counter > 3:
|
||||||
self.td_counter = None
|
self.td_counter = None
|
||||||
# add item to results
|
# add item to results
|
||||||
if self.current_item:
|
if self.current_item:
|
||||||
self.current_item['engine_url'] = self.url
|
self.current_item['engine_url'] = self.url
|
||||||
if not self.current_item['seeds'].isdigit():
|
if not self.current_item['seeds'].isdigit():
|
||||||
self.current_item['seeds'] = 0
|
self.current_item['seeds'] = 0
|
||||||
if not self.current_item['leech'].isdigit():
|
if not self.current_item['leech'].isdigit():
|
||||||
self.current_item['leech'] = 0
|
self.current_item['leech'] = 0
|
||||||
prettyPrinter(self.current_item)
|
prettyPrinter(self.current_item)
|
||||||
self.has_results = True
|
self.has_results = True
|
||||||
self.results.append('a')
|
self.results.append('a')
|
||||||
|
|
||||||
def __init__(self):
|
def search(self, what, cat='all'):
|
||||||
self.results = []
|
i = 0
|
||||||
self.parser = self.SimpleHTMLParser(self.results, self.url)
|
dat = ''
|
||||||
|
|
||||||
def search(self, what, cat='all'):
|
while i < 11:
|
||||||
i = 0
|
results = []
|
||||||
dat = ''
|
parser = self.SimpleHTMLParser(results, self.url)
|
||||||
while True and i<11:
|
dat = retrieve_url('%s/torrent-search/%s/%s?sort=seeders.desc&type=all&period=none&categories=%s'%(self.url, what, (i*35), self.supported_categories[cat]))
|
||||||
results = []
|
parser.feed(dat)
|
||||||
parser = self.SimpleHTMLParser(results, self.url)
|
parser.close()
|
||||||
|
if len(results) <= 0:
|
||||||
try:
|
break
|
||||||
dat = retrieve_url(self.url+'/torrent-search/%s/%s?sort=seeders.desc&type=all&period=none&categories=%s'%(what, (i*35), self.supported_categories[cat]))
|
i += 1
|
||||||
except HTTPError:
|
|
||||||
break
|
|
||||||
|
|
||||||
parser.feed(dat)
|
|
||||||
parser.close()
|
|
||||||
if len(results) <= 0:
|
|
||||||
break
|
|
||||||
i += 1
|
|
||||||
|
|
|
@ -1,4 +1,4 @@
|
||||||
#VERSION: 2.13
|
#VERSION: 2.14
|
||||||
#AUTHORS: Diego de las Heras (diegodelasheras@gmail.com)
|
#AUTHORS: Diego de las Heras (diegodelasheras@gmail.com)
|
||||||
|
|
||||||
# Redistribution and use in source and binary forms, with or without
|
# Redistribution and use in source and binary forms, with or without
|
||||||
|
@ -105,7 +105,7 @@ class torrentz(object):
|
||||||
while i < 6:
|
while i < 6:
|
||||||
results_list = []
|
results_list = []
|
||||||
# "what" is already urlencoded
|
# "what" is already urlencoded
|
||||||
html = retrieve_url(self.url + '/any?f=%s&p=%d' % (what, i))
|
html = retrieve_url('%s/any?f=%s&p=%d' % (self.url, what, i))
|
||||||
parser = self.MyHtmlParser(results_list, self.url, trackers)
|
parser = self.MyHtmlParser(results_list, self.url, trackers)
|
||||||
parser.feed(html)
|
parser.feed(html)
|
||||||
parser.close()
|
parser.close()
|
||||||
|
|
|
@ -1,8 +1,9 @@
|
||||||
torrentreactor: 1.33
|
|
||||||
mininova: 1.51
|
|
||||||
piratebay: 2.11
|
|
||||||
extratorrent: 1.2
|
extratorrent: 1.2
|
||||||
|
torrentreactor: 1.36
|
||||||
|
mininova: 2.00
|
||||||
|
piratebay: 2.11
|
||||||
|
extratorrent: 2.0
|
||||||
kickasstorrents: 1.26
|
kickasstorrents: 1.26
|
||||||
btdigg: 1.24
|
btdigg: 1.24
|
||||||
legittorrents: 1.02
|
torrentz: 2.14
|
||||||
torrentz: 2.13
|
legittorrents: 1.03
|
||||||
|
|
|
@ -26,7 +26,7 @@
|
||||||
# POSSIBILITY OF SUCH DAMAGE.
|
# POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
|
||||||
#VERSION: 1.32
|
#VERSION: 1.40
|
||||||
|
|
||||||
# Author:
|
# Author:
|
||||||
# Fabien Devaux <fab AT gnux DOT info>
|
# Fabien Devaux <fab AT gnux DOT info>
|
||||||
|
@ -37,16 +37,15 @@
|
||||||
#
|
#
|
||||||
# Licence: BSD
|
# Licence: BSD
|
||||||
|
|
||||||
import sys
|
|
||||||
import threading
|
|
||||||
import os
|
|
||||||
import glob
|
|
||||||
import urllib
|
import urllib
|
||||||
|
from os import path
|
||||||
import fix_encoding
|
from glob import glob
|
||||||
|
from sys import argv
|
||||||
|
from multiprocessing import Pool, cpu_count
|
||||||
|
from fix_encoding import fix_encoding
|
||||||
|
|
||||||
THREADED = True
|
THREADED = True
|
||||||
CATEGORIES = ('all', 'movies', 'tv', 'music', 'games', 'anime', 'software', 'pictures', 'books')
|
CATEGORIES = {'all', 'movies', 'tv', 'music', 'games', 'anime', 'software', 'pictures', 'books'}
|
||||||
|
|
||||||
################################################################################
|
################################################################################
|
||||||
# Every engine should have a "search" method taking
|
# Every engine should have a "search" method taking
|
||||||
|
@ -56,108 +55,129 @@ CATEGORIES = ('all', 'movies', 'tv', 'music', 'games', 'anime', 'software', 'pic
|
||||||
# As a convention, try to list results by decrasing number of seeds or similar
|
# As a convention, try to list results by decrasing number of seeds or similar
|
||||||
################################################################################
|
################################################################################
|
||||||
|
|
||||||
supported_engines = []
|
def initialize_engines():
|
||||||
|
""" Import available engines
|
||||||
|
|
||||||
engines = glob.glob(os.path.join(os.path.dirname(__file__), 'engines','*.py'))
|
Return list of available engines
|
||||||
for engine in engines:
|
"""
|
||||||
e = engine.split(os.sep)[-1][:-3]
|
supported_engines = []
|
||||||
if len(e.strip()) == 0: continue
|
|
||||||
if e.startswith('_'): continue
|
|
||||||
try:
|
|
||||||
exec "from engines.%s import %s"%(e,e)
|
|
||||||
supported_engines.append(e)
|
|
||||||
except:
|
|
||||||
pass
|
|
||||||
|
|
||||||
def engineToXml(short_name):
|
engines = glob(path.join(path.dirname(__file__), 'engines', '*.py'))
|
||||||
xml = "<%s>\n"%short_name
|
for engine in engines:
|
||||||
exec "engine = %s()"%short_name
|
engi = path.basename(engine).split('.')[0].strip()
|
||||||
xml += "<name>%s</name>\n"%engine.name
|
if len(engi) == 0 or engi.startswith('_'):
|
||||||
xml += "<url>%s</url>\n"%engine.url
|
continue
|
||||||
xml += "<categories>"
|
try:
|
||||||
if hasattr(engine, 'supported_categories'):
|
#import engines.[engine]
|
||||||
supported_categories = engine.supported_categories.keys()
|
engine_module = __import__(".".join(("engines", engi)))
|
||||||
supported_categories.remove('all')
|
#get low-level module
|
||||||
xml += " ".join(supported_categories)
|
engine_module = getattr(engine_module, engi)
|
||||||
xml += "</categories>\n"
|
#bind class name
|
||||||
xml += "</%s>\n"%short_name
|
globals()[engi] = getattr(engine_module, engi)
|
||||||
return xml
|
supported_engines.append(engi)
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
|
||||||
def displayCapabilities():
|
return supported_engines
|
||||||
"""
|
|
||||||
Display capabilities in XML format
|
|
||||||
<capabilities>
|
|
||||||
<engine_short_name>
|
|
||||||
<name>long name</name>
|
|
||||||
<url>http://example.com</url>
|
|
||||||
<categories>movies music games</categories>
|
|
||||||
</engine_short_name>
|
|
||||||
</capabilities>
|
|
||||||
"""
|
|
||||||
xml = "<capabilities>"
|
|
||||||
for short_name in supported_engines:
|
|
||||||
xml += engineToXml(short_name)
|
|
||||||
xml += "</capabilities>"
|
|
||||||
print xml
|
|
||||||
|
|
||||||
class EngineLauncher(threading.Thread):
|
def engines_to_xml(supported_engines):
|
||||||
def __init__(self, engine, what, cat='all'):
|
""" Generates xml for supported engines """
|
||||||
threading.Thread.__init__(self)
|
tab = " " * 4
|
||||||
self.engine = engine
|
|
||||||
self.what = what
|
|
||||||
self.cat = cat
|
|
||||||
def run(self):
|
|
||||||
if hasattr(self.engine, 'supported_categories'):
|
|
||||||
if self.cat == 'all' or self.cat in self.engine.supported_categories.keys():
|
|
||||||
self.engine.search(self.what, self.cat)
|
|
||||||
elif self.cat == 'all':
|
|
||||||
self.engine.search(self.what)
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
for short_name in supported_engines:
|
||||||
# Make sure we enforce utf-8 encoding
|
search_engine = globals()[short_name]()
|
||||||
fix_encoding.fix_encoding()
|
|
||||||
|
|
||||||
if len(sys.argv) < 2:
|
supported_categories = ""
|
||||||
raise SystemExit('./nova2.py [all|engine1[,engine2]*] <category> <keywords>\navailable engines: %s'%
|
if hasattr(search_engine, "supported_categories"):
|
||||||
(','.join(supported_engines)))
|
supported_categories = " ".join((key for key in search_engine.supported_categories.keys()
|
||||||
|
if key is not "all"))
|
||||||
|
|
||||||
if len(sys.argv) == 2:
|
yield "".join((tab, "<", short_name, ">\n",
|
||||||
if sys.argv[1] == "--capabilities":
|
tab, tab, "<name>", search_engine.name, "</name>\n",
|
||||||
displayCapabilities()
|
tab, tab, "<url>", search_engine.url, "</url>\n",
|
||||||
sys.exit(0)
|
tab, tab, "<categories>", supported_categories, "</categories>\n",
|
||||||
else:
|
tab, "</", short_name, ">\n"))
|
||||||
raise SystemExit('./nova.py [all|engine1[,engine2]*] <category> <keywords>\navailable engines: %s'%
|
|
||||||
(','.join(supported_engines)))
|
|
||||||
|
|
||||||
engines_list = [e.lower() for e in sys.argv[1].strip().split(',')]
|
def displayCapabilities(supported_engines):
|
||||||
|
"""
|
||||||
|
Display capabilities in XML format
|
||||||
|
<capabilities>
|
||||||
|
<engine_short_name>
|
||||||
|
<name>long name</name>
|
||||||
|
<url>http://example.com</url>
|
||||||
|
<categories>movies music games</categories>
|
||||||
|
</engine_short_name>
|
||||||
|
</capabilities>
|
||||||
|
"""
|
||||||
|
xml = "".join(("<capabilities>\n",
|
||||||
|
"".join(engines_to_xml(supported_engines)),
|
||||||
|
"</capabilities>"))
|
||||||
|
print(xml)
|
||||||
|
|
||||||
if 'all' in engines_list:
|
def run_search(engine_list):
|
||||||
engines_list = supported_engines
|
""" Run search in engine
|
||||||
|
|
||||||
cat = sys.argv[2].lower()
|
@param engine_list List with engine, query and category
|
||||||
|
|
||||||
if cat not in CATEGORIES:
|
@retval False if any exceptions occured
|
||||||
raise SystemExit('Invalid category!')
|
@retval True otherwise
|
||||||
|
"""
|
||||||
|
engine, what, cat = engine_list
|
||||||
|
try:
|
||||||
|
engine = engine()
|
||||||
|
#avoid exceptions due to invalid category
|
||||||
|
if hasattr(engine, 'supported_categories'):
|
||||||
|
cat = cat if cat in engine.supported_categories else "all"
|
||||||
|
engine.search(what, cat)
|
||||||
|
else:
|
||||||
|
engine.search(what)
|
||||||
|
return True
|
||||||
|
except:
|
||||||
|
return False
|
||||||
|
|
||||||
what = urllib.quote(' '.join(sys.argv[3:]))
|
def main(args):
|
||||||
|
fix_encoding()
|
||||||
|
supported_engines = initialize_engines()
|
||||||
|
|
||||||
threads = []
|
if not args:
|
||||||
for engine in engines_list:
|
raise SystemExit("./nova2.py [all|engine1[,engine2]*] <category> <keywords>\n"
|
||||||
try:
|
"available engines: %s" % (','.join(supported_engines)))
|
||||||
if THREADED:
|
|
||||||
exec "l = EngineLauncher(%s(), what, cat)"%engine
|
elif args[0] == "--capabilities":
|
||||||
threads.append(l)
|
displayCapabilities(supported_engines)
|
||||||
l.start()
|
return
|
||||||
else:
|
|
||||||
exec "e = %s()"%engine
|
elif len(args) < 3:
|
||||||
if hasattr(engine, 'supported_categories'):
|
raise SystemExit("./nova2.py [all|engine1[,engine2]*] <category> <keywords>\n"
|
||||||
if cat == 'all' or cat in e.supported_categories.keys():
|
"available engines: %s" % (','.join(supported_engines)))
|
||||||
e.search(what, cat)
|
|
||||||
elif self.cat == 'all':
|
#get only unique engines with set
|
||||||
e.search(what)
|
engines_list = set(e.lower() for e in args[0].strip().split(','))
|
||||||
engine().search(what, cat)
|
|
||||||
except:
|
if 'all' in engines_list:
|
||||||
pass
|
engines_list = supported_engines
|
||||||
if THREADED:
|
else:
|
||||||
for t in threads:
|
#discard un-supported engines
|
||||||
t.join()
|
engines_list = [engine for engine in engines_list
|
||||||
|
if engine in supported_engines]
|
||||||
|
|
||||||
|
if not engines_list:
|
||||||
|
#engine list is empty. Nothing to do here
|
||||||
|
return
|
||||||
|
|
||||||
|
cat = args[1].lower()
|
||||||
|
|
||||||
|
if cat not in CATEGORIES:
|
||||||
|
raise SystemExit(" - ".join(('Invalid category', cat)))
|
||||||
|
|
||||||
|
what = urllib.quote(' '.join(args[2:]))
|
||||||
|
|
||||||
|
if THREADED:
|
||||||
|
#child process spawning is controlled min(number of searches, number of cpu)
|
||||||
|
pool = Pool(min(len(engines_list), cpu_count()))
|
||||||
|
pool.map(run_search, ([globals()[engine], what, cat] for engine in engines_list))
|
||||||
|
else:
|
||||||
|
map(run_search, ([globals()[engine], what, cat] for engine in engines_list))
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main(argv[1:])
|
||||||
|
|
|
@ -25,7 +25,7 @@
|
||||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||||
# POSSIBILITY OF SUCH DAMAGE.
|
# POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
#VERSION: 1.10
|
#VERSION: 1.20
|
||||||
|
|
||||||
# Author:
|
# Author:
|
||||||
# Christophe DUMEZ (chris@qbittorrent.org)
|
# Christophe DUMEZ (chris@qbittorrent.org)
|
||||||
|
@ -39,26 +39,26 @@ supported_engines = dict()
|
||||||
|
|
||||||
engines = glob.glob(os.path.join(os.path.dirname(__file__), 'engines','*.py'))
|
engines = glob.glob(os.path.join(os.path.dirname(__file__), 'engines','*.py'))
|
||||||
for engine in engines:
|
for engine in engines:
|
||||||
e = engine.split(os.sep)[-1][:-3]
|
e = engine.split(os.sep)[-1][:-3]
|
||||||
if len(e.strip()) == 0: continue
|
if len(e.strip()) == 0: continue
|
||||||
if e.startswith('_'): continue
|
if e.startswith('_'): continue
|
||||||
try:
|
try:
|
||||||
exec "from engines.%s import %s"%(e,e)
|
exec("from engines.%s import %s"%(e,e))
|
||||||
exec "engine_url = %s.url"%e
|
exec("engine_url = %s.url"%e)
|
||||||
supported_engines[engine_url] = e
|
supported_engines[engine_url] = e
|
||||||
except:
|
except:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
if len(sys.argv) < 3:
|
if len(sys.argv) < 3:
|
||||||
raise SystemExit('./nova2dl.py engine_url download_parameter')
|
raise SystemExit('./nova2dl.py engine_url download_parameter')
|
||||||
engine_url = sys.argv[1].strip()
|
engine_url = sys.argv[1].strip()
|
||||||
download_param = sys.argv[2].strip()
|
download_param = sys.argv[2].strip()
|
||||||
if engine_url not in supported_engines.keys():
|
if engine_url not in list(supported_engines.keys()):
|
||||||
raise SystemExit('./nova2dl.py: this engine_url was not recognized')
|
raise SystemExit('./nova2dl.py: this engine_url was not recognized')
|
||||||
exec "engine = %s()"%supported_engines[engine_url]
|
exec("engine = %s()"%supported_engines[engine_url])
|
||||||
if hasattr(engine, 'download_torrent'):
|
if hasattr(engine, 'download_torrent'):
|
||||||
engine.download_torrent(download_param)
|
engine.download_torrent(download_param)
|
||||||
else:
|
else:
|
||||||
print download_file(download_param)
|
print(download_file(download_param))
|
||||||
sys.exit(0)
|
sys.exit(0)
|
||||||
|
|
|
@ -25,45 +25,44 @@
|
||||||
# POSSIBILITY OF SUCH DAMAGE.
|
# POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
import sys, codecs
|
import sys, codecs
|
||||||
|
from io import open
|
||||||
|
|
||||||
# Force UTF-8 printing
|
# Force UTF-8 printing
|
||||||
sys.stdout = codecs.getwriter('utf-8')(sys.stdout)
|
sys.stdout = codecs.getwriter('utf-8')(sys.stdout)
|
||||||
|
|
||||||
def prettyPrinter(dictionary):
|
def prettyPrinter(dictionary):
|
||||||
# Convert everything to unicode for safe printing
|
dictionary['size'] = anySizeToBytes(dictionary['size'])
|
||||||
for key,value in dictionary.items():
|
outtext = "|".join((dictionary["link"], dictionary["name"].replace("|", " "), str(dictionary["size"]), str(dictionary["seeds"]), str(dictionary["leech"]), dictionary["engine_url"]))
|
||||||
if isinstance(dictionary[key], str):
|
if 'desc_link' in dictionary:
|
||||||
dictionary[key] = unicode(dictionary[key], 'utf-8')
|
outtext = "|".join((outtext, dictionary["desc_link"]))
|
||||||
dictionary['size'] = anySizeToBytes(dictionary['size'])
|
|
||||||
if dictionary.has_key('desc_link'):
|
with open(1, 'w', encoding='utf-8', closefd=False) as utf8_stdout:
|
||||||
print u"%s|%s|%s|%s|%s|%s|%s"%(dictionary['link'],dictionary['name'].replace('|',' '),dictionary['size'],dictionary['seeds'],dictionary['leech'],dictionary['engine_url'],dictionary['desc_link'])
|
utf8_stdout.write(unicode("".join((outtext, "\n"))))
|
||||||
else:
|
|
||||||
print u"%s|%s|%s|%s|%s|%s"%(dictionary['link'],dictionary['name'].replace('|',' '),dictionary['size'],dictionary['seeds'],dictionary['leech'],dictionary['engine_url'])
|
|
||||||
|
|
||||||
def anySizeToBytes(size_string):
|
def anySizeToBytes(size_string):
|
||||||
"""
|
"""
|
||||||
Convert a string like '1 KB' to '1024' (bytes)
|
Convert a string like '1 KB' to '1024' (bytes)
|
||||||
"""
|
"""
|
||||||
# separate integer from unit
|
# separate integer from unit
|
||||||
try:
|
try:
|
||||||
size, unit = size_string.split()
|
size, unit = size_string.split()
|
||||||
except:
|
except:
|
||||||
try:
|
try:
|
||||||
size = size_string.strip()
|
size = size_string.strip()
|
||||||
unit = ''.join([c for c in size if c.isalpha()])
|
unit = ''.join([c for c in size if c.isalpha()])
|
||||||
if len(unit) > 0:
|
if len(unit) > 0:
|
||||||
size = size[:-len(unit)]
|
size = size[:-len(unit)]
|
||||||
except:
|
except:
|
||||||
return -1
|
return -1
|
||||||
if len(size) == 0:
|
if len(size) == 0:
|
||||||
return -1
|
return -1
|
||||||
size = float(size)
|
size = float(size)
|
||||||
if len(unit) == 0:
|
if len(unit) == 0:
|
||||||
return int(size)
|
return int(size)
|
||||||
short_unit = unit.upper()[0]
|
short_unit = unit.upper()[0]
|
||||||
|
|
||||||
# convert
|
# convert
|
||||||
units_dict = { 'T': 40, 'G': 30, 'M': 20, 'K': 10 }
|
units_dict = {'T': 40, 'G': 30, 'M': 20, 'K': 10}
|
||||||
if units_dict.has_key( short_unit ):
|
if units_dict.has_key(short_unit):
|
||||||
size = size * 2**units_dict[short_unit]
|
size = size * 2**units_dict[short_unit]
|
||||||
return int(size)
|
return int(size)
|
||||||
|
|
|
@ -42,350 +42,350 @@ _defaultproxy = None
|
||||||
_orgsocket = socket.socket
|
_orgsocket = socket.socket
|
||||||
|
|
||||||
class ProxyError(Exception):
|
class ProxyError(Exception):
|
||||||
def __init__(self, value):
|
def __init__(self, value):
|
||||||
self.value = value
|
self.value = value
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
return repr(self.value)
|
return repr(self.value)
|
||||||
|
|
||||||
class GeneralProxyError(ProxyError):
|
class GeneralProxyError(ProxyError):
|
||||||
def __init__(self, value):
|
def __init__(self, value):
|
||||||
self.value = value
|
self.value = value
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
return repr(self.value)
|
return repr(self.value)
|
||||||
|
|
||||||
class Socks5AuthError(ProxyError):
|
class Socks5AuthError(ProxyError):
|
||||||
def __init__(self, value):
|
def __init__(self, value):
|
||||||
self.value = value
|
self.value = value
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
return repr(self.value)
|
return repr(self.value)
|
||||||
|
|
||||||
class Socks5Error(ProxyError):
|
class Socks5Error(ProxyError):
|
||||||
def __init__(self, value):
|
def __init__(self, value):
|
||||||
self.value = value
|
self.value = value
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
return repr(self.value)
|
return repr(self.value)
|
||||||
|
|
||||||
class Socks4Error(ProxyError):
|
class Socks4Error(ProxyError):
|
||||||
def __init__(self, value):
|
def __init__(self, value):
|
||||||
self.value = value
|
self.value = value
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
return repr(self.value)
|
return repr(self.value)
|
||||||
|
|
||||||
class HTTPError(ProxyError):
|
class HTTPError(ProxyError):
|
||||||
def __init__(self, value):
|
def __init__(self, value):
|
||||||
self.value = value
|
self.value = value
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
return repr(self.value)
|
return repr(self.value)
|
||||||
|
|
||||||
_generalerrors = ("success",
|
_generalerrors = ("success",
|
||||||
"invalid data",
|
"invalid data",
|
||||||
"not connected",
|
"not connected",
|
||||||
"not available",
|
"not available",
|
||||||
"bad proxy type",
|
"bad proxy type",
|
||||||
"bad input")
|
"bad input")
|
||||||
|
|
||||||
_socks5errors = ("succeeded",
|
_socks5errors = ("succeeded",
|
||||||
"general SOCKS server failure",
|
"general SOCKS server failure",
|
||||||
"connection not allowed by ruleset",
|
"connection not allowed by ruleset",
|
||||||
"Network unreachable",
|
"Network unreachable",
|
||||||
"Host unreachable",
|
"Host unreachable",
|
||||||
"Connection refused",
|
"Connection refused",
|
||||||
"TTL expired",
|
"TTL expired",
|
||||||
"Command not supported",
|
"Command not supported",
|
||||||
"Address type not supported",
|
"Address type not supported",
|
||||||
"Unknown error")
|
"Unknown error")
|
||||||
|
|
||||||
_socks5autherrors = ("succeeded",
|
_socks5autherrors = ("succeeded",
|
||||||
"authentication is required",
|
"authentication is required",
|
||||||
"all offered authentication methods were rejected",
|
"all offered authentication methods were rejected",
|
||||||
"unknown username or invalid password",
|
"unknown username or invalid password",
|
||||||
"unknown error")
|
"unknown error")
|
||||||
|
|
||||||
_socks4errors = ("request granted",
|
_socks4errors = ("request granted",
|
||||||
"request rejected or failed",
|
"request rejected or failed",
|
||||||
"request rejected because SOCKS server cannot connect to identd on the client",
|
"request rejected because SOCKS server cannot connect to identd on the client",
|
||||||
"request rejected because the client program and identd report different user-ids",
|
"request rejected because the client program and identd report different user-ids",
|
||||||
"unknown error")
|
"unknown error")
|
||||||
|
|
||||||
def setdefaultproxy(proxytype=None,addr=None,port=None,rdns=True,username=None,password=None):
|
def setdefaultproxy(proxytype=None,addr=None,port=None,rdns=True,username=None,password=None):
|
||||||
"""setdefaultproxy(proxytype, addr[, port[, rdns[, username[, password]]]])
|
"""setdefaultproxy(proxytype, addr[, port[, rdns[, username[, password]]]])
|
||||||
Sets a default proxy which all further socksocket objects will use,
|
Sets a default proxy which all further socksocket objects will use,
|
||||||
unless explicitly changed.
|
unless explicitly changed.
|
||||||
"""
|
"""
|
||||||
global _defaultproxy
|
global _defaultproxy
|
||||||
_defaultproxy = (proxytype,addr,port,rdns,username,password)
|
_defaultproxy = (proxytype,addr,port,rdns,username,password)
|
||||||
|
|
||||||
class socksocket(socket.socket):
|
class socksocket(socket.socket):
|
||||||
"""socksocket([family[, type[, proto]]]) -> socket object
|
"""socksocket([family[, type[, proto]]]) -> socket object
|
||||||
|
|
||||||
Open a SOCKS enabled socket. The parameters are the same as
|
Open a SOCKS enabled socket. The parameters are the same as
|
||||||
those of the standard socket init. In order for SOCKS to work,
|
those of the standard socket init. In order for SOCKS to work,
|
||||||
you must specify family=AF_INET, type=SOCK_STREAM and proto=0.
|
you must specify family=AF_INET, type=SOCK_STREAM and proto=0.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(self, family=socket.AF_INET, type=socket.SOCK_STREAM, proto=0, _sock=None):
|
def __init__(self, family=socket.AF_INET, type=socket.SOCK_STREAM, proto=0, _sock=None):
|
||||||
_orgsocket.__init__(self,family,type,proto,_sock)
|
_orgsocket.__init__(self,family,type,proto,_sock)
|
||||||
if _defaultproxy != None:
|
if _defaultproxy != None:
|
||||||
self.__proxy = _defaultproxy
|
self.__proxy = _defaultproxy
|
||||||
else:
|
else:
|
||||||
self.__proxy = (None, None, None, None, None, None)
|
self.__proxy = (None, None, None, None, None, None)
|
||||||
self.__proxysockname = None
|
self.__proxysockname = None
|
||||||
self.__proxypeername = None
|
self.__proxypeername = None
|
||||||
|
|
||||||
def __recvall(self, bytes):
|
def __recvall(self, bytes):
|
||||||
"""__recvall(bytes) -> data
|
"""__recvall(bytes) -> data
|
||||||
Receive EXACTLY the number of bytes requested from the socket.
|
Receive EXACTLY the number of bytes requested from the socket.
|
||||||
Blocks until the required number of bytes have been received.
|
Blocks until the required number of bytes have been received.
|
||||||
"""
|
"""
|
||||||
data = ""
|
data = ""
|
||||||
while len(data) < bytes:
|
while len(data) < bytes:
|
||||||
d = self.recv(bytes-len(data))
|
d = self.recv(bytes-len(data))
|
||||||
if not d:
|
if not d:
|
||||||
raise GeneralProxyError("connection closed unexpectedly")
|
raise GeneralProxyError("connection closed unexpectedly")
|
||||||
data = data + d
|
data = data + d
|
||||||
return data
|
return data
|
||||||
|
|
||||||
def setproxy(self,proxytype=None,addr=None,port=None,rdns=True,username=None,password=None):
|
def setproxy(self,proxytype=None,addr=None,port=None,rdns=True,username=None,password=None):
|
||||||
"""setproxy(proxytype, addr[, port[, rdns[, username[, password]]]])
|
"""setproxy(proxytype, addr[, port[, rdns[, username[, password]]]])
|
||||||
Sets the proxy to be used.
|
Sets the proxy to be used.
|
||||||
proxytype - The type of the proxy to be used. Three types
|
proxytype - The type of the proxy to be used. Three types
|
||||||
are supported: PROXY_TYPE_SOCKS4 (including socks4a),
|
are supported: PROXY_TYPE_SOCKS4 (including socks4a),
|
||||||
PROXY_TYPE_SOCKS5 and PROXY_TYPE_HTTP
|
PROXY_TYPE_SOCKS5 and PROXY_TYPE_HTTP
|
||||||
addr - The address of the server (IP or DNS).
|
addr - The address of the server (IP or DNS).
|
||||||
port - The port of the server. Defaults to 1080 for SOCKS
|
port - The port of the server. Defaults to 1080 for SOCKS
|
||||||
servers and 8080 for HTTP proxy servers.
|
servers and 8080 for HTTP proxy servers.
|
||||||
rdns - Should DNS queries be preformed on the remote side
|
rdns - Should DNS queries be preformed on the remote side
|
||||||
(rather than the local side). The default is True.
|
(rather than the local side). The default is True.
|
||||||
Note: This has no effect with SOCKS4 servers.
|
Note: This has no effect with SOCKS4 servers.
|
||||||
username - Username to authenticate with to the server.
|
username - Username to authenticate with to the server.
|
||||||
The default is no authentication.
|
The default is no authentication.
|
||||||
password - Password to authenticate with to the server.
|
password - Password to authenticate with to the server.
|
||||||
Only relevant when username is also provided.
|
Only relevant when username is also provided.
|
||||||
"""
|
"""
|
||||||
self.__proxy = (proxytype,addr,port,rdns,username,password)
|
self.__proxy = (proxytype,addr,port,rdns,username,password)
|
||||||
|
|
||||||
def __negotiatesocks5(self,destaddr,destport):
|
def __negotiatesocks5(self,destaddr,destport):
|
||||||
"""__negotiatesocks5(self,destaddr,destport)
|
"""__negotiatesocks5(self,destaddr,destport)
|
||||||
Negotiates a connection through a SOCKS5 server.
|
Negotiates a connection through a SOCKS5 server.
|
||||||
"""
|
"""
|
||||||
# First we'll send the authentication packages we support.
|
# First we'll send the authentication packages we support.
|
||||||
if (self.__proxy[4]!=None) and (self.__proxy[5]!=None):
|
if (self.__proxy[4]!=None) and (self.__proxy[5]!=None):
|
||||||
# The username/password details were supplied to the
|
# The username/password details were supplied to the
|
||||||
# setproxy method so we support the USERNAME/PASSWORD
|
# setproxy method so we support the USERNAME/PASSWORD
|
||||||
# authentication (in addition to the standard none).
|
# authentication (in addition to the standard none).
|
||||||
self.sendall("\x05\x02\x00\x02")
|
self.sendall("\x05\x02\x00\x02")
|
||||||
else:
|
else:
|
||||||
# No username/password were entered, therefore we
|
# No username/password were entered, therefore we
|
||||||
# only support connections with no authentication.
|
# only support connections with no authentication.
|
||||||
self.sendall("\x05\x01\x00")
|
self.sendall("\x05\x01\x00")
|
||||||
# We'll receive the server's response to determine which
|
# We'll receive the server's response to determine which
|
||||||
# method was selected
|
# method was selected
|
||||||
chosenauth = self.__recvall(2)
|
chosenauth = self.__recvall(2)
|
||||||
if chosenauth[0] != "\x05":
|
if chosenauth[0] != "\x05":
|
||||||
self.close()
|
self.close()
|
||||||
raise GeneralProxyError((1,_generalerrors[1]))
|
raise GeneralProxyError((1,_generalerrors[1]))
|
||||||
# Check the chosen authentication method
|
# Check the chosen authentication method
|
||||||
if chosenauth[1] == "\x00":
|
if chosenauth[1] == "\x00":
|
||||||
# No authentication is required
|
# No authentication is required
|
||||||
pass
|
pass
|
||||||
elif chosenauth[1] == "\x02":
|
elif chosenauth[1] == "\x02":
|
||||||
# Okay, we need to perform a basic username/password
|
# Okay, we need to perform a basic username/password
|
||||||
# authentication.
|
# authentication.
|
||||||
self.sendall("\x01" + chr(len(self.__proxy[4])) + self.__proxy[4] + chr(len(self.__proxy[5])) + self.__proxy[5])
|
self.sendall("\x01" + chr(len(self.__proxy[4])) + self.__proxy[4] + chr(len(self.__proxy[5])) + self.__proxy[5])
|
||||||
authstat = self.__recvall(2)
|
authstat = self.__recvall(2)
|
||||||
if authstat[0] != "\x01":
|
if authstat[0] != "\x01":
|
||||||
# Bad response
|
# Bad response
|
||||||
self.close()
|
self.close()
|
||||||
raise GeneralProxyError((1,_generalerrors[1]))
|
raise GeneralProxyError((1,_generalerrors[1]))
|
||||||
if authstat[1] != "\x00":
|
if authstat[1] != "\x00":
|
||||||
# Authentication failed
|
# Authentication failed
|
||||||
self.close()
|
self.close()
|
||||||
raise Socks5AuthError,((3,_socks5autherrors[3]))
|
raise Socks5AuthError,((3,_socks5autherrors[3]))
|
||||||
# Authentication succeeded
|
# Authentication succeeded
|
||||||
else:
|
else:
|
||||||
# Reaching here is always bad
|
# Reaching here is always bad
|
||||||
self.close()
|
self.close()
|
||||||
if chosenauth[1] == "\xFF":
|
if chosenauth[1] == "\xFF":
|
||||||
raise Socks5AuthError((2,_socks5autherrors[2]))
|
raise Socks5AuthError((2,_socks5autherrors[2]))
|
||||||
else:
|
else:
|
||||||
raise GeneralProxyError((1,_generalerrors[1]))
|
raise GeneralProxyError((1,_generalerrors[1]))
|
||||||
# Now we can request the actual connection
|
# Now we can request the actual connection
|
||||||
req = "\x05\x01\x00"
|
req = "\x05\x01\x00"
|
||||||
# If the given destination address is an IP address, we'll
|
# If the given destination address is an IP address, we'll
|
||||||
# use the IPv4 address request even if remote resolving was specified.
|
# use the IPv4 address request even if remote resolving was specified.
|
||||||
try:
|
try:
|
||||||
ipaddr = socket.inet_aton(destaddr)
|
ipaddr = socket.inet_aton(destaddr)
|
||||||
req = req + "\x01" + ipaddr
|
req = req + "\x01" + ipaddr
|
||||||
except socket.error:
|
except socket.error:
|
||||||
# Well it's not an IP number, so it's probably a DNS name.
|
# Well it's not an IP number, so it's probably a DNS name.
|
||||||
if self.__proxy[3]==True:
|
if self.__proxy[3]==True:
|
||||||
# Resolve remotely
|
# Resolve remotely
|
||||||
ipaddr = None
|
ipaddr = None
|
||||||
req = req + "\x03" + chr(len(destaddr)) + destaddr
|
req = req + "\x03" + chr(len(destaddr)) + destaddr
|
||||||
else:
|
else:
|
||||||
# Resolve locally
|
# Resolve locally
|
||||||
ipaddr = socket.inet_aton(socket.gethostbyname(destaddr))
|
ipaddr = socket.inet_aton(socket.gethostbyname(destaddr))
|
||||||
req = req + "\x01" + ipaddr
|
req = req + "\x01" + ipaddr
|
||||||
req = req + struct.pack(">H",destport)
|
req = req + struct.pack(">H",destport)
|
||||||
self.sendall(req)
|
self.sendall(req)
|
||||||
# Get the response
|
# Get the response
|
||||||
resp = self.__recvall(4)
|
resp = self.__recvall(4)
|
||||||
if resp[0] != "\x05":
|
if resp[0] != "\x05":
|
||||||
self.close()
|
self.close()
|
||||||
raise GeneralProxyError((1,_generalerrors[1]))
|
raise GeneralProxyError((1,_generalerrors[1]))
|
||||||
elif resp[1] != "\x00":
|
elif resp[1] != "\x00":
|
||||||
# Connection failed
|
# Connection failed
|
||||||
self.close()
|
self.close()
|
||||||
if ord(resp[1])<=8:
|
if ord(resp[1])<=8:
|
||||||
raise Socks5Error((ord(resp[1]),_generalerrors[ord(resp[1])]))
|
raise Socks5Error((ord(resp[1]),_generalerrors[ord(resp[1])]))
|
||||||
else:
|
else:
|
||||||
raise Socks5Error((9,_generalerrors[9]))
|
raise Socks5Error((9,_generalerrors[9]))
|
||||||
# Get the bound address/port
|
# Get the bound address/port
|
||||||
elif resp[3] == "\x01":
|
elif resp[3] == "\x01":
|
||||||
boundaddr = self.__recvall(4)
|
boundaddr = self.__recvall(4)
|
||||||
elif resp[3] == "\x03":
|
elif resp[3] == "\x03":
|
||||||
resp = resp + self.recv(1)
|
resp = resp + self.recv(1)
|
||||||
boundaddr = self.__recvall(ord(resp[4]))
|
boundaddr = self.__recvall(ord(resp[4]))
|
||||||
else:
|
else:
|
||||||
self.close()
|
self.close()
|
||||||
raise GeneralProxyError((1,_generalerrors[1]))
|
raise GeneralProxyError((1,_generalerrors[1]))
|
||||||
boundport = struct.unpack(">H",self.__recvall(2))[0]
|
boundport = struct.unpack(">H",self.__recvall(2))[0]
|
||||||
self.__proxysockname = (boundaddr,boundport)
|
self.__proxysockname = (boundaddr,boundport)
|
||||||
if ipaddr != None:
|
if ipaddr != None:
|
||||||
self.__proxypeername = (socket.inet_ntoa(ipaddr),destport)
|
self.__proxypeername = (socket.inet_ntoa(ipaddr),destport)
|
||||||
else:
|
else:
|
||||||
self.__proxypeername = (destaddr,destport)
|
self.__proxypeername = (destaddr,destport)
|
||||||
|
|
||||||
def getproxysockname(self):
|
def getproxysockname(self):
|
||||||
"""getsockname() -> address info
|
"""getsockname() -> address info
|
||||||
Returns the bound IP address and port number at the proxy.
|
Returns the bound IP address and port number at the proxy.
|
||||||
"""
|
"""
|
||||||
return self.__proxysockname
|
return self.__proxysockname
|
||||||
|
|
||||||
def getproxypeername(self):
|
def getproxypeername(self):
|
||||||
"""getproxypeername() -> address info
|
"""getproxypeername() -> address info
|
||||||
Returns the IP and port number of the proxy.
|
Returns the IP and port number of the proxy.
|
||||||
"""
|
"""
|
||||||
return _orgsocket.getpeername(self)
|
return _orgsocket.getpeername(self)
|
||||||
|
|
||||||
def getpeername(self):
|
def getpeername(self):
|
||||||
"""getpeername() -> address info
|
"""getpeername() -> address info
|
||||||
Returns the IP address and port number of the destination
|
Returns the IP address and port number of the destination
|
||||||
machine (note: getproxypeername returns the proxy)
|
machine (note: getproxypeername returns the proxy)
|
||||||
"""
|
"""
|
||||||
return self.__proxypeername
|
return self.__proxypeername
|
||||||
|
|
||||||
def __negotiatesocks4(self,destaddr,destport):
|
def __negotiatesocks4(self,destaddr,destport):
|
||||||
"""__negotiatesocks4(self,destaddr,destport)
|
"""__negotiatesocks4(self,destaddr,destport)
|
||||||
Negotiates a connection through a SOCKS4 server.
|
Negotiates a connection through a SOCKS4 server.
|
||||||
"""
|
"""
|
||||||
# Check if the destination address provided is an IP address
|
# Check if the destination address provided is an IP address
|
||||||
rmtrslv = False
|
rmtrslv = False
|
||||||
try:
|
try:
|
||||||
ipaddr = socket.inet_aton(destaddr)
|
ipaddr = socket.inet_aton(destaddr)
|
||||||
except socket.error:
|
except socket.error:
|
||||||
# It's a DNS name. Check where it should be resolved.
|
# It's a DNS name. Check where it should be resolved.
|
||||||
if self.__proxy[3]==True:
|
if self.__proxy[3]==True:
|
||||||
ipaddr = "\x00\x00\x00\x01"
|
ipaddr = "\x00\x00\x00\x01"
|
||||||
rmtrslv = True
|
rmtrslv = True
|
||||||
else:
|
else:
|
||||||
ipaddr = socket.inet_aton(socket.gethostbyname(destaddr))
|
ipaddr = socket.inet_aton(socket.gethostbyname(destaddr))
|
||||||
# Construct the request packet
|
# Construct the request packet
|
||||||
req = "\x04\x01" + struct.pack(">H",destport) + ipaddr
|
req = "\x04\x01" + struct.pack(">H",destport) + ipaddr
|
||||||
# The username parameter is considered userid for SOCKS4
|
# The username parameter is considered userid for SOCKS4
|
||||||
if self.__proxy[4] != None:
|
if self.__proxy[4] != None:
|
||||||
req = req + self.__proxy[4]
|
req = req + self.__proxy[4]
|
||||||
req = req + "\x00"
|
req = req + "\x00"
|
||||||
# DNS name if remote resolving is required
|
# DNS name if remote resolving is required
|
||||||
# NOTE: This is actually an extension to the SOCKS4 protocol
|
# NOTE: This is actually an extension to the SOCKS4 protocol
|
||||||
# called SOCKS4A and may not be supported in all cases.
|
# called SOCKS4A and may not be supported in all cases.
|
||||||
if rmtrslv==True:
|
if rmtrslv==True:
|
||||||
req = req + destaddr + "\x00"
|
req = req + destaddr + "\x00"
|
||||||
self.sendall(req)
|
self.sendall(req)
|
||||||
# Get the response from the server
|
# Get the response from the server
|
||||||
resp = self.__recvall(8)
|
resp = self.__recvall(8)
|
||||||
if resp[0] != "\x00":
|
if resp[0] != "\x00":
|
||||||
# Bad data
|
# Bad data
|
||||||
self.close()
|
self.close()
|
||||||
raise GeneralProxyError((1,_generalerrors[1]))
|
raise GeneralProxyError((1,_generalerrors[1]))
|
||||||
if resp[1] != "\x5A":
|
if resp[1] != "\x5A":
|
||||||
# Server returned an error
|
# Server returned an error
|
||||||
self.close()
|
self.close()
|
||||||
if ord(resp[1]) in (91,92,93):
|
if ord(resp[1]) in (91,92,93):
|
||||||
self.close()
|
self.close()
|
||||||
raise Socks4Error((ord(resp[1]),_socks4errors[ord(resp[1])-90]))
|
raise Socks4Error((ord(resp[1]),_socks4errors[ord(resp[1])-90]))
|
||||||
else:
|
else:
|
||||||
raise Socks4Error((94,_socks4errors[4]))
|
raise Socks4Error((94,_socks4errors[4]))
|
||||||
# Get the bound address/port
|
# Get the bound address/port
|
||||||
self.__proxysockname = (socket.inet_ntoa(resp[4:]),struct.unpack(">H",resp[2:4])[0])
|
self.__proxysockname = (socket.inet_ntoa(resp[4:]),struct.unpack(">H",resp[2:4])[0])
|
||||||
if rmtrslv != None:
|
if rmtrslv != None:
|
||||||
self.__proxypeername = (socket.inet_ntoa(ipaddr),destport)
|
self.__proxypeername = (socket.inet_ntoa(ipaddr),destport)
|
||||||
else:
|
else:
|
||||||
self.__proxypeername = (destaddr,destport)
|
self.__proxypeername = (destaddr,destport)
|
||||||
|
|
||||||
def __negotiatehttp(self,destaddr,destport):
|
def __negotiatehttp(self,destaddr,destport):
|
||||||
"""__negotiatehttp(self,destaddr,destport)
|
"""__negotiatehttp(self,destaddr,destport)
|
||||||
Negotiates a connection through an HTTP server.
|
Negotiates a connection through an HTTP server.
|
||||||
"""
|
"""
|
||||||
# If we need to resolve locally, we do this now
|
# If we need to resolve locally, we do this now
|
||||||
if self.__proxy[3] == False:
|
if self.__proxy[3] == False:
|
||||||
addr = socket.gethostbyname(destaddr)
|
addr = socket.gethostbyname(destaddr)
|
||||||
else:
|
else:
|
||||||
addr = destaddr
|
addr = destaddr
|
||||||
self.sendall("CONNECT " + addr + ":" + str(destport) + " HTTP/1.1\r\n" + "Host: " + destaddr + "\r\n\r\n")
|
self.sendall("CONNECT " + addr + ":" + str(destport) + " HTTP/1.1\r\n" + "Host: " + destaddr + "\r\n\r\n")
|
||||||
# We read the response until we get the string "\r\n\r\n"
|
# We read the response until we get the string "\r\n\r\n"
|
||||||
resp = self.recv(1)
|
resp = self.recv(1)
|
||||||
while resp.find("\r\n\r\n")==-1:
|
while resp.find("\r\n\r\n")==-1:
|
||||||
resp = resp + self.recv(1)
|
resp = resp + self.recv(1)
|
||||||
# We just need the first line to check if the connection
|
# We just need the first line to check if the connection
|
||||||
# was successful
|
# was successful
|
||||||
statusline = resp.splitlines()[0].split(" ",2)
|
statusline = resp.splitlines()[0].split(" ",2)
|
||||||
if statusline[0] not in ("HTTP/1.0","HTTP/1.1"):
|
if statusline[0] not in ("HTTP/1.0","HTTP/1.1"):
|
||||||
self.close()
|
self.close()
|
||||||
raise GeneralProxyError((1,_generalerrors[1]))
|
raise GeneralProxyError((1,_generalerrors[1]))
|
||||||
try:
|
try:
|
||||||
statuscode = int(statusline[1])
|
statuscode = int(statusline[1])
|
||||||
except ValueError:
|
except ValueError:
|
||||||
self.close()
|
self.close()
|
||||||
raise GeneralProxyError((1,_generalerrors[1]))
|
raise GeneralProxyError((1,_generalerrors[1]))
|
||||||
if statuscode != 200:
|
if statuscode != 200:
|
||||||
self.close()
|
self.close()
|
||||||
raise HTTPError((statuscode,statusline[2]))
|
raise HTTPError((statuscode,statusline[2]))
|
||||||
self.__proxysockname = ("0.0.0.0",0)
|
self.__proxysockname = ("0.0.0.0",0)
|
||||||
self.__proxypeername = (addr,destport)
|
self.__proxypeername = (addr,destport)
|
||||||
|
|
||||||
def connect(self,destpair):
|
def connect(self,destpair):
|
||||||
"""connect(self,despair)
|
"""connect(self,despair)
|
||||||
Connects to the specified destination through a proxy.
|
Connects to the specified destination through a proxy.
|
||||||
destpar - A tuple of the IP/DNS address and the port number.
|
destpar - A tuple of the IP/DNS address and the port number.
|
||||||
(identical to socket's connect).
|
(identical to socket's connect).
|
||||||
To select the proxy server use setproxy().
|
To select the proxy server use setproxy().
|
||||||
"""
|
"""
|
||||||
# Do a minimal input check first
|
# Do a minimal input check first
|
||||||
if (type(destpair) in (list,tuple)==False) or (len(destpair)<2) or (type(destpair[0])!=str) or (type(destpair[1])!=int):
|
if (type(destpair) in (list,tuple)==False) or (len(destpair)<2) or (type(destpair[0])!=str) or (type(destpair[1])!=int):
|
||||||
raise GeneralProxyError((5,_generalerrors[5]))
|
raise GeneralProxyError((5,_generalerrors[5]))
|
||||||
if self.__proxy[0] == PROXY_TYPE_SOCKS5:
|
if self.__proxy[0] == PROXY_TYPE_SOCKS5:
|
||||||
if self.__proxy[2] != None:
|
if self.__proxy[2] != None:
|
||||||
portnum = self.__proxy[2]
|
portnum = self.__proxy[2]
|
||||||
else:
|
else:
|
||||||
portnum = 1080
|
portnum = 1080
|
||||||
_orgsocket.connect(self,(self.__proxy[1],portnum))
|
_orgsocket.connect(self,(self.__proxy[1],portnum))
|
||||||
self.__negotiatesocks5(destpair[0],destpair[1])
|
self.__negotiatesocks5(destpair[0],destpair[1])
|
||||||
elif self.__proxy[0] == PROXY_TYPE_SOCKS4:
|
elif self.__proxy[0] == PROXY_TYPE_SOCKS4:
|
||||||
if self.__proxy[2] != None:
|
if self.__proxy[2] != None:
|
||||||
portnum = self.__proxy[2]
|
portnum = self.__proxy[2]
|
||||||
else:
|
else:
|
||||||
portnum = 1080
|
portnum = 1080
|
||||||
_orgsocket.connect(self,(self.__proxy[1],portnum))
|
_orgsocket.connect(self,(self.__proxy[1],portnum))
|
||||||
self.__negotiatesocks4(destpair[0],destpair[1])
|
self.__negotiatesocks4(destpair[0],destpair[1])
|
||||||
elif self.__proxy[0] == PROXY_TYPE_HTTP:
|
elif self.__proxy[0] == PROXY_TYPE_HTTP:
|
||||||
if self.__proxy[2] != None:
|
if self.__proxy[2] != None:
|
||||||
portnum = self.__proxy[2]
|
portnum = self.__proxy[2]
|
||||||
else:
|
else:
|
||||||
portnum = 8080
|
portnum = 8080
|
||||||
_orgsocket.connect(self,(self.__proxy[1],portnum))
|
_orgsocket.connect(self,(self.__proxy[1],portnum))
|
||||||
self.__negotiatehttp(destpair[0],destpair[1])
|
self.__negotiatehttp(destpair[0],destpair[1])
|
||||||
elif self.__proxy[0] == None:
|
elif self.__proxy[0] == None:
|
||||||
_orgsocket.connect(self,(destpair[0],destpair[1]))
|
_orgsocket.connect(self,(destpair[0],destpair[1]))
|
||||||
else:
|
else:
|
||||||
raise GeneralProxyError((4,_generalerrors[4]))
|
raise GeneralProxyError((4,_generalerrors[4]))
|
||||||
|
|
|
@ -1,4 +1,4 @@
|
||||||
#VERSION: 1.2
|
#VERSION: 2.0
|
||||||
#AUTHORS: Christophe Dumez (chris@qbittorrent.org)
|
#AUTHORS: Christophe Dumez (chris@qbittorrent.org)
|
||||||
|
|
||||||
# Redistribution and use in source and binary forms, with or without
|
# Redistribution and use in source and binary forms, with or without
|
||||||
|
@ -25,92 +25,139 @@
|
||||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||||
# POSSIBILITY OF SUCH DAMAGE.
|
# POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
from html.parser import HTMLParser
|
||||||
|
from http.client import HTTPConnection as http
|
||||||
|
#qBt
|
||||||
from novaprinter import prettyPrinter
|
from novaprinter import prettyPrinter
|
||||||
from helpers import retrieve_url, download_file
|
from helpers import download_file
|
||||||
import sgmllib3
|
|
||||||
import re
|
|
||||||
|
|
||||||
class extratorrent(object):
|
class extratorrent(object):
|
||||||
url = 'http://extratorrent.cc'
|
""" Search engine class """
|
||||||
name = 'extratorrent'
|
url = 'http://extratorrent.cc'
|
||||||
supported_categories = {'all': '', 'movies': '4', 'tv': '8', 'music': '5', 'games': '3', 'anime': '1', 'software': '7', 'books': '2', 'pictures': '6'}
|
name = 'ExtraTorrent'
|
||||||
|
supported_categories = {'all' : '0',
|
||||||
|
'movies' : '4',
|
||||||
|
'tv' : '8',
|
||||||
|
'music' : '5',
|
||||||
|
'games' : '3',
|
||||||
|
'anime' : '1',
|
||||||
|
'software' : '7',
|
||||||
|
'books' : '2',
|
||||||
|
'pictures' : '6'}
|
||||||
|
|
||||||
def __init__(self):
|
def download_torrent(self, info):
|
||||||
self.results = []
|
""" Downloader """
|
||||||
self.parser = self.SimpleSGMLParser(self.results, self.url)
|
print(download_file(info))
|
||||||
|
|
||||||
def download_torrent(self, info):
|
class MyHtmlParseWithBlackJack(HTMLParser):
|
||||||
print(download_file(info))
|
""" Parser class """
|
||||||
|
def __init__(self, list_searches, url):
|
||||||
|
HTMLParser.__init__(self)
|
||||||
|
self.url = url
|
||||||
|
self.list_searches = list_searches
|
||||||
|
self.current_item = None
|
||||||
|
self.cur_item_name = None
|
||||||
|
self.pending_size = False
|
||||||
|
self.next_queries = True
|
||||||
|
self.pending_next_queries = False
|
||||||
|
self.next_queries_set = set()
|
||||||
|
|
||||||
class SimpleSGMLParser(sgmllib3.SGMLParser):
|
def handle_starttag(self, tag, attrs):
|
||||||
def __init__(self, results, url, *args):
|
|
||||||
sgmllib3.SGMLParser.__init__(self)
|
|
||||||
self.url = url
|
|
||||||
self.td_counter = None
|
|
||||||
self.current_item = None
|
|
||||||
self.start_name = False
|
|
||||||
self.results = results
|
|
||||||
|
|
||||||
def start_a(self, attr):
|
|
||||||
params = dict(attr)
|
|
||||||
#print params
|
|
||||||
if 'href' in params and params['href'].startswith("/torrent_download/"):
|
|
||||||
self.current_item = {}
|
|
||||||
self.td_counter = 0
|
|
||||||
self.start_name = False
|
|
||||||
torrent_id = '/'.join(params['href'].split('/')[2:])
|
|
||||||
self.current_item['link']=self.url+'/download/'+torrent_id
|
|
||||||
elif 'href' in params and params['href'].startswith("/torrent/") and params['href'].endswith(".html"):
|
|
||||||
self.current_item['desc_link'] = self.url + params['href'].strip()
|
|
||||||
self.start_name = True
|
|
||||||
|
|
||||||
def handle_data(self, data):
|
|
||||||
if self.td_counter == 2:
|
|
||||||
if 'name' not in self.current_item and self.start_name:
|
|
||||||
self.current_item['name'] = data.strip()
|
|
||||||
elif self.td_counter == 3:
|
|
||||||
if 'size' not in self.current_item:
|
|
||||||
self.current_item['size'] = ''
|
|
||||||
self.current_item['size']+= data.replace(" ", " ").strip()
|
|
||||||
elif self.td_counter == 4:
|
|
||||||
if 'seeds' not in self.current_item:
|
|
||||||
self.current_item['seeds'] = ''
|
|
||||||
self.current_item['seeds']+= data.strip()
|
|
||||||
elif self.td_counter == 5:
|
|
||||||
if 'leech' not in self.current_item:
|
|
||||||
self.current_item['leech'] = ''
|
|
||||||
self.current_item['leech']+= data.strip()
|
|
||||||
|
|
||||||
def start_td(self,attr):
|
|
||||||
if isinstance(self.td_counter,int):
|
|
||||||
self.td_counter += 1
|
|
||||||
if self.td_counter > 5:
|
|
||||||
self.td_counter = None
|
|
||||||
# Display item
|
|
||||||
if self.current_item:
|
if self.current_item:
|
||||||
self.current_item['engine_url'] = self.url
|
if tag == "a":
|
||||||
if not self.current_item['seeds'].isdigit():
|
params = dict(attrs)
|
||||||
self.current_item['seeds'] = 0
|
link = params['href']
|
||||||
if not self.current_item['leech'].isdigit():
|
|
||||||
self.current_item['leech'] = 0
|
|
||||||
prettyPrinter(self.current_item)
|
|
||||||
self.results.append('a')
|
|
||||||
|
|
||||||
def search(self, what, cat='all'):
|
if not link.startswith("/torrent"):
|
||||||
ret = []
|
return
|
||||||
i = 1
|
|
||||||
while True and i<11:
|
if link[8] == "/":
|
||||||
results = []
|
#description
|
||||||
parser = self.SimpleSGMLParser(results, self.url)
|
self.current_item["desc_link"] = "".join((self.url, link))
|
||||||
dat = retrieve_url(self.url+'/advanced_search/?with=%s&s_cat=%s&page=%d'%(what, self.supported_categories[cat], i))
|
#remove view at the beginning
|
||||||
results_re = re.compile('(?s)<table class="tl"><thead>.*')
|
self.current_item["name"] = params["title"][5:].replace("&", "&")
|
||||||
for match in results_re.finditer(dat):
|
self.pending_size = True
|
||||||
res_tab = match.group(0)
|
elif link[8] == "_":
|
||||||
parser.feed(res_tab)
|
#download link
|
||||||
|
link = link.replace("torrent_", "", 1)
|
||||||
|
self.current_item["link"] = "".join((self.url, link))
|
||||||
|
|
||||||
|
elif tag == "td":
|
||||||
|
if self.pending_size:
|
||||||
|
self.cur_item_name = "size"
|
||||||
|
self.current_item["size"] = ""
|
||||||
|
self.pending_size = False
|
||||||
|
|
||||||
|
for attr in attrs:
|
||||||
|
if attr[0] == "class":
|
||||||
|
if attr[1][0] == "s":
|
||||||
|
self.cur_item_name = "seeds"
|
||||||
|
self.current_item["seeds"] = ""
|
||||||
|
elif attr[1][0] == "l":
|
||||||
|
self.cur_item_name = "leech"
|
||||||
|
self.current_item["leech"] = ""
|
||||||
|
break
|
||||||
|
|
||||||
|
|
||||||
|
elif tag == "tr":
|
||||||
|
for attr in attrs:
|
||||||
|
if attr[0] == "class" and attr[1].startswith("tl"):
|
||||||
|
self.current_item = dict()
|
||||||
|
self.current_item["engine_url"] = self.url
|
||||||
|
break
|
||||||
|
|
||||||
|
elif self.pending_next_queries:
|
||||||
|
if tag == "a":
|
||||||
|
params = dict(attrs)
|
||||||
|
if params["title"] in self.next_queries_set:
|
||||||
|
return
|
||||||
|
self.list_searches.append(params['href'])
|
||||||
|
self.next_queries_set.add(params["title"])
|
||||||
|
if params["title"] == "10":
|
||||||
|
self.pending_next_queries = False
|
||||||
|
else:
|
||||||
|
self.pending_next_queries = False
|
||||||
|
|
||||||
|
elif self.next_queries:
|
||||||
|
if tag == "b" and ("class", "pager_no_link") in attrs:
|
||||||
|
self.next_queries = False
|
||||||
|
self.pending_next_queries = True
|
||||||
|
|
||||||
|
def handle_data(self, data):
|
||||||
|
if self.cur_item_name:
|
||||||
|
temp = self.current_item[self.cur_item_name]
|
||||||
|
self.current_item[self.cur_item_name] = " ".join((temp, data))
|
||||||
|
#Due to utf-8 we need to handle data two times if there is space
|
||||||
|
if not self.cur_item_name == "size":
|
||||||
|
self.cur_item_name = None
|
||||||
|
|
||||||
|
def handle_endtag(self, tag):
|
||||||
|
if self.current_item:
|
||||||
|
if tag == "tr":
|
||||||
|
prettyPrinter(self.current_item)
|
||||||
|
self.current_item = None
|
||||||
|
|
||||||
|
def search(self, what, cat="all"):
|
||||||
|
""" Performs search """
|
||||||
|
connection = http("extratorrent.cc")
|
||||||
|
|
||||||
|
query = "".join(("/search/?new=1&search=", what, "&s_cat=", self.supported_categories[cat]))
|
||||||
|
|
||||||
|
connection.request("GET", query)
|
||||||
|
response = connection.getresponse()
|
||||||
|
if response.status != 200:
|
||||||
|
return
|
||||||
|
|
||||||
|
list_searches = []
|
||||||
|
parser = self.MyHtmlParseWithBlackJack(list_searches, self.url)
|
||||||
|
parser.feed(response.read().decode('utf-8'))
|
||||||
parser.close()
|
parser.close()
|
||||||
break
|
|
||||||
if len(results) <= 0:
|
|
||||||
break
|
|
||||||
i += 1
|
|
||||||
|
|
||||||
|
for search_query in list_searches:
|
||||||
|
connection.request("GET", search_query)
|
||||||
|
response = connection.getresponse()
|
||||||
|
parser.feed(response.read().decode('utf-8'))
|
||||||
|
parser.close()
|
||||||
|
|
||||||
|
connection.close()
|
||||||
|
return
|
||||||
|
|
|
@ -1,4 +1,4 @@
|
||||||
#VERSION: 1.03
|
#VERSION: 1.04
|
||||||
#AUTHORS: Christophe Dumez (chris@qbittorrent.org)
|
#AUTHORS: Christophe Dumez (chris@qbittorrent.org)
|
||||||
|
|
||||||
# Redistribution and use in source and binary forms, with or without
|
# Redistribution and use in source and binary forms, with or without
|
||||||
|
@ -36,10 +36,6 @@ class legittorrents(object):
|
||||||
name = 'legittorrents'
|
name = 'legittorrents'
|
||||||
supported_categories = {'all': '', 'movies': '1', 'tv': '13', 'music': '2', 'games': '3', 'anime': '5', 'books': '6'}
|
supported_categories = {'all': '', 'movies': '1', 'tv': '13', 'music': '2', 'games': '3', 'anime': '5', 'books': '6'}
|
||||||
|
|
||||||
def __init__(self):
|
|
||||||
self.results = []
|
|
||||||
self.parser = self.SimpleSGMLParser(self.results, self.url)
|
|
||||||
|
|
||||||
def download_torrent(self, info):
|
def download_torrent(self, info):
|
||||||
print(download_file(info))
|
print(download_file(info))
|
||||||
|
|
||||||
|
|
|
@ -1,4 +1,4 @@
|
||||||
#VERSION: 1.51
|
#VERSION: 2.00
|
||||||
#AUTHORS: Christophe Dumez (chris@qbittorrent.org)
|
#AUTHORS: Christophe Dumez (chris@qbittorrent.org)
|
||||||
#CONTRIBUTORS: Diego de las Heras (diegodelasheras@gmail.com)
|
#CONTRIBUTORS: Diego de las Heras (diegodelasheras@gmail.com)
|
||||||
|
|
||||||
|
@ -26,90 +26,123 @@
|
||||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||||
# POSSIBILITY OF SUCH DAMAGE.
|
# POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
from html.parser import HTMLParser
|
||||||
|
from http.client import HTTPConnection as http
|
||||||
from novaprinter import prettyPrinter
|
from novaprinter import prettyPrinter
|
||||||
from helpers import retrieve_url, download_file
|
from helpers import download_file
|
||||||
import sgmllib3
|
|
||||||
import re
|
|
||||||
|
|
||||||
class mininova(object):
|
class mininova(object):
|
||||||
# Mandatory properties
|
""" Search engine class """
|
||||||
url = 'http://www.mininova.org'
|
url = 'http://www.mininova.org'
|
||||||
name = 'Mininova'
|
name = 'Mininova'
|
||||||
supported_categories = {'all': '0', 'movies': '4', 'tv': '8', 'music': '5', 'games': '3', 'anime': '1', 'software': '7', 'pictures': '6', 'books': '2'}
|
supported_categories = {'all' : '0',
|
||||||
|
'movies' : '4',
|
||||||
|
'tv' : '8',
|
||||||
|
'music' : '5',
|
||||||
|
'games' : '3',
|
||||||
|
'anime' : '1',
|
||||||
|
'software' : '7',
|
||||||
|
'pictures' : '6',
|
||||||
|
'books' : '2'}
|
||||||
|
|
||||||
def __init__(self):
|
def download_torrent(self, info):
|
||||||
self.results = []
|
print(download_file(info))
|
||||||
self.parser = self.SimpleSGMLParser(self.results, self.url)
|
|
||||||
|
|
||||||
def download_torrent(self, info):
|
class MyHtmlParseWithBlackJack(HTMLParser):
|
||||||
print(download_file(info))
|
""" Parser class """
|
||||||
|
def __init__(self, list_searches, url):
|
||||||
|
HTMLParser.__init__(self)
|
||||||
|
self.list_searches = list_searches
|
||||||
|
self.url = url
|
||||||
|
self.table_results = False
|
||||||
|
self.current_item = None
|
||||||
|
self.cur_item_name = None
|
||||||
|
self.next_queries = True
|
||||||
|
|
||||||
class SimpleSGMLParser(sgmllib3.SGMLParser):
|
def handle_starttag_tr(self, _):
|
||||||
def __init__(self, results, url, *args):
|
""" Handler of tr start tag """
|
||||||
sgmllib3.SGMLParser.__init__(self)
|
self.current_item = dict()
|
||||||
self.url = url
|
|
||||||
self.td_counter = None
|
|
||||||
self.current_item = None
|
|
||||||
self.results = results
|
|
||||||
|
|
||||||
def start_a(self, attr):
|
def handle_starttag_a(self, attrs):
|
||||||
params = dict(attr)
|
""" Handler of a start tag """
|
||||||
#print params
|
params = dict(attrs)
|
||||||
if 'href' in params:
|
link = params["href"]
|
||||||
if params['href'].startswith("/get/"):
|
|
||||||
self.current_item = {}
|
|
||||||
self.td_counter = 0
|
|
||||||
self.current_item['link']=self.url+params['href'].strip()
|
|
||||||
elif params['href'].startswith("/tor/") and self.current_item is not None:
|
|
||||||
self.current_item['desc_link']=self.url+params['href'].strip()
|
|
||||||
|
|
||||||
def handle_data(self, data):
|
if link.startswith("/tor/"):
|
||||||
if self.td_counter == 0:
|
#description
|
||||||
if 'name' not in self.current_item:
|
self.current_item["desc_link"] = "".join((self.url, link))
|
||||||
self.current_item['name'] = ''
|
#get download link from description by id
|
||||||
self.current_item['name']+= data
|
self.current_item["link"] = "".join((self.url, "/get/", link[5:-2]))
|
||||||
elif self.td_counter == 1:
|
self.cur_item_name = "name"
|
||||||
if 'size' not in self.current_item:
|
self.current_item["name"] = ""
|
||||||
self.current_item['size'] = ''
|
elif self.next_queries and link.startswith("/search"):
|
||||||
self.current_item['size']+= data.strip()
|
if params["title"].startswith("Page"):
|
||||||
elif self.td_counter == 2:
|
self.list_searches.append(link)
|
||||||
if 'seeds' not in self.current_item:
|
|
||||||
self.current_item['seeds'] = ''
|
|
||||||
self.current_item['seeds']+= data.strip()
|
|
||||||
elif self.td_counter == 3:
|
|
||||||
if 'leech' not in self.current_item:
|
|
||||||
self.current_item['leech'] = ''
|
|
||||||
self.current_item['leech']+= data.strip()
|
|
||||||
|
|
||||||
def start_td(self,attr):
|
def handle_starttag_td(self, attrs):
|
||||||
if isinstance(self.td_counter,int):
|
""" Handler of td start tag """
|
||||||
self.td_counter += 1
|
if ("align", "right") in attrs:
|
||||||
if self.td_counter > 4:
|
if not "size" in self.current_item:
|
||||||
self.td_counter = None
|
self.cur_item_name = "size"
|
||||||
# Display item
|
self.current_item["size"] = ""
|
||||||
if self.current_item:
|
|
||||||
self.current_item['engine_url'] = self.url
|
|
||||||
if not self.current_item['seeds'].isdigit():
|
|
||||||
self.current_item['seeds'] = 0
|
|
||||||
if not self.current_item['leech'].isdigit():
|
|
||||||
self.current_item['leech'] = 0
|
|
||||||
prettyPrinter(self.current_item)
|
|
||||||
self.results.append('a')
|
|
||||||
|
|
||||||
def search(self, what, cat='all'):
|
def handle_starttag_span(self, attrs):
|
||||||
ret = []
|
""" Handler of span start tag """
|
||||||
i = 1
|
if ("class", "g") in attrs:
|
||||||
while True and i<11:
|
self.cur_item_name = "seeds"
|
||||||
results = []
|
self.current_item["seeds"] = ""
|
||||||
parser = self.SimpleSGMLParser(results, self.url)
|
elif ("class", "b") in attrs:
|
||||||
dat = retrieve_url(self.url+'/search/%s/%s/seeds/%d'%(what, self.supported_categories[cat], i))
|
self.cur_item_name = "leech"
|
||||||
results_re = re.compile('(?s)<h1>Search results for.*')
|
self.current_item["leech"] = ""
|
||||||
for match in results_re.finditer(dat):
|
|
||||||
res_tab = match.group(0)
|
def handle_starttag(self, tag, attrs):
|
||||||
parser.feed(res_tab)
|
""" Parser's start tag handler """
|
||||||
|
if self.table_results:
|
||||||
|
dispatcher = getattr(self, "_".join(("handle_starttag", tag)), None)
|
||||||
|
if dispatcher:
|
||||||
|
dispatcher(attrs)
|
||||||
|
|
||||||
|
elif tag == "table":
|
||||||
|
self.table_results = ("class", "maintable") in attrs
|
||||||
|
|
||||||
|
def handle_endtag(self, tag):
|
||||||
|
""" Parser's end tag handler """
|
||||||
|
if tag == "tr" and self.current_item:
|
||||||
|
self.current_item["engine_url"] = self.url
|
||||||
|
prettyPrinter(self.current_item)
|
||||||
|
self.current_item = None
|
||||||
|
elif self.cur_item_name:
|
||||||
|
if tag == "a" or tag == "td":
|
||||||
|
self.cur_item_name = None
|
||||||
|
|
||||||
|
def handle_data(self, data):
|
||||||
|
""" Parser's data handler """
|
||||||
|
if self.cur_item_name:
|
||||||
|
temp = self.current_item[self.cur_item_name]
|
||||||
|
self.current_item[self.cur_item_name] = " ".join((temp, data))
|
||||||
|
|
||||||
|
def search(self, what, cat="all"):
|
||||||
|
""" Performs search """
|
||||||
|
connection = http("www.mininova.org")
|
||||||
|
|
||||||
|
query = "/".join(("/search", what, self.supported_categories[cat], "seeds"))
|
||||||
|
|
||||||
|
connection.request("GET", query)
|
||||||
|
response = connection.getresponse()
|
||||||
|
if response.status != 200:
|
||||||
|
return
|
||||||
|
|
||||||
|
list_searches = []
|
||||||
|
parser = self.MyHtmlParseWithBlackJack(list_searches, self.url)
|
||||||
|
parser.feed(response.read().decode('utf-8'))
|
||||||
parser.close()
|
parser.close()
|
||||||
break
|
|
||||||
if len(results) <= 0:
|
|
||||||
break
|
|
||||||
i += 1
|
|
||||||
|
|
||||||
|
parser.next_queries = False
|
||||||
|
for search_query in list_searches:
|
||||||
|
connection.request("GET", search_query)
|
||||||
|
response = connection.getresponse()
|
||||||
|
parser.feed(response.read().decode('utf-8'))
|
||||||
|
parser.close()
|
||||||
|
|
||||||
|
connection.close()
|
||||||
|
return
|
||||||
|
|
Binary file not shown.
Before Width: | Height: | Size: 252 B After Width: | Height: | Size: 951 B |
|
@ -1,4 +1,4 @@
|
||||||
#VERSION: 1.33
|
#VERSION: 1.36
|
||||||
#AUTHORS: Gekko Dam Beer (gekko04@users.sourceforge.net)
|
#AUTHORS: Gekko Dam Beer (gekko04@users.sourceforge.net)
|
||||||
#CONTRIBUTORS: Christophe Dumez (chris@qbittorrent.org)
|
#CONTRIBUTORS: Christophe Dumez (chris@qbittorrent.org)
|
||||||
# Bruno Barbieri (brunorex@gmail.com)
|
# Bruno Barbieri (brunorex@gmail.com)
|
||||||
|
@ -28,91 +28,84 @@
|
||||||
# POSSIBILITY OF SUCH DAMAGE.
|
# POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
from novaprinter import prettyPrinter
|
from novaprinter import prettyPrinter
|
||||||
from helpers import retrieve_url, download_file
|
from helpers import download_file, retrieve_url
|
||||||
from urllib import error, parse
|
from urllib import parse
|
||||||
from html.parser import HTMLParser
|
from html.parser import HTMLParser
|
||||||
import re
|
from re import compile as re_compile
|
||||||
|
|
||||||
class torrentreactor(object):
|
class torrentreactor(object):
|
||||||
url = 'http://www.torrentreactor.net'
|
url = 'http://www.torrentreactor.net'
|
||||||
name = 'TorrentReactor.Net'
|
name = 'TorrentReactor'
|
||||||
supported_categories = {'all': '', 'movies': '5', 'tv': '8', 'music': '6', 'games': '3', 'anime': '1', 'software': '2'}
|
supported_categories = {'all': '', 'movies': '5', 'tv': '8', 'music': '6', 'games': '3', 'anime': '1', 'software': '2'}
|
||||||
|
|
||||||
def download_torrent(self, info):
|
def download_torrent(self, info):
|
||||||
print(download_file(info))
|
print(download_file(info))
|
||||||
|
|
||||||
class SimpleHTMLParser(HTMLParser):
|
class SimpleHTMLParser(HTMLParser):
|
||||||
def __init__(self, results, url, *args):
|
def __init__(self, results, url, *args):
|
||||||
HTMLParser.__init__(self)
|
HTMLParser.__init__(self)
|
||||||
self.td_counter = None
|
self.td_counter = None
|
||||||
self.current_item = None
|
self.current_item = None
|
||||||
self.results = results
|
self.results = results
|
||||||
self.id = None
|
self.id = None
|
||||||
self.url = url
|
self.url = url
|
||||||
self.dispatcher = { 'a' : self.start_a, 'td' : self.start_td }
|
self.torrents_matcher = re_compile("/torrents/\d+.*")
|
||||||
|
self.dispatcher = { 'a' : self.start_a, 'td' : self.start_td }
|
||||||
|
|
||||||
def handle_starttag(self, tag, attrs):
|
def handle_starttag(self, tag, attrs):
|
||||||
if tag in self.dispatcher:
|
if tag in self.dispatcher:
|
||||||
self.dispatcher[tag](attrs)
|
self.dispatcher[tag](attrs)
|
||||||
|
|
||||||
def start_a(self, attr):
|
def start_a(self, attr):
|
||||||
params = dict(attr)
|
params = dict(attr)
|
||||||
if re.match("/torrents/\d+.*", params['href']):
|
if self.torrents_matcher.match(params['href']):
|
||||||
self.current_item = {}
|
self.current_item = {}
|
||||||
self.current_item['desc_link'] = self.url+params['href'].strip()
|
self.current_item['desc_link'] = self.url+params['href'].strip()
|
||||||
elif 'torrentreactor.net/download.php' in params['href']:
|
elif 'torrentreactor.net/download.php' in params['href']:
|
||||||
self.td_counter = 0
|
self.td_counter = 0
|
||||||
self.current_item['link'] = params['href'].strip()
|
self.current_item['link'] = params['href'].strip()
|
||||||
self.current_item['name'] = parse.unquote_plus(params['href'].split('&')[1].split('name=')[1])
|
self.current_item['name'] = parse.unquote_plus(params['href'].split('&')[1].split('name=')[1])
|
||||||
|
|
||||||
def handle_data(self, data):
|
def handle_data(self, data):
|
||||||
if self.td_counter == 1:
|
if self.td_counter == 1:
|
||||||
if 'size' not in self.current_item:
|
if 'size' not in self.current_item:
|
||||||
self.current_item['size'] = ''
|
self.current_item['size'] = ''
|
||||||
self.current_item['size']+= data.strip()
|
self.current_item['size']+= data.strip()
|
||||||
elif self.td_counter == 2:
|
elif self.td_counter == 2:
|
||||||
if 'seeds' not in self.current_item:
|
if 'seeds' not in self.current_item:
|
||||||
self.current_item['seeds'] = ''
|
self.current_item['seeds'] = ''
|
||||||
self.current_item['seeds']+= data.strip()
|
self.current_item['seeds']+= data.strip()
|
||||||
elif self.td_counter == 3:
|
elif self.td_counter == 3:
|
||||||
if 'leech' not in self.current_item:
|
if 'leech' not in self.current_item:
|
||||||
self.current_item['leech'] = ''
|
self.current_item['leech'] = ''
|
||||||
self.current_item['leech']+= data.strip()
|
self.current_item['leech']+= data.strip()
|
||||||
|
|
||||||
def start_td(self,attr):
|
def start_td(self,attr):
|
||||||
if isinstance(self.td_counter,int):
|
if isinstance(self.td_counter,int):
|
||||||
self.td_counter += 1
|
self.td_counter += 1
|
||||||
if self.td_counter > 3:
|
if self.td_counter > 3:
|
||||||
self.td_counter = None
|
self.td_counter = None
|
||||||
# add item to results
|
# add item to results
|
||||||
if self.current_item:
|
if self.current_item:
|
||||||
self.current_item['engine_url'] = self.url
|
self.current_item['engine_url'] = self.url
|
||||||
if not self.current_item['seeds'].isdigit():
|
if not self.current_item['seeds'].isdigit():
|
||||||
self.current_item['seeds'] = 0
|
self.current_item['seeds'] = 0
|
||||||
if not self.current_item['leech'].isdigit():
|
if not self.current_item['leech'].isdigit():
|
||||||
self.current_item['leech'] = 0
|
self.current_item['leech'] = 0
|
||||||
prettyPrinter(self.current_item)
|
prettyPrinter(self.current_item)
|
||||||
self.has_results = True
|
self.has_results = True
|
||||||
self.results.append('a')
|
self.results.append('a')
|
||||||
|
|
||||||
def __init__(self):
|
def search(self, what, cat='all'):
|
||||||
self.results = []
|
i = 0
|
||||||
self.parser = self.SimpleHTMLParser(self.results, self.url)
|
dat = ''
|
||||||
|
|
||||||
def search(self, what, cat='all'):
|
while i < 11:
|
||||||
i = 0
|
results = []
|
||||||
dat = ''
|
parser = self.SimpleHTMLParser(results, self.url)
|
||||||
while True and i<11:
|
dat = retrieve_url('%s/torrent-search/%s/%s?sort=seeders.desc&type=all&period=none&categories=%s'%(self.url, what, (i*35), self.supported_categories[cat]))
|
||||||
results = []
|
parser.feed(dat)
|
||||||
parser = self.SimpleHTMLParser(results, self.url)
|
parser.close()
|
||||||
|
if len(results) <= 0:
|
||||||
try:
|
break
|
||||||
dat = retrieve_url(self.url+'/torrent-search/%s/%s?sort=seeders.desc&type=all&period=none&categories=%s'%(what, (i*35), self.supported_categories[cat]))
|
i += 1
|
||||||
except error.HTTPError:
|
|
||||||
break
|
|
||||||
|
|
||||||
parser.feed(dat)
|
|
||||||
parser.close()
|
|
||||||
if len(results) <= 0:
|
|
||||||
break
|
|
||||||
i += 1
|
|
||||||
|
|
|
@ -1,4 +1,4 @@
|
||||||
#VERSION: 2.13
|
#VERSION: 2.14
|
||||||
#AUTHORS: Diego de las Heras (diegodelasheras@gmail.com)
|
#AUTHORS: Diego de las Heras (diegodelasheras@gmail.com)
|
||||||
|
|
||||||
# Redistribution and use in source and binary forms, with or without
|
# Redistribution and use in source and binary forms, with or without
|
||||||
|
|
|
@ -1,8 +1,9 @@
|
||||||
torrentreactor: 1.33
|
|
||||||
mininova: 1.51
|
|
||||||
piratebay: 2.11
|
|
||||||
extratorrent: 1.2
|
extratorrent: 1.2
|
||||||
|
torrentreactor: 1.36
|
||||||
|
mininova: 2.00
|
||||||
|
piratebay: 2.11
|
||||||
|
extratorrent: 2.0
|
||||||
kickasstorrents: 1.26
|
kickasstorrents: 1.26
|
||||||
btdigg: 1.23
|
btdigg: 1.23
|
||||||
legittorrents: 1.03
|
torrentz: 2.14
|
||||||
torrentz: 2.13
|
legittorrents: 1.04
|
||||||
|
|
|
@ -26,7 +26,7 @@
|
||||||
# POSSIBILITY OF SUCH DAMAGE.
|
# POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
|
||||||
#VERSION: 1.24
|
#VERSION: 1.40
|
||||||
|
|
||||||
# Author:
|
# Author:
|
||||||
# Fabien Devaux <fab AT gnux DOT info>
|
# Fabien Devaux <fab AT gnux DOT info>
|
||||||
|
@ -37,14 +37,14 @@
|
||||||
#
|
#
|
||||||
# Licence: BSD
|
# Licence: BSD
|
||||||
|
|
||||||
import sys
|
|
||||||
import threading
|
|
||||||
import os
|
|
||||||
import glob
|
|
||||||
import urllib.parse
|
import urllib.parse
|
||||||
|
from os import path, cpu_count
|
||||||
|
from glob import glob
|
||||||
|
from sys import argv
|
||||||
|
from multiprocessing import Pool
|
||||||
|
|
||||||
THREADED = True
|
THREADED = True
|
||||||
CATEGORIES = ('all', 'movies', 'tv', 'music', 'games', 'anime', 'software', 'pictures', 'books')
|
CATEGORIES = {'all', 'movies', 'tv', 'music', 'games', 'anime', 'software', 'pictures', 'books'}
|
||||||
|
|
||||||
################################################################################
|
################################################################################
|
||||||
# Every engine should have a "search" method taking
|
# Every engine should have a "search" method taking
|
||||||
|
@ -54,105 +54,129 @@ CATEGORIES = ('all', 'movies', 'tv', 'music', 'games', 'anime', 'software', 'pic
|
||||||
# As a convention, try to list results by decrasing number of seeds or similar
|
# As a convention, try to list results by decrasing number of seeds or similar
|
||||||
################################################################################
|
################################################################################
|
||||||
|
|
||||||
supported_engines = []
|
def initialize_engines():
|
||||||
|
""" Import available engines
|
||||||
|
|
||||||
engines = glob.glob(os.path.join(os.path.dirname(__file__), 'engines','*.py'))
|
Return list of available engines
|
||||||
for engine in engines:
|
"""
|
||||||
e = engine.split(os.sep)[-1][:-3]
|
supported_engines = []
|
||||||
if len(e.strip()) == 0: continue
|
|
||||||
if e.startswith('_'): continue
|
|
||||||
try:
|
|
||||||
exec("from engines.%s import %s"%(e,e))
|
|
||||||
supported_engines.append(e)
|
|
||||||
except:
|
|
||||||
pass
|
|
||||||
|
|
||||||
def engineToXml(short_name):
|
engines = glob(path.join(path.dirname(__file__), 'engines', '*.py'))
|
||||||
xml = "<%s>\n"%short_name
|
for engine in engines:
|
||||||
exec("search_engine = %s()"%short_name, globals())
|
engi = path.basename(engine).split('.')[0].strip()
|
||||||
xml += "<name>%s</name>\n"%search_engine.name
|
if len(engi) == 0 or engi.startswith('_'):
|
||||||
xml += "<url>%s</url>\n"%search_engine.url
|
continue
|
||||||
xml += "<categories>"
|
try:
|
||||||
if hasattr(search_engine, 'supported_categories'):
|
#import engines.[engine]
|
||||||
supported_categories = list(search_engine.supported_categories.keys())
|
engine_module = __import__(".".join(("engines", engi)))
|
||||||
supported_categories.remove('all')
|
#get low-level module
|
||||||
xml += " ".join(supported_categories)
|
engine_module = getattr(engine_module, engi)
|
||||||
xml += "</categories>\n"
|
#bind class name
|
||||||
xml += "</%s>\n"%short_name
|
globals()[engi] = getattr(engine_module, engi)
|
||||||
return xml
|
supported_engines.append(engi)
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
|
||||||
def displayCapabilities():
|
return supported_engines
|
||||||
"""
|
|
||||||
Display capabilities in XML format
|
|
||||||
<capabilities>
|
|
||||||
<engine_short_name>
|
|
||||||
<name>long name</name>
|
|
||||||
<url>http://example.com</url>
|
|
||||||
<categories>movies music games</categories>
|
|
||||||
</engine_short_name>
|
|
||||||
</capabilities>
|
|
||||||
"""
|
|
||||||
xml = "<capabilities>"
|
|
||||||
for short_name in supported_engines:
|
|
||||||
xml += engineToXml(short_name)
|
|
||||||
xml += "</capabilities>"
|
|
||||||
print(xml)
|
|
||||||
|
|
||||||
class EngineLauncher(threading.Thread):
|
def engines_to_xml(supported_engines):
|
||||||
def __init__(self, engine, what, cat='all'):
|
""" Generates xml for supported engines """
|
||||||
threading.Thread.__init__(self)
|
tab = " " * 4
|
||||||
self.engine = engine
|
|
||||||
self.what = what
|
|
||||||
self.cat = cat
|
|
||||||
def run(self):
|
|
||||||
if hasattr(self.engine, 'supported_categories'):
|
|
||||||
if self.cat == 'all' or self.cat in list(self.engine.supported_categories.keys()):
|
|
||||||
self.engine.search(self.what, self.cat)
|
|
||||||
elif self.cat == 'all':
|
|
||||||
self.engine.search(self.what)
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
for short_name in supported_engines:
|
||||||
if len(sys.argv) < 2:
|
search_engine = globals()[short_name]()
|
||||||
raise SystemExit('./nova2.py [all|engine1[,engine2]*] <category> <keywords>\navailable engines: %s'%
|
|
||||||
(','.join(supported_engines)))
|
|
||||||
|
|
||||||
if len(sys.argv) == 2:
|
supported_categories = ""
|
||||||
if sys.argv[1] == "--capabilities":
|
if hasattr(search_engine, "supported_categories"):
|
||||||
displayCapabilities()
|
supported_categories = " ".join((key for key in search_engine.supported_categories.keys()
|
||||||
sys.exit(0)
|
if key is not "all"))
|
||||||
else:
|
|
||||||
raise SystemExit('./nova.py [all|engine1[,engine2]*] <category> <keywords>\navailable engines: %s'%
|
|
||||||
(','.join(supported_engines)))
|
|
||||||
|
|
||||||
engines_list = [e.lower() for e in sys.argv[1].strip().split(',')]
|
yield "".join((tab, "<", short_name, ">\n",
|
||||||
|
tab, tab, "<name>", search_engine.name, "</name>\n",
|
||||||
|
tab, tab, "<url>", search_engine.url, "</url>\n",
|
||||||
|
tab, tab, "<categories>", supported_categories, "</categories>\n",
|
||||||
|
tab, "</", short_name, ">\n"))
|
||||||
|
|
||||||
if 'all' in engines_list:
|
def displayCapabilities(supported_engines):
|
||||||
engines_list = supported_engines
|
"""
|
||||||
|
Display capabilities in XML format
|
||||||
|
<capabilities>
|
||||||
|
<engine_short_name>
|
||||||
|
<name>long name</name>
|
||||||
|
<url>http://example.com</url>
|
||||||
|
<categories>movies music games</categories>
|
||||||
|
</engine_short_name>
|
||||||
|
</capabilities>
|
||||||
|
"""
|
||||||
|
xml = "".join(("<capabilities>\n",
|
||||||
|
"".join(engines_to_xml(supported_engines)),
|
||||||
|
"</capabilities>"))
|
||||||
|
print(xml)
|
||||||
|
|
||||||
cat = sys.argv[2].lower()
|
def run_search(engine_list):
|
||||||
|
""" Run search in engine
|
||||||
|
|
||||||
if cat not in CATEGORIES:
|
@param engine_list List with engine, query and category
|
||||||
raise SystemExit('Invalid category!')
|
|
||||||
|
|
||||||
what = urllib.parse.quote(' '.join(sys.argv[3:]))
|
@retval False if any exceptions occured
|
||||||
|
@retval True otherwise
|
||||||
|
"""
|
||||||
|
engine, what, cat = engine_list
|
||||||
|
try:
|
||||||
|
engine = engine()
|
||||||
|
#avoid exceptions due to invalid category
|
||||||
|
if hasattr(engine, 'supported_categories'):
|
||||||
|
cat = cat if cat in engine.supported_categories else "all"
|
||||||
|
engine.search(what, cat)
|
||||||
|
else:
|
||||||
|
engine.search(what)
|
||||||
|
|
||||||
threads = []
|
return True
|
||||||
for engine in engines_list:
|
except:
|
||||||
try:
|
return False
|
||||||
if THREADED:
|
|
||||||
exec("l = EngineLauncher(%s(), what, cat)"%engine)
|
def main(args):
|
||||||
threads.append(l)
|
supported_engines = initialize_engines()
|
||||||
l.start()
|
|
||||||
else:
|
if not args:
|
||||||
exec("e = %s()"%engine)
|
raise SystemExit("./nova2.py [all|engine1[,engine2]*] <category> <keywords>\n"
|
||||||
if hasattr(engine, 'supported_categories'):
|
"available engines: %s" % (','.join(supported_engines)))
|
||||||
if cat == 'all' or cat in list(e.supported_categories.keys()):
|
|
||||||
e.search(what, cat)
|
elif args[0] == "--capabilities":
|
||||||
elif self.cat == 'all':
|
displayCapabilities(supported_engines)
|
||||||
e.search(what)
|
return
|
||||||
engine().search(what, cat)
|
|
||||||
except:
|
elif len(args) < 3:
|
||||||
pass
|
raise SystemExit("./nova2.py [all|engine1[,engine2]*] <category> <keywords>\n"
|
||||||
if THREADED:
|
"available engines: %s" % (','.join(supported_engines)))
|
||||||
for t in threads:
|
|
||||||
t.join()
|
#get only unique engines with set
|
||||||
|
engines_list = set(e.lower() for e in args[0].strip().split(','))
|
||||||
|
|
||||||
|
if 'all' in engines_list:
|
||||||
|
engines_list = supported_engines
|
||||||
|
else:
|
||||||
|
#discard un-supported engines
|
||||||
|
engines_list = [engine for engine in engines_list
|
||||||
|
if engine in supported_engines]
|
||||||
|
|
||||||
|
if not engines_list:
|
||||||
|
#engine list is empty. Nothing to do here
|
||||||
|
return
|
||||||
|
|
||||||
|
cat = args[1].lower()
|
||||||
|
|
||||||
|
if cat not in CATEGORIES:
|
||||||
|
raise SystemExit(" - ".join(('Invalid category', cat)))
|
||||||
|
|
||||||
|
what = urllib.parse.quote(' '.join(args[2:]))
|
||||||
|
if THREADED:
|
||||||
|
#child process spawning is controlled min(number of searches, number of cpu)
|
||||||
|
with Pool(min(len(engines_list), cpu_count())) as pool:
|
||||||
|
pool.map(run_search, ([globals()[engine], what, cat] for engine in engines_list))
|
||||||
|
else:
|
||||||
|
#py3 note: map is needed to be evaluated for content to be executed
|
||||||
|
all(map(run_search, ([globals()[engine], what, cat] for engine in engines_list)))
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main(argv[1:])
|
||||||
|
|
|
@ -25,7 +25,7 @@
|
||||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
||||||
# POSSIBILITY OF SUCH DAMAGE.
|
# POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
#VERSION: 1.10
|
#VERSION: 1.20
|
||||||
|
|
||||||
# Author:
|
# Author:
|
||||||
# Christophe DUMEZ (chris@qbittorrent.org)
|
# Christophe DUMEZ (chris@qbittorrent.org)
|
||||||
|
@ -39,26 +39,26 @@ supported_engines = dict()
|
||||||
|
|
||||||
engines = glob.glob(os.path.join(os.path.dirname(__file__), 'engines','*.py'))
|
engines = glob.glob(os.path.join(os.path.dirname(__file__), 'engines','*.py'))
|
||||||
for engine in engines:
|
for engine in engines:
|
||||||
e = engine.split(os.sep)[-1][:-3]
|
e = engine.split(os.sep)[-1][:-3]
|
||||||
if len(e.strip()) == 0: continue
|
if len(e.strip()) == 0: continue
|
||||||
if e.startswith('_'): continue
|
if e.startswith('_'): continue
|
||||||
try:
|
try:
|
||||||
exec("from engines.%s import %s"%(e,e))
|
exec("from engines.%s import %s"%(e,e))
|
||||||
exec("engine_url = %s.url"%e)
|
exec("engine_url = %s.url"%e)
|
||||||
supported_engines[engine_url] = e
|
supported_engines[engine_url] = e
|
||||||
except:
|
except:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
if len(sys.argv) < 3:
|
if len(sys.argv) < 3:
|
||||||
raise SystemExit('./nova2dl.py engine_url download_parameter')
|
raise SystemExit('./nova2dl.py engine_url download_parameter')
|
||||||
engine_url = sys.argv[1].strip()
|
engine_url = sys.argv[1].strip()
|
||||||
download_param = sys.argv[2].strip()
|
download_param = sys.argv[2].strip()
|
||||||
if engine_url not in list(supported_engines.keys()):
|
if engine_url not in list(supported_engines.keys()):
|
||||||
raise SystemExit('./nova2dl.py: this engine_url was not recognized')
|
raise SystemExit('./nova2dl.py: this engine_url was not recognized')
|
||||||
exec("engine = %s()"%supported_engines[engine_url])
|
exec("engine = %s()"%supported_engines[engine_url])
|
||||||
if hasattr(engine, 'download_torrent'):
|
if hasattr(engine, 'download_torrent'):
|
||||||
engine.download_torrent(download_param)
|
engine.download_torrent(download_param)
|
||||||
else:
|
else:
|
||||||
print(download_file(download_param))
|
print(download_file(download_param))
|
||||||
sys.exit(0)
|
sys.exit(0)
|
||||||
|
|
|
@ -26,41 +26,39 @@
|
||||||
|
|
||||||
|
|
||||||
def prettyPrinter(dictionary):
|
def prettyPrinter(dictionary):
|
||||||
outtext = ''
|
dictionary['size'] = anySizeToBytes(dictionary['size'])
|
||||||
dictionary['size'] = anySizeToBytes(dictionary['size'])
|
outtext = "|".join((dictionary["link"], dictionary["name"].replace("|", " "), str(dictionary["size"]), str(dictionary["seeds"]), str(dictionary["leech"]), dictionary["engine_url"]))
|
||||||
if 'desc_link' in dictionary:
|
if 'desc_link' in dictionary:
|
||||||
outtext = '%s|%s|%s|%s|%s|%s|%s'%(dictionary['link'],dictionary['name'].replace('|',' '),dictionary['size'],dictionary['seeds'],dictionary['leech'],dictionary['engine_url'],dictionary['desc_link'])
|
outtext = "|".join((outtext, dictionary["desc_link"]))
|
||||||
else:
|
|
||||||
outtext = '%s|%s|%s|%s|%s|%s'%(dictionary['link'],dictionary['name'].replace('|',' '),dictionary['size'],dictionary['seeds'],dictionary['leech'],dictionary['engine_url'])
|
|
||||||
|
|
||||||
# fd 1 is stdout
|
# fd 1 is stdout
|
||||||
with open(1, 'w', encoding='utf-8', closefd=False) as utf8stdout:
|
with open(1, 'w', encoding='utf-8', closefd=False) as utf8stdout:
|
||||||
print(outtext, file=utf8stdout)
|
print(outtext, file=utf8stdout)
|
||||||
|
|
||||||
def anySizeToBytes(size_string):
|
def anySizeToBytes(size_string):
|
||||||
"""
|
"""
|
||||||
Convert a string like '1 KB' to '1024' (bytes)
|
Convert a string like '1 KB' to '1024' (bytes)
|
||||||
"""
|
"""
|
||||||
# separate integer from unit
|
# separate integer from unit
|
||||||
try:
|
try:
|
||||||
size, unit = size_string.split()
|
size, unit = size_string.split()
|
||||||
except:
|
except:
|
||||||
try:
|
try:
|
||||||
size = size_string.strip()
|
size = size_string.strip()
|
||||||
unit = ''.join([c for c in size if c.isalpha()])
|
unit = ''.join([c for c in size if c.isalpha()])
|
||||||
if len(unit) > 0:
|
if len(unit) > 0:
|
||||||
size = size[:-len(unit)]
|
size = size[:-len(unit)]
|
||||||
except:
|
except:
|
||||||
return -1
|
return -1
|
||||||
if len(size) == 0:
|
if len(size) == 0:
|
||||||
return -1
|
return -1
|
||||||
size = float(size)
|
size = float(size)
|
||||||
if len(unit) == 0:
|
if len(unit) == 0:
|
||||||
return int(size)
|
return int(size)
|
||||||
short_unit = unit.upper()[0]
|
short_unit = unit.upper()[0]
|
||||||
|
|
||||||
# convert
|
# convert
|
||||||
units_dict = { 'T': 40, 'G': 30, 'M': 20, 'K': 10 }
|
units_dict = {'T': 40, 'G': 30, 'M': 20, 'K': 10}
|
||||||
if short_unit in units_dict:
|
if short_unit in units_dict:
|
||||||
size = size * 2**units_dict[short_unit]
|
size = size * 2**units_dict[short_unit]
|
||||||
return int(size)
|
return int(size)
|
||||||
|
|
|
@ -42,350 +42,350 @@ _defaultproxy = None
|
||||||
_orgsocket = socket.socket
|
_orgsocket = socket.socket
|
||||||
|
|
||||||
class ProxyError(Exception):
|
class ProxyError(Exception):
|
||||||
def __init__(self, value):
|
def __init__(self, value):
|
||||||
self.value = value
|
self.value = value
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
return repr(self.value)
|
return repr(self.value)
|
||||||
|
|
||||||
class GeneralProxyError(ProxyError):
|
class GeneralProxyError(ProxyError):
|
||||||
def __init__(self, value):
|
def __init__(self, value):
|
||||||
self.value = value
|
self.value = value
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
return repr(self.value)
|
return repr(self.value)
|
||||||
|
|
||||||
class Socks5AuthError(ProxyError):
|
class Socks5AuthError(ProxyError):
|
||||||
def __init__(self, value):
|
def __init__(self, value):
|
||||||
self.value = value
|
self.value = value
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
return repr(self.value)
|
return repr(self.value)
|
||||||
|
|
||||||
class Socks5Error(ProxyError):
|
class Socks5Error(ProxyError):
|
||||||
def __init__(self, value):
|
def __init__(self, value):
|
||||||
self.value = value
|
self.value = value
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
return repr(self.value)
|
return repr(self.value)
|
||||||
|
|
||||||
class Socks4Error(ProxyError):
|
class Socks4Error(ProxyError):
|
||||||
def __init__(self, value):
|
def __init__(self, value):
|
||||||
self.value = value
|
self.value = value
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
return repr(self.value)
|
return repr(self.value)
|
||||||
|
|
||||||
class HTTPError(ProxyError):
|
class HTTPError(ProxyError):
|
||||||
def __init__(self, value):
|
def __init__(self, value):
|
||||||
self.value = value
|
self.value = value
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
return repr(self.value)
|
return repr(self.value)
|
||||||
|
|
||||||
_generalerrors = ("success",
|
_generalerrors = ("success",
|
||||||
"invalid data",
|
"invalid data",
|
||||||
"not connected",
|
"not connected",
|
||||||
"not available",
|
"not available",
|
||||||
"bad proxy type",
|
"bad proxy type",
|
||||||
"bad input")
|
"bad input")
|
||||||
|
|
||||||
_socks5errors = ("succeeded",
|
_socks5errors = ("succeeded",
|
||||||
"general SOCKS server failure",
|
"general SOCKS server failure",
|
||||||
"connection not allowed by ruleset",
|
"connection not allowed by ruleset",
|
||||||
"Network unreachable",
|
"Network unreachable",
|
||||||
"Host unreachable",
|
"Host unreachable",
|
||||||
"Connection refused",
|
"Connection refused",
|
||||||
"TTL expired",
|
"TTL expired",
|
||||||
"Command not supported",
|
"Command not supported",
|
||||||
"Address type not supported",
|
"Address type not supported",
|
||||||
"Unknown error")
|
"Unknown error")
|
||||||
|
|
||||||
_socks5autherrors = ("succeeded",
|
_socks5autherrors = ("succeeded",
|
||||||
"authentication is required",
|
"authentication is required",
|
||||||
"all offered authentication methods were rejected",
|
"all offered authentication methods were rejected",
|
||||||
"unknown username or invalid password",
|
"unknown username or invalid password",
|
||||||
"unknown error")
|
"unknown error")
|
||||||
|
|
||||||
_socks4errors = ("request granted",
|
_socks4errors = ("request granted",
|
||||||
"request rejected or failed",
|
"request rejected or failed",
|
||||||
"request rejected because SOCKS server cannot connect to identd on the client",
|
"request rejected because SOCKS server cannot connect to identd on the client",
|
||||||
"request rejected because the client program and identd report different user-ids",
|
"request rejected because the client program and identd report different user-ids",
|
||||||
"unknown error")
|
"unknown error")
|
||||||
|
|
||||||
def setdefaultproxy(proxytype=None,addr=None,port=None,rdns=True,username=None,password=None):
|
def setdefaultproxy(proxytype=None,addr=None,port=None,rdns=True,username=None,password=None):
|
||||||
"""setdefaultproxy(proxytype, addr[, port[, rdns[, username[, password]]]])
|
"""setdefaultproxy(proxytype, addr[, port[, rdns[, username[, password]]]])
|
||||||
Sets a default proxy which all further socksocket objects will use,
|
Sets a default proxy which all further socksocket objects will use,
|
||||||
unless explicitly changed.
|
unless explicitly changed.
|
||||||
"""
|
"""
|
||||||
global _defaultproxy
|
global _defaultproxy
|
||||||
_defaultproxy = (proxytype,addr,port,rdns,username,password)
|
_defaultproxy = (proxytype,addr,port,rdns,username,password)
|
||||||
|
|
||||||
class socksocket(socket.socket):
|
class socksocket(socket.socket):
|
||||||
"""socksocket([family[, type[, proto]]]) -> socket object
|
"""socksocket([family[, type[, proto]]]) -> socket object
|
||||||
|
|
||||||
Open a SOCKS enabled socket. The parameters are the same as
|
Open a SOCKS enabled socket. The parameters are the same as
|
||||||
those of the standard socket init. In order for SOCKS to work,
|
those of the standard socket init. In order for SOCKS to work,
|
||||||
you must specify family=AF_INET, type=SOCK_STREAM and proto=0.
|
you must specify family=AF_INET, type=SOCK_STREAM and proto=0.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(self, family=socket.AF_INET, type=socket.SOCK_STREAM, proto=0, _sock=None):
|
def __init__(self, family=socket.AF_INET, type=socket.SOCK_STREAM, proto=0, _sock=None):
|
||||||
_orgsocket.__init__(self,family,type,proto,_sock)
|
_orgsocket.__init__(self,family,type,proto,_sock)
|
||||||
if _defaultproxy != None:
|
if _defaultproxy != None:
|
||||||
self.__proxy = _defaultproxy
|
self.__proxy = _defaultproxy
|
||||||
else:
|
else:
|
||||||
self.__proxy = (None, None, None, None, None, None)
|
self.__proxy = (None, None, None, None, None, None)
|
||||||
self.__proxysockname = None
|
self.__proxysockname = None
|
||||||
self.__proxypeername = None
|
self.__proxypeername = None
|
||||||
|
|
||||||
def __recvall(self, bytes):
|
def __recvall(self, bytes):
|
||||||
"""__recvall(bytes) -> data
|
"""__recvall(bytes) -> data
|
||||||
Receive EXACTLY the number of bytes requested from the socket.
|
Receive EXACTLY the number of bytes requested from the socket.
|
||||||
Blocks until the required number of bytes have been received.
|
Blocks until the required number of bytes have been received.
|
||||||
"""
|
"""
|
||||||
data = ""
|
data = ""
|
||||||
while len(data) < bytes:
|
while len(data) < bytes:
|
||||||
d = self.recv(bytes-len(data))
|
d = self.recv(bytes-len(data))
|
||||||
if not d:
|
if not d:
|
||||||
raise GeneralProxyError("connection closed unexpectedly")
|
raise GeneralProxyError("connection closed unexpectedly")
|
||||||
data = data + d
|
data = data + d
|
||||||
return data
|
return data
|
||||||
|
|
||||||
def setproxy(self,proxytype=None,addr=None,port=None,rdns=True,username=None,password=None):
|
def setproxy(self,proxytype=None,addr=None,port=None,rdns=True,username=None,password=None):
|
||||||
"""setproxy(proxytype, addr[, port[, rdns[, username[, password]]]])
|
"""setproxy(proxytype, addr[, port[, rdns[, username[, password]]]])
|
||||||
Sets the proxy to be used.
|
Sets the proxy to be used.
|
||||||
proxytype - The type of the proxy to be used. Three types
|
proxytype - The type of the proxy to be used. Three types
|
||||||
are supported: PROXY_TYPE_SOCKS4 (including socks4a),
|
are supported: PROXY_TYPE_SOCKS4 (including socks4a),
|
||||||
PROXY_TYPE_SOCKS5 and PROXY_TYPE_HTTP
|
PROXY_TYPE_SOCKS5 and PROXY_TYPE_HTTP
|
||||||
addr - The address of the server (IP or DNS).
|
addr - The address of the server (IP or DNS).
|
||||||
port - The port of the server. Defaults to 1080 for SOCKS
|
port - The port of the server. Defaults to 1080 for SOCKS
|
||||||
servers and 8080 for HTTP proxy servers.
|
servers and 8080 for HTTP proxy servers.
|
||||||
rdns - Should DNS queries be preformed on the remote side
|
rdns - Should DNS queries be preformed on the remote side
|
||||||
(rather than the local side). The default is True.
|
(rather than the local side). The default is True.
|
||||||
Note: This has no effect with SOCKS4 servers.
|
Note: This has no effect with SOCKS4 servers.
|
||||||
username - Username to authenticate with to the server.
|
username - Username to authenticate with to the server.
|
||||||
The default is no authentication.
|
The default is no authentication.
|
||||||
password - Password to authenticate with to the server.
|
password - Password to authenticate with to the server.
|
||||||
Only relevant when username is also provided.
|
Only relevant when username is also provided.
|
||||||
"""
|
"""
|
||||||
self.__proxy = (proxytype,addr,port,rdns,username,password)
|
self.__proxy = (proxytype,addr,port,rdns,username,password)
|
||||||
|
|
||||||
def __negotiatesocks5(self,destaddr,destport):
|
def __negotiatesocks5(self,destaddr,destport):
|
||||||
"""__negotiatesocks5(self,destaddr,destport)
|
"""__negotiatesocks5(self,destaddr,destport)
|
||||||
Negotiates a connection through a SOCKS5 server.
|
Negotiates a connection through a SOCKS5 server.
|
||||||
"""
|
"""
|
||||||
# First we'll send the authentication packages we support.
|
# First we'll send the authentication packages we support.
|
||||||
if (self.__proxy[4]!=None) and (self.__proxy[5]!=None):
|
if (self.__proxy[4]!=None) and (self.__proxy[5]!=None):
|
||||||
# The username/password details were supplied to the
|
# The username/password details were supplied to the
|
||||||
# setproxy method so we support the USERNAME/PASSWORD
|
# setproxy method so we support the USERNAME/PASSWORD
|
||||||
# authentication (in addition to the standard none).
|
# authentication (in addition to the standard none).
|
||||||
self.sendall("\x05\x02\x00\x02")
|
self.sendall("\x05\x02\x00\x02")
|
||||||
else:
|
else:
|
||||||
# No username/password were entered, therefore we
|
# No username/password were entered, therefore we
|
||||||
# only support connections with no authentication.
|
# only support connections with no authentication.
|
||||||
self.sendall("\x05\x01\x00")
|
self.sendall("\x05\x01\x00")
|
||||||
# We'll receive the server's response to determine which
|
# We'll receive the server's response to determine which
|
||||||
# method was selected
|
# method was selected
|
||||||
chosenauth = self.__recvall(2)
|
chosenauth = self.__recvall(2)
|
||||||
if chosenauth[0] != "\x05":
|
if chosenauth[0] != "\x05":
|
||||||
self.close()
|
self.close()
|
||||||
raise GeneralProxyError((1,_generalerrors[1]))
|
raise GeneralProxyError((1,_generalerrors[1]))
|
||||||
# Check the chosen authentication method
|
# Check the chosen authentication method
|
||||||
if chosenauth[1] == "\x00":
|
if chosenauth[1] == "\x00":
|
||||||
# No authentication is required
|
# No authentication is required
|
||||||
pass
|
pass
|
||||||
elif chosenauth[1] == "\x02":
|
elif chosenauth[1] == "\x02":
|
||||||
# Okay, we need to perform a basic username/password
|
# Okay, we need to perform a basic username/password
|
||||||
# authentication.
|
# authentication.
|
||||||
self.sendall("\x01" + chr(len(self.__proxy[4])) + self.__proxy[4] + chr(len(self.__proxy[5])) + self.__proxy[5])
|
self.sendall("\x01" + chr(len(self.__proxy[4])) + self.__proxy[4] + chr(len(self.__proxy[5])) + self.__proxy[5])
|
||||||
authstat = self.__recvall(2)
|
authstat = self.__recvall(2)
|
||||||
if authstat[0] != "\x01":
|
if authstat[0] != "\x01":
|
||||||
# Bad response
|
# Bad response
|
||||||
self.close()
|
self.close()
|
||||||
raise GeneralProxyError((1,_generalerrors[1]))
|
raise GeneralProxyError((1,_generalerrors[1]))
|
||||||
if authstat[1] != "\x00":
|
if authstat[1] != "\x00":
|
||||||
# Authentication failed
|
# Authentication failed
|
||||||
self.close()
|
self.close()
|
||||||
raise Socks5AuthError((3,_socks5autherrors[3]))
|
raise Socks5AuthError((3,_socks5autherrors[3]))
|
||||||
# Authentication succeeded
|
# Authentication succeeded
|
||||||
else:
|
else:
|
||||||
# Reaching here is always bad
|
# Reaching here is always bad
|
||||||
self.close()
|
self.close()
|
||||||
if chosenauth[1] == "\xFF":
|
if chosenauth[1] == "\xFF":
|
||||||
raise Socks5AuthError((2,_socks5autherrors[2]))
|
raise Socks5AuthError((2,_socks5autherrors[2]))
|
||||||
else:
|
else:
|
||||||
raise GeneralProxyError((1,_generalerrors[1]))
|
raise GeneralProxyError((1,_generalerrors[1]))
|
||||||
# Now we can request the actual connection
|
# Now we can request the actual connection
|
||||||
req = "\x05\x01\x00"
|
req = "\x05\x01\x00"
|
||||||
# If the given destination address is an IP address, we'll
|
# If the given destination address is an IP address, we'll
|
||||||
# use the IPv4 address request even if remote resolving was specified.
|
# use the IPv4 address request even if remote resolving was specified.
|
||||||
try:
|
try:
|
||||||
ipaddr = socket.inet_aton(destaddr)
|
ipaddr = socket.inet_aton(destaddr)
|
||||||
req = req + "\x01" + ipaddr
|
req = req + "\x01" + ipaddr
|
||||||
except socket.error:
|
except socket.error:
|
||||||
# Well it's not an IP number, so it's probably a DNS name.
|
# Well it's not an IP number, so it's probably a DNS name.
|
||||||
if self.__proxy[3]==True:
|
if self.__proxy[3]==True:
|
||||||
# Resolve remotely
|
# Resolve remotely
|
||||||
ipaddr = None
|
ipaddr = None
|
||||||
req = req + "\x03" + chr(len(destaddr)) + destaddr
|
req = req + "\x03" + chr(len(destaddr)) + destaddr
|
||||||
else:
|
else:
|
||||||
# Resolve locally
|
# Resolve locally
|
||||||
ipaddr = socket.inet_aton(socket.gethostbyname(destaddr))
|
ipaddr = socket.inet_aton(socket.gethostbyname(destaddr))
|
||||||
req = req + "\x01" + ipaddr
|
req = req + "\x01" + ipaddr
|
||||||
req = req + struct.pack(">H",destport)
|
req = req + struct.pack(">H",destport)
|
||||||
self.sendall(req)
|
self.sendall(req)
|
||||||
# Get the response
|
# Get the response
|
||||||
resp = self.__recvall(4)
|
resp = self.__recvall(4)
|
||||||
if resp[0] != "\x05":
|
if resp[0] != "\x05":
|
||||||
self.close()
|
self.close()
|
||||||
raise GeneralProxyError((1,_generalerrors[1]))
|
raise GeneralProxyError((1,_generalerrors[1]))
|
||||||
elif resp[1] != "\x00":
|
elif resp[1] != "\x00":
|
||||||
# Connection failed
|
# Connection failed
|
||||||
self.close()
|
self.close()
|
||||||
if ord(resp[1])<=8:
|
if ord(resp[1])<=8:
|
||||||
raise Socks5Error((ord(resp[1]),_generalerrors[ord(resp[1])]))
|
raise Socks5Error((ord(resp[1]),_generalerrors[ord(resp[1])]))
|
||||||
else:
|
else:
|
||||||
raise Socks5Error((9,_generalerrors[9]))
|
raise Socks5Error((9,_generalerrors[9]))
|
||||||
# Get the bound address/port
|
# Get the bound address/port
|
||||||
elif resp[3] == "\x01":
|
elif resp[3] == "\x01":
|
||||||
boundaddr = self.__recvall(4)
|
boundaddr = self.__recvall(4)
|
||||||
elif resp[3] == "\x03":
|
elif resp[3] == "\x03":
|
||||||
resp = resp + self.recv(1)
|
resp = resp + self.recv(1)
|
||||||
boundaddr = self.__recvall(ord(resp[4]))
|
boundaddr = self.__recvall(ord(resp[4]))
|
||||||
else:
|
else:
|
||||||
self.close()
|
self.close()
|
||||||
raise GeneralProxyError((1,_generalerrors[1]))
|
raise GeneralProxyError((1,_generalerrors[1]))
|
||||||
boundport = struct.unpack(">H",self.__recvall(2))[0]
|
boundport = struct.unpack(">H",self.__recvall(2))[0]
|
||||||
self.__proxysockname = (boundaddr,boundport)
|
self.__proxysockname = (boundaddr,boundport)
|
||||||
if ipaddr != None:
|
if ipaddr != None:
|
||||||
self.__proxypeername = (socket.inet_ntoa(ipaddr),destport)
|
self.__proxypeername = (socket.inet_ntoa(ipaddr),destport)
|
||||||
else:
|
else:
|
||||||
self.__proxypeername = (destaddr,destport)
|
self.__proxypeername = (destaddr,destport)
|
||||||
|
|
||||||
def getproxysockname(self):
|
def getproxysockname(self):
|
||||||
"""getsockname() -> address info
|
"""getsockname() -> address info
|
||||||
Returns the bound IP address and port number at the proxy.
|
Returns the bound IP address and port number at the proxy.
|
||||||
"""
|
"""
|
||||||
return self.__proxysockname
|
return self.__proxysockname
|
||||||
|
|
||||||
def getproxypeername(self):
|
def getproxypeername(self):
|
||||||
"""getproxypeername() -> address info
|
"""getproxypeername() -> address info
|
||||||
Returns the IP and port number of the proxy.
|
Returns the IP and port number of the proxy.
|
||||||
"""
|
"""
|
||||||
return _orgsocket.getpeername(self)
|
return _orgsocket.getpeername(self)
|
||||||
|
|
||||||
def getpeername(self):
|
def getpeername(self):
|
||||||
"""getpeername() -> address info
|
"""getpeername() -> address info
|
||||||
Returns the IP address and port number of the destination
|
Returns the IP address and port number of the destination
|
||||||
machine (note: getproxypeername returns the proxy)
|
machine (note: getproxypeername returns the proxy)
|
||||||
"""
|
"""
|
||||||
return self.__proxypeername
|
return self.__proxypeername
|
||||||
|
|
||||||
def __negotiatesocks4(self,destaddr,destport):
|
def __negotiatesocks4(self,destaddr,destport):
|
||||||
"""__negotiatesocks4(self,destaddr,destport)
|
"""__negotiatesocks4(self,destaddr,destport)
|
||||||
Negotiates a connection through a SOCKS4 server.
|
Negotiates a connection through a SOCKS4 server.
|
||||||
"""
|
"""
|
||||||
# Check if the destination address provided is an IP address
|
# Check if the destination address provided is an IP address
|
||||||
rmtrslv = False
|
rmtrslv = False
|
||||||
try:
|
try:
|
||||||
ipaddr = socket.inet_aton(destaddr)
|
ipaddr = socket.inet_aton(destaddr)
|
||||||
except socket.error:
|
except socket.error:
|
||||||
# It's a DNS name. Check where it should be resolved.
|
# It's a DNS name. Check where it should be resolved.
|
||||||
if self.__proxy[3]==True:
|
if self.__proxy[3]==True:
|
||||||
ipaddr = "\x00\x00\x00\x01"
|
ipaddr = "\x00\x00\x00\x01"
|
||||||
rmtrslv = True
|
rmtrslv = True
|
||||||
else:
|
else:
|
||||||
ipaddr = socket.inet_aton(socket.gethostbyname(destaddr))
|
ipaddr = socket.inet_aton(socket.gethostbyname(destaddr))
|
||||||
# Construct the request packet
|
# Construct the request packet
|
||||||
req = "\x04\x01" + struct.pack(">H",destport) + ipaddr
|
req = "\x04\x01" + struct.pack(">H",destport) + ipaddr
|
||||||
# The username parameter is considered userid for SOCKS4
|
# The username parameter is considered userid for SOCKS4
|
||||||
if self.__proxy[4] != None:
|
if self.__proxy[4] != None:
|
||||||
req = req + self.__proxy[4]
|
req = req + self.__proxy[4]
|
||||||
req = req + "\x00"
|
req = req + "\x00"
|
||||||
# DNS name if remote resolving is required
|
# DNS name if remote resolving is required
|
||||||
# NOTE: This is actually an extension to the SOCKS4 protocol
|
# NOTE: This is actually an extension to the SOCKS4 protocol
|
||||||
# called SOCKS4A and may not be supported in all cases.
|
# called SOCKS4A and may not be supported in all cases.
|
||||||
if rmtrslv==True:
|
if rmtrslv==True:
|
||||||
req = req + destaddr + "\x00"
|
req = req + destaddr + "\x00"
|
||||||
self.sendall(req)
|
self.sendall(req)
|
||||||
# Get the response from the server
|
# Get the response from the server
|
||||||
resp = self.__recvall(8)
|
resp = self.__recvall(8)
|
||||||
if resp[0] != "\x00":
|
if resp[0] != "\x00":
|
||||||
# Bad data
|
# Bad data
|
||||||
self.close()
|
self.close()
|
||||||
raise GeneralProxyError((1,_generalerrors[1]))
|
raise GeneralProxyError((1,_generalerrors[1]))
|
||||||
if resp[1] != "\x5A":
|
if resp[1] != "\x5A":
|
||||||
# Server returned an error
|
# Server returned an error
|
||||||
self.close()
|
self.close()
|
||||||
if ord(resp[1]) in (91,92,93):
|
if ord(resp[1]) in (91,92,93):
|
||||||
self.close()
|
self.close()
|
||||||
raise Socks4Error((ord(resp[1]),_socks4errors[ord(resp[1])-90]))
|
raise Socks4Error((ord(resp[1]),_socks4errors[ord(resp[1])-90]))
|
||||||
else:
|
else:
|
||||||
raise Socks4Error((94,_socks4errors[4]))
|
raise Socks4Error((94,_socks4errors[4]))
|
||||||
# Get the bound address/port
|
# Get the bound address/port
|
||||||
self.__proxysockname = (socket.inet_ntoa(resp[4:]),struct.unpack(">H",resp[2:4])[0])
|
self.__proxysockname = (socket.inet_ntoa(resp[4:]),struct.unpack(">H",resp[2:4])[0])
|
||||||
if rmtrslv != None:
|
if rmtrslv != None:
|
||||||
self.__proxypeername = (socket.inet_ntoa(ipaddr),destport)
|
self.__proxypeername = (socket.inet_ntoa(ipaddr),destport)
|
||||||
else:
|
else:
|
||||||
self.__proxypeername = (destaddr,destport)
|
self.__proxypeername = (destaddr,destport)
|
||||||
|
|
||||||
def __negotiatehttp(self, destaddr, destport):
    """__negotiatehttp(self, destaddr, destport)
    Negotiates a tunnel through an HTTP proxy using the CONNECT method.

    destaddr - destination host name or IP address (string).
    destport - destination TCP port (integer).

    Raises GeneralProxyError on a malformed or truncated proxy reply,
    and HTTPError when the proxy answers with a non-200 status code.
    """
    # If we need to resolve locally, we do this now
    if self.__proxy[3] == False:
        addr = socket.gethostbyname(destaddr)
    else:
        addr = destaddr
    self.sendall("CONNECT " + addr + ":" + str(destport) + " HTTP/1.1\r\n" + "Host: " + destaddr + "\r\n\r\n")
    # Read until the end of the response headers ("\r\n\r\n").
    resp = self.recv(1)
    while resp.find("\r\n\r\n") == -1:
        data = self.recv(1)
        if not data:
            # recv() returns "" when the peer closes the connection;
            # without this guard the loop would spin forever on EOF.
            self.close()
            raise GeneralProxyError((1, _generalerrors[1]))
        resp = resp + data
    # Only the first line (the status line) is needed to check whether
    # the tunnel was established.
    statusline = resp.splitlines()[0].split(" ", 2)
    if statusline[0] not in ("HTTP/1.0", "HTTP/1.1"):
        self.close()
        raise GeneralProxyError((1, _generalerrors[1]))
    try:
        statuscode = int(statusline[1])
    except (ValueError, IndexError):
        # Missing or non-numeric status code -> malformed reply.
        self.close()
        raise GeneralProxyError((1, _generalerrors[1]))
    if statuscode != 200:
        self.close()
        # The reason phrase is optional in the status line; don't let a
        # terse proxy reply turn into a bare IndexError.
        reason = statusline[2] if len(statusline) > 2 else ""
        raise HTTPError((statuscode, reason))
    # The proxy does not report a bound address for CONNECT tunnels.
    self.__proxysockname = ("0.0.0.0", 0)
    self.__proxypeername = (addr, destport)
def connect(self, destpair):
    """connect(self, destpair)
    Connects to the specified destination through a proxy.

    destpair - a tuple of the IP/DNS address (str) and the port number
    (int), identical to socket's connect().
    To select the proxy server use setproxy().

    Raises GeneralProxyError for invalid arguments or an unknown proxy
    type; the negotiate helpers raise their own protocol errors.
    """
    # Do a minimal input check first.
    # NOTE: the original expression "type(destpair) in (list,tuple)==False"
    # was a chained comparison that always evaluated to False, so the
    # container-type check never fired; isinstance() restores it.
    if (not isinstance(destpair, (list, tuple))) or (len(destpair) < 2) or (type(destpair[0]) != str) or (type(destpair[1]) != int):
        raise GeneralProxyError((5, _generalerrors[5]))
    proxytype = self.__proxy[0]
    if proxytype == PROXY_TYPE_SOCKS5:
        # Default SOCKS port is 1080 when none was configured.
        portnum = self.__proxy[2] if self.__proxy[2] != None else 1080
        _orgsocket.connect(self, (self.__proxy[1], portnum))
        self.__negotiatesocks5(destpair[0], destpair[1])
    elif proxytype == PROXY_TYPE_SOCKS4:
        portnum = self.__proxy[2] if self.__proxy[2] != None else 1080
        _orgsocket.connect(self, (self.__proxy[1], portnum))
        self.__negotiatesocks4(destpair[0], destpair[1])
    elif proxytype == PROXY_TYPE_HTTP:
        # Conventional default port for HTTP proxies.
        portnum = self.__proxy[2] if self.__proxy[2] != None else 8080
        _orgsocket.connect(self, (self.__proxy[1], portnum))
        self.__negotiatehttp(destpair[0], destpair[1])
    elif proxytype == None:
        # No proxy configured: plain direct connection.
        _orgsocket.connect(self, (destpair[0], destpair[1]))
    else:
        raise GeneralProxyError((4, _generalerrors[4]))
Loading…
Add table
Add a link
Reference in a new issue