#!/usr/bin/env python
#
# Copyright 2014, Viktor Stanchev and contributors
#
# This file is part of pirate-get.
#
# pirate-get is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pirate-get is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with pirate-get. If not, see <http://www.gnu.org/licenses/>.
import os
import sys
import random
import re
import string
import gzip
import configparser
import argparse
import builtins

import webbrowser
import urllib.request as request
import urllib.parse as parse

from html.parser import HTMLParser
from urllib.error import URLError
from socket import timeout
from io import BytesIO


headers = {'User-Agent': 'pirate get'}


categories = {
    'All': 0,
    'Applications': 300,
    'Applications/Android': 306,
    'Applications/Handheld': 304,
    'Applications/IOS (iPad/iPhone)': 305,
    'Applications/Mac': 302,
    'Applications/Other OS': 399,
    'Applications/UNIX': 303,
    'Applications/Windows': 301,
    'Audio': 100,
    'Audio/Audio books': 102,
    'Audio/FLAC': 104,
    'Audio/Music': 101,
    'Audio/Other': 199,
    'Audio/Sound clips': 103,
    'Games': 400,
    'Games/Android': 408,
    'Games/Handheld': 406,
    'Games/IOS (iPad/iPhone)': 407,
    'Games/Mac': 402,
    'Games/Other': 499,
    'Games/PC': 401,
    'Games/PSx': 403,
    'Games/Wii': 405,
    'Games/XBOX360': 404,
    'Other': 600,
    'Other/Comics': 602,
    'Other/Covers': 604,
    'Other/E-books': 601,
    'Other/Other': 699,
    'Other/Physibles': 605,
    'Other/Pictures': 603,
    'Porn': 500,
    'Porn/Games': 504,
    'Porn/HD - Movies': 505,
    'Porn/Movie clips': 506,
    'Porn/Movies': 501,
    'Porn/Movies DVDR': 502,
    'Porn/Other': 599,
    'Porn/Pictures': 503,
    'Video': 200,
    'Video/3D': 209,
    'Video/HD - Movies': 207,
    'Video/HD - TV shows': 208,
    'Video/Handheld': 206,
    'Video/Movie clips': 204,
    'Video/Movies': 201,
    'Video/Movies DVDR': 202,
    'Video/Music videos': 203,
    'Video/Other': 299,
    'Video/TV shows': 205}

sorts = {
    'TitleDsc': 1, 'TitleAsc': 2,
    'DateDsc': 3, 'DateAsc': 4,
    'SizeDsc': 5, 'SizeAsc': 6,
    'SeedersDsc': 7, 'SeedersAsc': 8,
    'LeechersDsc': 9, 'LeechersAsc': 10,
    'CategoryDsc': 13, 'CategoryAsc': 14,
    'Default': 99}
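
# A sketch of how these tables are used (see remote() below): searching for
# 'ubuntu' with the default sort SeedersDsc (7) and category All (0) maps to
# the mirror path /search/ubuntu/0/7/0.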
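
# Passing responses through untouched disables urllib's redirect handling;
# main() uses this opener when fetching the mirror list, so a redirecting
# mirror shows up as a non-200 status code instead of being followed.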
class NoRedirection(request.HTTPErrorProcessor):
    def http_response(self, _, res):
        return res

    https_response = http_response


# subclass HTMLParser to scan a local database dump for matching titles
class BayParser(HTMLParser):
    title = ''
    q = ''
    state = 'looking'

    def __init__(self, q):
        HTMLParser.__init__(self)
        self.q = q.lower()
        # instance attribute, so results aren't shared between parsers
        self.results = []

    def handle_starttag(self, tag, attrs):
        if tag == 'title':
            self.state = 'title'
        if tag == 'magnet' and self.state == 'matched':
            self.state = 'magnet'

    def handle_data(self, data):
        if self.state == 'title':
            if data.lower().find(self.q) != -1:
                self.title = data
                self.state = 'matched'
            else:
                self.state = 'looking'
        if self.state == 'magnet':
            self.results.append([
                'magnet:?xt=urn:btih:' +
                parse.quote(data) +
                '&dn=' +
                parse.quote(self.title), '?', '?'])
            self.state = 'looking'
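

# Usage sketch: p = BayParser('ubuntu'); p.feed(xml_text); p.results then
# holds [magnet, seeds, leeches] triples, with '?' for the unknown counts.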
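

# print() wrapper that accepts an extra color= keyword on top of the builtin;
# colorama is imported lazily, so uncolored output needs no extra dependency.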
def print(*args, **kwargs):
    if kwargs.get('color', False):
        import colorama
        colorama.init()
        color_dict = {
            'default': '',
            'header': colorama.Back.BLACK + colorama.Fore.WHITE,
            'alt': colorama.Fore.YELLOW,
            'zebra_0': '',
            'zebra_1': colorama.Fore.BLUE,
            'WARN': colorama.Fore.MAGENTA,
            'ERROR': colorama.Fore.RED}

        try:
            c = color_dict[kwargs.pop('color')]
            args = (c + args[0],) + args[1:] + (colorama.Style.RESET_ALL,)
        except (KeyError, IndexError):
            pass
        return builtins.print(*args, **kwargs)
    else:
        kwargs.pop('color', None)
        return builtins.print(*args, **kwargs)


# TODO: redo this with an HTML parser instead of regex
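# scrape one mirror: returns (results, sizes, uploaded, identifiers); results
# is a list of [magnet, seeds, leeches] and the other three lists are
# index-aligned with it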
def remote(args, mirror):
    res_l = []
    pages = int(args.pages)
    if pages < 1:
        raise ValueError('Please provide an integer greater than 0 '
                         'for the number of pages to fetch.')

    if str(args.category) in [str(c) for c in categories.values()]:
        category = str(args.category)
    elif args.category in categories.keys():
        category = str(categories[args.category])
    else:
        category = '0'
        print('Invalid category ignored', color='WARN')

    if str(args.sort) in [str(s) for s in sorts.values()]:
        sort = str(args.sort)
    elif args.sort in sorts.keys():
        sort = str(sorts[args.sort])
    else:
        sort = '99'
        print('Invalid sort ignored', color='WARN')

    # Catch the Ctrl-C exception and exit cleanly
    try:
        sizes = []
        uploaded = []
        identifiers = []
        for page in range(pages):
            if args.browse:
                path = '/browse/'
                if category == '0':
                    category = '100'
                path = '/browse/' + '/'.join(str(i) for i in (
                    category, page, sort))
            elif len(args.search) == 0:
                path = '/top/48h' if args.recent else '/top/'
                if category == '0':
                    path += 'all'
                else:
                    path += str(category)
            else:
                path = '/search/' + '/'.join(str(i) for i in (
                    '+'.join(args.search),
                    page, sort,
                    category))

            req = request.Request(mirror + path, headers=headers)
            req.add_header('Accept-encoding', 'gzip')
            f = request.urlopen(req, timeout=5)
            if f.info().get('Content-Encoding') == 'gzip':
                f = gzip.GzipFile(fileobj=BytesIO(f.read()))
            res = f.read().decode('utf-8')
            found = re.findall(r'"(magnet\:\?xt=[^"]*)|<td align="right">'
                               r'([^<]+)</td>', res)

            # check for a blocked mirror
            no_results = re.search(r'"No hits\.', res)
            if found == [] and no_results is None:
                # Contradiction - we found no results,
                # but the page didn't say there were no results.
                # The page is probably not actually the pirate bay,
                # so let's try another mirror
                raise IOError('Blocked mirror detected.')

            # get sizes as well and substitute the &nbsp; character
            sizes.extend([match.replace('&nbsp;', ' ').split()
                          for match in re.findall(r'(?<=Size )[0-9.]'
                                                  r'+\&nbsp\;[KMGT]*[i ]*B',
                                                  res)])

            uploaded.extend([match.replace('&nbsp;', ' ')
                             for match in re.findall(r'(?<=Uploaded )'
                                                     r'.+(?=\, Size)', res)])

            identifiers.extend([match.replace('&nbsp;', ' ')
                                for match in re.findall('(?<=/torrent/)'
                                                        '[0-9]+(?=/)', res)])
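
            # findall above alternates magnet links (group 1) with the
            # seed/leech <td> cells (group 2); walk the matches with a small
            # state machine to pair each magnet with its two counts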
            state = 'seeds'
            curr = ['', 0, 0]  # magnet, seeds, leeches
            for f in found:
                if f[1] == '':
                    curr[0] = f[0]
                else:
                    if state == 'seeds':
                        curr[1] = f[1]
                        state = 'leeches'
                    else:
                        curr[2] = f[1]
                        state = 'seeds'
                        res_l.append(curr)
                        curr = ['', 0, 0]
    except KeyboardInterrupt:
        print('\nCancelled.')
        sys.exit(0)

    # return the sizes in a separate list
    return res_l, sizes, uploaded, identifiers
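

# search a local XML dump of the Pirate Bay database instead of hitting
# a mirror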
def local(db, search):
    xml = open(db).readlines()
    parser = BayParser(' '.join(search))
    parser.feed(''.join(xml))
    return parser.results


# load user options, to override default ones
def config_to_load():
    if os.path.isfile(os.path.expandvars('$XDG_CONFIG_HOME/pirate-get')):
        return os.path.expandvars('$XDG_CONFIG_HOME/pirate-get')
    else:
        return os.path.expanduser('~/.config/pirate-get')
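
# A minimal example of that config file (a sketch; these two keys are the
# ones read in main() below):
#
#   [SaveToFile]
#   enabled = true
#   directory = ~/downloads/pirate-get/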


# enhanced print output with column titles
def print_search_results(mags, sizes, uploaded):
    columns = int(os.popen('stty size', 'r').read().split()[1]) - 52
    cur_color = 'zebra_0'

    print('{:>4} {:>5} {:>5} {:>5} {:9} {:11} {:{length}}'.format(
          'LINK', 'SEED', 'LEECH', 'RATIO',
          'SIZE', 'UPLOAD', 'NAME', length=columns),
          color='header')

    for m, magnet in enumerate(mags):
        no_seeders = int(magnet[1])
        no_leechers = int(magnet[2])
        name = re.search(r'dn=([^\&]*)', magnet[0])

        # compute the S/L ratio (Higher is better)
        try:
            ratio = no_seeders / no_leechers
        except ZeroDivisionError:
            ratio = float('inf')

        # Alternate between colors
        cur_color = 'zebra_0' if (cur_color == 'zebra_1') else 'zebra_1'

        torrent_name = parse.unquote(name.group(1)).replace('+', ' ')
        # enhanced print output with justified columns
        print('{:4} {:5} {:5} {:5.1f} {:5.1f} {:3} '
              '{:<11} {:{length}}'.format(m, no_seeders, no_leechers,
                                          ratio, float(sizes[m][0]),
                                          sizes[m][1], uploaded[m],
                                          torrent_name[:columns],
                                          length=columns),
              color=cur_color)
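

# fetch each chosen torrent's detail page and print the plain-text
# description found in its <div class="nfo"> <pre> block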
def print_descriptions(chosen_links, mags, site, identifiers):
    for link in chosen_links:
        link = int(link)
        path = '/torrent/%s/' % identifiers[link]
        req = request.Request(site + path, headers=headers)
        req.add_header('Accept-encoding', 'gzip')
        f = request.urlopen(req)

        if f.info().get('Content-Encoding') == 'gzip':
            f = gzip.GzipFile(fileobj=BytesIO(f.read()))

        res = f.read().decode('utf-8')
        name = re.search(r'dn=([^\&]*)', mags[link][0])
        torrent_name = parse.unquote(name.group(1)).replace('+', ' ')
        desc = re.search(r'<div class="nfo">\s*<pre>(.+?)(?=</pre>)',
                         res, re.DOTALL).group(1)

        # Replace HTML links with markdown style versions
        desc = re.sub(r'<a href="\s*([^"]+?)\s*"[^>]*>(\s*)([^<]+?)(\s*'
                      r')</a>', r'\2[\3](\1)\4', desc)

        print('Description for "' + torrent_name + '":', color='zebra_1')
        print(desc, color='zebra_0')
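

# fetch the AJAX file list for each chosen torrent and print its
# name/size pairs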
def print_fileLists(chosen_links, mags, site, identifiers):
    for link in chosen_links:
        path = '/ajax_details_filelist.php'
        query = '?id=' + identifiers[int(link)]
        req = request.Request(site + path + query, headers=headers)
        req.add_header('Accept-encoding', 'gzip')
        f = request.urlopen(req)

        if f.info().get('Content-Encoding') == 'gzip':
            f = gzip.GzipFile(fileobj=BytesIO(f.read()))

        res = f.read().decode('utf-8').replace('&nbsp;', ' ')
        files = re.findall(r'<td align="left">\s*([^<]+?)\s*</td><td ali'
                           r'gn="right">\s*([^<]+?)\s*</tr>', res)
        name = re.search(r'dn=([^\&]*)', mags[int(link)][0])
        torrent_name = parse.unquote(name.group(1)).replace('+', ' ')

        print('Files in "' + torrent_name + '":', color='zebra_1')
        cur_color = 'zebra_0'

        for f in files:
            print('{0[0]:>11} {0[1]}'.format(f), color=cur_color)
            cur_color = 'zebra_0' if (cur_color == 'zebra_1') else 'zebra_1'


def id_generator(size=6, chars=string.ascii_uppercase + string.digits):
    return ''.join(random.choice(chars) for _ in range(size))
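
# e.g. id_generator() might return 'Q3M9ZR'; main() uses it below to name
# saved .magnet files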


def main():
    # new ConfigParser
    config = configparser.ConfigParser()

    # default options so we don't die later
    config.add_section('SaveToFile')
    config.set('SaveToFile', 'enabled', 'false')
    config.set('SaveToFile', 'directory', '~/downloads/pirate-get/')

    config.read([config_to_load()])

    parser = argparse.ArgumentParser(
        description='finds and downloads torrents from the Pirate Bay')
    parser.add_argument('-b', dest='browse',
                        action='store_true',
                        help='display in Browse mode')
    parser.add_argument('search', metavar='search',
                        nargs='*', help='term to search for')
    parser.add_argument('-c', dest='category', metavar='category',
                        help='specify a category to search', default='All')
    parser.add_argument('-s', dest='sort', metavar='sort',
                        help='specify a sort option', default='SeedersDsc')
    parser.add_argument('-R', dest='recent', action='store_true',
                        help='torrents uploaded in the last 48 hours. '
                             '*ignored in searches*')
    parser.add_argument('-l', dest='list_categories',
                        action='store_true',
                        help='list categories')
    parser.add_argument('--list_sorts', dest='list_sorts',
                        action='store_true',
                        help='list sortable types')
    parser.add_argument('-t', dest='transmission',
                        action='store_true',
                        help='call transmission-remote to start the download')
    parser.add_argument('--custom', dest='command',
                        help='call custom command, %%s will be replaced with '
                             'the url')
    parser.add_argument('-L', '--local', dest='database',
                        help='an xml file containing the Pirate Bay database')
    parser.add_argument('-p', dest='pages', default=1,
                        help="the number of pages to fetch (doesn't work with "
                             '--local)')
    parser.add_argument('-0', dest='first',
                        action='store_true',
                        help='choose the top result')
    parser.add_argument('-a', dest='download_all',
                        action='store_true',
                        help='download all results')
    parser.add_argument('--color', dest='color',
                        action='store_false', default=True,
                        help='disable colored output')
    args = parser.parse_args()

    if args.list_categories:
        cur_color = 'zebra_0'
        for key, value in sorted(categories.items()):
            cur_color = 'zebra_0' if (cur_color == 'zebra_1') else 'zebra_1'
            print(str(value), '\t', key, sep='', color=cur_color)
        return

    if args.list_sorts:
        cur_color = 'zebra_0'
        for key, value in sorted(sorts.items()):
            cur_color = 'zebra_0' if (cur_color == 'zebra_1') else 'zebra_1'
            print(str(value), '\t', key, sep='', color=cur_color)
        return

    if args.database:
        mags = local(args.database, args.search)
    else:
        mags, mirrors = [], ['https://thepiratebay.se']
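        # fetch extra mirrors from proxybay; the [3:] below skips the first
        # three lines of list.txt, which appear to be headers rather than
        # mirror URLs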
        try:
            opener = request.build_opener(NoRedirection)
            f = opener.open('https://proxybay.info/list.txt', timeout=5)
            if f.getcode() != 200:
                raise IOError('The pirate bay responded with an error.')
            mirrors.extend([i.decode('utf-8').strip()
                            for i in f.readlines()][3:])
        except IOError:
            print('Could not fetch additional mirrors', color='WARN')
        print(*mirrors, sep='\n')
        for mirror in mirrors:
            try:
                print('Trying', mirror, end='... ')
                mags, sizes, uploaded, identifiers = remote(args, mirror)
                site = mirror
                print('Ok', color='alt')
                break
            except (URLError, IOError, ValueError, timeout) as e:
                print('Failed', color='WARN')
        else:
            print('No available mirrors :(', color='WARN')
            return

    if not mags:
        print('No results')
        return

    print_search_results(mags, sizes, uploaded)

    if args.first:
        print('Choosing first result')
        choices = [0]
    elif args.download_all:
        print('Downloading all results')
        choices = range(len(mags))
    else:
        # New input loop to support different link options
        while True:
            try:
                print("\nSelect links (Type 'h' for more options"
                      ", 'q' to quit)", end='\b', color='alt')
                l = input(': ')
            except KeyboardInterrupt:
                print('\nCancelled.')
                return

            try:
                # Very permissive handling
                # Check for any occurrence of h, d, f, p or q
                cmd_code_match = re.search(r'([hdfpq])', l,
                                           flags=re.IGNORECASE)
                if cmd_code_match:
                    code = cmd_code_match.group(0).lower()
                else:
                    code = None

                # Clean up command codes
                # Substitute multiple consecutive spaces/commas for single comma
                # Remove anything that isn't an integer or comma.
                # Turn into list
                l = re.sub(r'^[hdfp, ]*|[hdfp, ]*$', '', l)
                l = re.sub('[ ,]+', ',', l)
                l = re.sub('[^0-9,]', '', l)
                choices = l.split(',')

                # Act on option, if supplied
                print('')
                if code == 'h':
                    print('Options:',
                          '<links>: Download selected torrents',
                          '[d<links>]: Get descriptions',
                          '[f<links>]: Get files',
                          '[p] Print search results',
                          '[q] Quit', sep='\n')
                elif code == 'q':
                    print('Bye.', color='alt')
                    return
                elif code == 'd':
                    print_descriptions(choices, mags, site, identifiers)
                elif code == 'f':
                    print_fileLists(choices, mags, site, identifiers)
                elif code == 'p':
                    print_search_results(mags, sizes, uploaded)
                elif not l:
                    print('No links entered!', color='WARN')
                else:
                    break
            except Exception as e:
                print('Exception:', e, color='ERROR')
                choices = ()

    if config.getboolean('SaveToFile', 'enabled'):
        # Save to file is enabled
        fileName = os.path.expanduser(
            config.get('SaveToFile', 'directory')
        ) + id_generator() + '.magnet'

        print('Saving to File:', fileName)

        f = open(fileName, 'w')
        for choice in choices:
            choice = int(choice)
            url = mags[choice][0]
            f.write(url + '\n')
        f.close()

    else:
        # use transmission as default
        for choice in choices:
            url = mags[int(choice)][0]
            print(url)
            if args.transmission:
                os.system('transmission-remote --add "%s" ' % (url))
                os.system('transmission-remote -l')
            elif args.command:
                os.system(args.command % (url))
            else:
                webbrowser.open(url)


if __name__ == '__main__':
    main()