2015-08-30 03:28:43 +02:00
|
|
|
import re
|
|
|
|
import sys
|
|
|
|
import gzip
|
2018-06-15 01:10:23 +02:00
|
|
|
import pyperclip
|
2015-08-30 03:28:43 +02:00
|
|
|
import urllib.request as request
|
|
|
|
import urllib.parse as parse
|
|
|
|
import urllib.error
|
2015-08-31 07:54:59 +02:00
|
|
|
import os.path
|
2015-08-30 03:28:43 +02:00
|
|
|
|
|
|
|
import pirate.data
|
2020-05-21 01:57:46 +02:00
|
|
|
import json
|
2015-08-30 03:28:43 +02:00
|
|
|
|
2020-05-21 01:57:46 +02:00
|
|
|
from datetime import datetime
|
2015-08-30 03:28:43 +02:00
|
|
|
from io import BytesIO
|
2015-09-03 09:25:17 +02:00
|
|
|
|
|
|
|
|
2015-09-20 23:14:00 +02:00
|
|
|
def parse_category(printer, category):
    """Resolve *category* (a name or a numeric id) to its numeric id.

    Unrecognised values produce a warning and fall back to 0, meaning
    "any category".
    """
    # Numeric strings are compared as ints against the category ids.
    try:
        category = int(category)
    except ValueError:
        pass

    known = pirate.data.categories
    if category in known.values():
        return category
    if category in known:
        return known[category]

    printer.print('Invalid category ignored', color='WARN')
    return 0
|
2015-08-30 03:28:43 +02:00
|
|
|
|
2015-09-03 09:05:33 +02:00
|
|
|
|
2015-09-20 23:14:00 +02:00
|
|
|
def parse_sort(printer, sort):
    """Resolve *sort* (a name or a numeric id) to its sort spec.

    Matches against either the numeric key or the short name of each
    entry in ``pirate.data.sorts``; unknown values produce a warning
    and fall back to the 'Default' sort.
    """
    # Numeric strings are compared as ints against the sort ids.
    try:
        sort = int(sort)
    except ValueError:
        pass

    sorts = pirate.data.sorts
    spec = next((val[1:] for key, val in sorts.items()
                 if sort in (key, val[0])), None)
    if spec is not None:
        return spec

    printer.print('Invalid sort ignored', color='WARN')
    return sorts['Default'][1:]
|
|
|
|
|
|
|
|
|
|
|
|
def parse_page(page):
    """Parse an API JSON reply into a list of result dicts.

    Adds pretty-printed size/date fields, the magnet link, and converts
    the numeric fields to ints.  Raises IOError when the reply is not
    valid JSON (typically a blocked mirror serving an HTML page).
    """
    try:
        data = json.load(page)
    except json.decoder.JSONDecodeError:
        raise IOError('invalid JSON in API reply: blocked mirror?')

    # The API signals an empty result set with a single placeholder row.
    if len(data) == 1 and 'No results' in data[0]['name']:
        return []

    results = []
    for entry in data:
        size = int(entry['size'])
        entry['raw_size'] = size
        entry['size'] = pretty_size(size)
        # The magnet link needs the hex digest, so build it before the
        # info hash is converted to an integer.
        entry['magnet'] = build_magnet(entry['name'], entry['info_hash'])
        entry['info_hash'] = int(entry['info_hash'], 16)
        entry['raw_uploaded'] = int(entry['added'])
        entry['uploaded'] = pretty_date(entry['added'])
        entry['seeders'] = int(entry['seeders'])
        entry['leechers'] = int(entry['leechers'])
        entry['category'] = int(entry['category'])
        results.append(entry)

    return results
|
|
|
|
|
|
|
|
|
|
|
|
def sort_results(sort, res):
    """Return *res* sorted by *sort*, a (field name, reverse flag) pair."""
    field, descending = sort
    return sorted(res, key=lambda item: item[field], reverse=descending)
|
2015-09-03 09:05:33 +02:00
|
|
|
|
|
|
|
|
2020-05-21 01:57:46 +02:00
|
|
|
def pretty_size(size):
    """Format a byte count as a human-readable binary-unit string."""
    units = ('KiB', 'MiB', 'GiB', 'TiB', 'PiB')
    # Check the largest unit first so the first match wins.
    for exponent in range(len(units), 0, -1):
        threshold = 1024 ** exponent
        if size >= threshold:
            return '{:.1f} {}'.format(size / threshold, units[exponent - 1])
    return str(size) + ' B'
|
2019-11-24 11:01:58 +01:00
|
|
|
|
|
|
|
|
2020-05-21 01:57:46 +02:00
|
|
|
def pretty_date(ts):
|
|
|
|
date = datetime.fromtimestamp(int(ts))
|
|
|
|
return date.strftime('%Y-%m-%d %H:%M')
|
2015-09-04 07:34:08 +02:00
|
|
|
|
2015-09-03 09:05:33 +02:00
|
|
|
|
2020-05-22 10:04:15 +02:00
|
|
|
def build_magnet(name, info_hash):
    """Build a magnet URI from a torrent name and its hex info hash."""
    # Quote with an empty safe-set so every reserved char is escaped.
    encoded_name = parse.quote(name, '')
    return 'magnet:?xt=urn:btih:{}&dn={}'.format(info_hash, encoded_name)
|
2020-03-10 14:33:25 +01:00
|
|
|
|
2015-08-30 03:28:43 +02:00
|
|
|
|
2020-05-21 16:32:01 +02:00
|
|
|
def build_request_path(mode, category, terms):
    """Build the percent-encoded query path for an API request.

    *mode* selects the endpoint ('search', 'top', 'recent', 'browse');
    raises Exception for an unknown mode, or for 'browse' with no
    category selected.
    """
    def encode(path):
        # Escape everything except the URL metacharacters we rely on.
        return parse.quote(path, '?=&/')

    if mode == 'search':
        return encode('/q.php?q={}&cat={}'.format(' '.join(terms), category))
    if mode == 'top':
        cat = 'all' if category == 0 else category
        return encode('/precompiled/data_top100_{}.json'.format(cat))
    if mode == 'recent':
        return encode('/precompiled/data_top100_recent.json')
    if mode == 'browse':
        if category == 0:
            raise Exception('You must specify a category')
        return encode('/q.php?q=category:{}'.format(category))
    raise Exception('Invalid mode', mode)
|
|
|
|
|
|
|
|
|
|
|
|
def remote(printer, category, sort, mode, terms, mirror, timeout):
    """Query *mirror*'s API and return the sorted list of results.

    Builds the request path from *mode*/*category*/*terms*, fetches it
    with *timeout*, transparently decompresses gzip replies, and exits
    cleanly on Ctrl-C.  Network errors (urllib.error.URLError) propagate
    to the caller.
    """
    query = build_request_path(mode, category, terms)

    # Catch the Ctrl-C exception and exit cleanly
    try:
        req = request.Request(
            mirror + query,
            headers=pirate.data.default_headers)
        # NOTE: the old inner `except URLError as e: raise e` was a
        # no-op re-raise that only obscured the traceback; URLError now
        # simply propagates.
        f = request.urlopen(req, timeout=timeout)

        # Transparently decompress gzip-encoded replies.
        if f.info().get('Content-Encoding') == 'gzip':
            f = gzip.GzipFile(fileobj=BytesIO(f.read()))
    except KeyboardInterrupt:
        printer.print('\nCancelled.')
        sys.exit(0)

    return sort_results(sort, parse_page(f))
|
|
|
|
|
2020-05-21 01:57:46 +02:00
|
|
|
|
2020-05-21 02:28:03 +02:00
|
|
|
def find_api(mirror, timeout):
    """Locate the API base URL for *mirror*.

    Probes a list of well-known API paths first, then falls back to
    extracting the API server address from the site's main.js.  Raises
    IOError when the mirror is Cloudflare-protected or no API can be
    found.
    """
    # try common paths
    for path in ['', '/apip', '/api.php?url=']:
        req = request.Request(mirror + path + '/q.php?q=test&cat=0',
                              headers=pirate.data.default_headers)
        try:
            f = request.urlopen(req, timeout=timeout)
            if f.info().get_content_type() == 'application/json':
                return mirror + path
        except urllib.error.HTTPError as e:
            # A 503 serving Cloudflare's challenge page means the whole
            # mirror is unusable, so fail fast instead of probing on.
            res = e.fp.read().decode()
            if e.code == 503 and 'cf-browser-verification' in res:
                raise IOError('Cloudflare protected')
        except urllib.error.URLError:
            # Connection-level failure (DNS, refused, timeout): a bare
            # URLError has no .fp/.code — the old code crashed with
            # AttributeError here.  Just try the next candidate path.
            pass

    # extract api path from main.js
    req = request.Request(mirror + '/static/main.js',
                          headers=pirate.data.default_headers)
    try:
        f = request.urlopen(req, timeout=timeout)
        if f.info().get_content_type() == 'application/javascript':
            match = re.search("var server='([^']+)'", f.read().decode())
            # Guard against main.js not containing the expected pattern
            # (the old code raised AttributeError on a failed match).
            if match:
                return mirror + match.group(1)
    except urllib.error.URLError:
        raise IOError('API not found: no main.js')

    raise IOError('API not found')
|
2015-08-30 03:28:43 +02:00
|
|
|
|
|
|
|
|
2020-05-21 02:28:03 +02:00
|
|
|
def get_torrent(info_hash, timeout):
    """Download the cached .torrent file for *info_hash* (an int).

    Returns the raw torrent bytes; HTTP errors propagate to the caller.
    """
    url = 'http://itorrents.org/torrent/{:X}.torrent'.format(info_hash)
    req = request.Request(url, headers=pirate.data.default_headers)
    req.add_header('Accept-encoding', 'gzip')

    response = request.urlopen(req, timeout=timeout)
    # Transparently decompress a gzip-encoded response body.
    if response.info().get('Content-Encoding') == 'gzip':
        response = gzip.GzipFile(fileobj=BytesIO(response.read()))

    return response.read()
|
|
|
|
|
|
|
|
|
2020-05-21 02:28:03 +02:00
|
|
|
def save_torrents(printer, chosen_links, results, folder, timeout):
    """Download and save a .torrent file for each chosen result.

    Results with no upstream cached torrent are reported and skipped;
    the rest are written into *folder* as '<name>.torrent'.
    """
    for link in chosen_links:
        result = results[link]
        # Sanitize the torrent name so it is a safe single filename.
        torrent_name = result['name'].replace('/', '_').replace('\\', '_')
        file = os.path.join(folder, torrent_name + '.torrent')

        try:
            torrent = get_torrent(result['info_hash'], timeout)
        except urllib.error.HTTPError as e:
            printer.print('There is no cached file for this torrent :('
                          ' \nCode: {} - {}'.format(e.code, e.reason),
                          color='ERROR')
        else:
            # Use a context manager so the handle is closed promptly
            # (the old bare open().write() leaked it until GC).
            with open(file, 'wb') as f:
                f.write(torrent)
            printer.print('Saved {:X} in {}'.format(result['info_hash'],
                                                    file))
|
2015-08-30 03:28:43 +02:00
|
|
|
|
|
|
|
|
2015-09-20 23:14:00 +02:00
|
|
|
def save_magnets(printer, chosen_links, results, folder):
    """Write a .magnet file into *folder* for each chosen result."""
    for index in chosen_links:
        result = results[index]
        # Sanitize the torrent name so it is a safe single filename.
        safe_name = result['name'].replace('/', '_').replace('\\', '_')
        path = os.path.join(folder, safe_name + '.magnet')

        printer.print('Saved {:X} in {}'.format(result['info_hash'], path))
        with open(path, 'w') as handle:
            handle.write(result['magnet'] + '\n')
|
2018-06-15 01:10:23 +02:00
|
|
|
|
2019-11-24 11:01:58 +01:00
|
|
|
|
2018-06-15 01:10:23 +02:00
|
|
|
def copy_magnets(printer, chosen_links, results):
    """Copy the magnet links of the chosen results to the clipboard."""
    magnets = []
    for index in chosen_links:
        result = results[index]
        magnets.append(result['magnet'] + "\n")
        printer.print('Copying {:X} to clipboard'.format(result['info_hash']))

    # Single clipboard write with all links, newline-terminated.
    pyperclip.copy(''.join(magnets))
|