mirror of
https://github.com/vikstrous/pirate-get
synced 2025-01-10 10:04:21 +01:00
added the ability to use a locally downloaded copy of the db
This commit is contained in:
parent
e6bf79b444
commit
6fd505a722
@ -2,22 +2,73 @@
|
|||||||
import webbrowser
|
import webbrowser
|
||||||
import urllib
|
import urllib
|
||||||
import urllib2
|
import urllib2
|
||||||
import sys
|
|
||||||
import re
|
import re
|
||||||
|
from HTMLParser import HTMLParser
|
||||||
|
import argparse
|
||||||
|
|
||||||
# create a subclass and override the handler methods
class MyHTMLParser(HTMLParser):
    """Scan Pirate Bay database/search markup and collect magnet links.

    Feeds on XML/HTML and, for every <title> whose text contains the query
    (case-insensitively), builds a magnet URI from the following <magnet>
    element's info-hash. Collected links end up in ``self.results``.
    """

    # Class-level defaults; real per-instance state is set in __init__.
    title = ''
    q = ''
    state = 'looking'
    results = []

    def __init__(self, q):
        # q: search term; stored lower-cased for case-insensitive matching.
        HTMLParser.__init__(self)
        self.q = q.lower()
        # BUG FIX: 'results' was only a class attribute, so every instance
        # shared one list and results accumulated across parsers. Give each
        # instance its own list.
        self.results = []

    def handle_starttag(self, tag, attrs):
        # A <title> tag starts a candidate entry; a <magnet> tag that follows
        # a matched title carries the info-hash we want to capture.
        if tag == 'title':
            self.state = 'title'
        if tag == 'magnet' and self.state == 'matched':
            self.state = 'magnet'

    def handle_data(self, data):
        if self.state == 'title':
            # Case-insensitive substring match of the query in the title.
            if data.lower().find(self.q) != -1:
                self.title = data
                self.state = 'matched'
            else:
                self.state = 'looking'
        if self.state == 'magnet':
            # Build a magnet URI: info-hash plus URL-quoted display name.
            self.results.append('magnet:?xt=urn:btih:' + urllib.quote(data) + '&dn=' + urllib.quote(self.title))
            self.state = 'looking'
|
def main():
|
||||||
|
|
||||||
|
parser = argparse.ArgumentParser(description='Finds and downloads torrents from the Pirate Bay')
|
||||||
|
|
||||||
|
def local(args):
|
||||||
|
xml_str = ''
|
||||||
|
with open(args.database, 'r') as f:
|
||||||
|
xml_str += f.read()
|
||||||
|
htmlparser = MyHTMLParser(args.q)
|
||||||
|
htmlparser.feed(xml_str)
|
||||||
|
return htmlparser.results
|
||||||
|
|
||||||
|
def remote(args):
|
||||||
|
f = urllib2.urlopen('http://thepiratebay.se/search/' + args.q.replace(" ", "+") + '/0/7/0')
|
||||||
|
return re.findall(""""(magnet\:\?xt=[^"]*)""", f.read())
|
||||||
|
|
||||||
|
parser.add_argument('q', metavar='search_term', help="The term to search for")
|
||||||
|
parser.add_argument('--local', dest='database', help="An xml file containing the Pirate Bay database")
|
||||||
|
|
||||||
|
args = parser.parse_args()
|
||||||
|
if args.database:
|
||||||
|
mags = local(args)
|
||||||
|
else:
|
||||||
|
mags = remote(args)
|
||||||
|
|
||||||
|
if mags:
|
||||||
|
for m in range(len(mags)):
|
||||||
|
name = re.search("dn=([^\&]*)", mags[m])
|
||||||
|
print str(m) + ".", urllib.unquote(name.group(1).encode('ascii')).decode('utf-8').replace("+", " ")
|
||||||
|
l = raw_input("Select a link: ")
|
||||||
|
webbrowser.open(mags[int(l)])
|
||||||
|
else:
|
||||||
|
print "no results"
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main()
|
||||||
|
Loading…
Reference in New Issue
Block a user