fix mypy errors (although there are many # type: ignore
lying around)
This commit is contained in:
parent
6088ef6eec
commit
f4651fc6e3
@ -17,5 +17,5 @@ install:
|
||||
script:
|
||||
- "pylint ./magneticod/magneticod"
|
||||
- "pylint ./magneticow/magneticow"
|
||||
- "mypy ./magneticod/magneticod"
|
||||
- "mypy ./magneticow/magneticow"
|
||||
- "mypy --ignore-missing-imports --follow-imports=skip ./magneticod/magneticod"
|
||||
- "mypy --ignore-missing-imports --follow-imports=skip ./magneticow/magneticow"
|
@ -42,7 +42,7 @@ async def metadata_queue_watcher(database: persistence.Database, metadata_queue:
|
||||
logging.info("Corrupt metadata for %s! Ignoring.", info_hash.hex())
|
||||
|
||||
|
||||
def parse_ip_port(netloc) -> typing.Optional[typing.Tuple[str, int]]:
|
||||
def parse_ip_port(netloc: str) -> typing.Optional[typing.Tuple[str, int]]:
|
||||
# In case no port supplied
|
||||
try:
|
||||
return str(ipaddress.ip_address(netloc)), 0
|
||||
@ -55,9 +55,9 @@ def parse_ip_port(netloc) -> typing.Optional[typing.Tuple[str, int]]:
|
||||
ip = str(ipaddress.ip_address(parsed.hostname))
|
||||
port = parsed.port
|
||||
if port is None:
|
||||
raise argparse.ArgumentParser("Invalid node address supplied!")
|
||||
return None
|
||||
except ValueError:
|
||||
raise argparse.ArgumentParser("Invalid node address supplied!")
|
||||
return None
|
||||
|
||||
return ip, port
|
||||
|
||||
@ -69,7 +69,7 @@ def parse_size(value: str) -> int:
|
||||
raise argparse.ArgumentTypeError("Invalid argument. {}".format(e))
|
||||
|
||||
|
||||
def parse_cmdline_arguments(args) -> typing.Optional[argparse.Namespace]:
|
||||
def parse_cmdline_arguments(args: typing.List[str]) -> typing.Optional[argparse.Namespace]:
|
||||
parser = argparse.ArgumentParser(
|
||||
description="Autonomous BitTorrent DHT crawler and metadata fetcher.",
|
||||
epilog=textwrap.dedent("""\
|
||||
@ -142,7 +142,8 @@ def main() -> int:
|
||||
loop = asyncio.get_event_loop()
|
||||
node = dht.SybilNode(database.is_infohash_new, arguments.max_metadata_size)
|
||||
loop.create_task(node.launch(arguments.node_addr))
|
||||
metadata_queue_watcher_task = loop.create_task(metadata_queue_watcher(database, node.metadata_q()))
|
||||
# mypy ignored: mypy doesn't know (yet) about coroutines
|
||||
metadata_queue_watcher_task = loop.create_task(metadata_queue_watcher(database, node.metadata_q())) # type: ignore
|
||||
|
||||
try:
|
||||
asyncio.get_event_loop().run_forever()
|
||||
|
@ -23,25 +23,39 @@ from io import BytesIO
|
||||
|
||||
import better_bencode
|
||||
|
||||
Types = typing.Union[int, bytes, list, "KRPCDict"]
|
||||
KRPCDict = typing.Dict[bytes, Types]
|
||||
"""
|
||||
The type definitions under this comment is actually this:
|
||||
|
||||
KRPCTypes = typing.Union[int, bytes, "KRPCList", "KRPCDict"]
|
||||
KRPCList = typing.List[KRPCTypes]
|
||||
KRPCDict = typing.Dict[bytes, KRPCTypes]
|
||||
|
||||
But since mypy:
|
||||
* does NOT support self-referential types
|
||||
* have problems with complex Unions (in case you thought about expanding manually: I tried)
|
||||
|
||||
just write `typing.Any`. =(
|
||||
"""
|
||||
KRPCTypes = typing.Any
|
||||
KRPCList = typing.Any
|
||||
KRPCDict = typing.Any
|
||||
|
||||
|
||||
def dumps(obj) -> bytes:
|
||||
def dumps(obj: KRPCTypes) -> bytes:
|
||||
try:
|
||||
return better_bencode.dumps(obj)
|
||||
except:
|
||||
raise BencodeEncodingError()
|
||||
|
||||
|
||||
def loads(bytes_object: bytes) -> Types:
|
||||
def loads(bytes_object: bytes) -> KRPCTypes:
|
||||
try:
|
||||
return better_bencode.loads(bytes_object)
|
||||
except Exception as exc:
|
||||
raise BencodeDecodingError(exc)
|
||||
|
||||
|
||||
def loads2(bytes_object: bytes) -> typing.Tuple[Types, int]:
|
||||
def loads2(bytes_object: bytes) -> typing.Tuple[KRPCTypes, int]:
|
||||
"""
|
||||
Returns the bencoded object AND the index where the dump of the decoded object ends (exclusive). In less words:
|
||||
|
||||
|
@ -25,8 +25,10 @@ InfoHash = bytes
|
||||
PeerAddress = typing.Tuple[str, int]
|
||||
|
||||
|
||||
async def fetch_metadata_from_peer(info_hash: InfoHash, peer_addr: PeerAddress, max_metadata_size: int, timeout=None):
|
||||
async def fetch_metadata_from_peer(info_hash: InfoHash, peer_addr: PeerAddress, max_metadata_size: int, timeout=None) \
|
||||
-> typing.Optional[bytes]:
|
||||
try:
|
||||
# asyncio.wait_for "returns result of the Future or coroutine."Returns result of the Future or coroutine.
|
||||
return await asyncio.wait_for(DisposablePeer(info_hash, peer_addr, max_metadata_size).run(), timeout=timeout)
|
||||
except asyncio.TimeoutError:
|
||||
return None
|
||||
@ -42,25 +44,25 @@ class DisposablePeer:
|
||||
self.__info_hash = info_hash
|
||||
|
||||
self.__ext_handshake_complete = False # Extension Handshake
|
||||
self.__ut_metadata = None # Since we don't know ut_metadata code that remote peer uses...
|
||||
self.__ut_metadata = int() # Since we don't know ut_metadata code that remote peer uses...
|
||||
|
||||
self.__max_metadata_size = max_metadata_size
|
||||
self.__metadata_size = None
|
||||
self.__metadata_received = 0 # Amount of metadata bytes received...
|
||||
self.__metadata = None
|
||||
self.__metadata = bytearray()
|
||||
|
||||
self._run_task = None
|
||||
self._writer = None
|
||||
|
||||
|
||||
async def run(self):
|
||||
async def run(self) -> typing.Optional[bytes]:
|
||||
event_loop = asyncio.get_event_loop()
|
||||
self._metadata_future = event_loop.create_future()
|
||||
|
||||
try:
|
||||
self._reader, self._writer = await asyncio.open_connection(*self.__peer_addr, loop=event_loop)
|
||||
self._reader, self._writer = await asyncio.open_connection(*self.__peer_addr, loop=event_loop) # type: ignore
|
||||
# Send the BitTorrent handshake message (0x13 = 19 in decimal, the length of the handshake message)
|
||||
self._writer.write(b"\x13BitTorrent protocol%s%s%s" % (
|
||||
self._writer.write(b"\x13BitTorrent protocol%s%s%s" % ( # type: ignore
|
||||
b"\x00\x00\x00\x00\x00\x10\x00\x01",
|
||||
self.__info_hash,
|
||||
os.urandom(20)
|
||||
@ -124,7 +126,7 @@ class DisposablePeer:
|
||||
# In case you cannot read hex:
|
||||
# 0x14 = 20 (BitTorrent ID indicating that it's an extended message)
|
||||
# 0x00 = 0 (Extension ID indicating that it's the handshake message)
|
||||
self._writer.write(b"%b\x14%s" % (
|
||||
self._writer.write(b"%b\x14%s" % ( # type: ignore
|
||||
(2 + len(msg_dict_dump)).to_bytes(4, "big"),
|
||||
b'\0' + msg_dict_dump
|
||||
))
|
||||
@ -156,7 +158,7 @@ class DisposablePeer:
|
||||
|
||||
self.__ut_metadata = ut_metadata
|
||||
try:
|
||||
self.__metadata = bytearray(metadata_size)
|
||||
self.__metadata = bytearray(metadata_size) # type: ignore
|
||||
except MemoryError:
|
||||
logging.exception("Could not allocate %.1f KiB for the metadata!", metadata_size / 1024)
|
||||
raise
|
||||
@ -212,7 +214,7 @@ class DisposablePeer:
|
||||
# In case you cannot read_file hex:
|
||||
# 0x14 = 20 (BitTorrent ID indicating that it's an extended message)
|
||||
# 0x03 = 3 (Extension ID indicating that it's an ut_metadata message)
|
||||
self._writer.write(b"%b\x14%s%s" % (
|
||||
self._writer.write(b"%b\x14%s%s" % ( # type: ignore
|
||||
(2 + len(msg_dict_dump)).to_bytes(4, "big"),
|
||||
self.__ut_metadata.to_bytes(1, "big"),
|
||||
msg_dict_dump
|
||||
|
@ -58,8 +58,10 @@ class SybilNode(asyncio.DatagramProtocol):
|
||||
await asyncio.get_event_loop().create_datagram_endpoint(lambda: self, local_addr=address)
|
||||
logging.info("SybliNode is launched on %s!", address)
|
||||
|
||||
def connection_made(self, transport: asyncio.DatagramTransport) -> None:
|
||||
self._tick_task = asyncio.get_event_loop().create_task(self.tick_periodically())
|
||||
# mypy ignored: mypy errors because we explicitly stated `transport`s type =)
|
||||
def connection_made(self, transport: asyncio.DatagramTransport) -> None: # type: ignore
|
||||
# mypy ignored: mypy doesn't know (yet) about coroutines
|
||||
self._tick_task = asyncio.get_event_loop().create_task(self.tick_periodically()) # type: ignore
|
||||
self._transport = transport
|
||||
|
||||
def connection_lost(self, exc) -> None:
|
||||
@ -116,8 +118,9 @@ class SybilNode(asyncio.DatagramProtocol):
|
||||
self._routing_table.clear()
|
||||
if not self._is_writing_paused:
|
||||
self.__n_max_neighbours = self.__n_max_neighbours * 101 // 100
|
||||
# mypy ignore: because .child_count on Future is monkey-patched
|
||||
logging.debug("fetch metadata task count: %d", sum(
|
||||
x.child_count for x in self.__parent_futures.values()))
|
||||
x.child_count for x in self.__parent_futures.values())) # type: ignore
|
||||
logging.debug("asyncio task count: %d", len(asyncio.Task.all_tasks()))
|
||||
|
||||
def datagram_received(self, data, addr) -> None:
|
||||
@ -238,7 +241,8 @@ class SybilNode(asyncio.DatagramProtocol):
|
||||
# create the parent future
|
||||
if info_hash not in self.__parent_futures:
|
||||
parent_f = event_loop.create_future()
|
||||
parent_f.child_count = 0
|
||||
# mypy ignore: because .child_count on Future is being monkey-patched here!
|
||||
parent_f.child_count = 0 # type: ignore
|
||||
parent_f.add_done_callback(lambda f: self._parent_task_done(f, info_hash))
|
||||
self.__parent_futures[info_hash] = parent_f
|
||||
|
||||
@ -246,13 +250,15 @@ class SybilNode(asyncio.DatagramProtocol):
|
||||
|
||||
if parent_f.done():
|
||||
return
|
||||
if parent_f.child_count > MAX_ACTIVE_PEERS_PER_INFO_HASH:
|
||||
# mypy ignore: because .child_count on Future is monkey-patched
|
||||
if parent_f.child_count > MAX_ACTIVE_PEERS_PER_INFO_HASH: # type: ignore
|
||||
return
|
||||
|
||||
task = asyncio.ensure_future(bittorrent.fetch_metadata_from_peer(
|
||||
info_hash, peer_addr, self.__max_metadata_size, timeout=PEER_TIMEOUT))
|
||||
task.add_done_callback(lambda task: self._got_child_result(parent_f, task))
|
||||
parent_f.child_count += 1
|
||||
# mypy ignore: because .child_count on Future is monkey-patched
|
||||
parent_f.child_count += 1 # type: ignore
|
||||
parent_f.add_done_callback(lambda f: task.cancel())
|
||||
|
||||
def _got_child_result(self, parent_task, child_task):
|
||||
|
@ -65,7 +65,7 @@ class Database:
|
||||
files = []
|
||||
discovered_on = int(time.time())
|
||||
try:
|
||||
info = bencode.loads(metadata) # type: dict
|
||||
info = bencode.loads(metadata)
|
||||
|
||||
assert b"/" not in info[b"name"]
|
||||
name = info[b"name"].decode("utf-8")
|
||||
@ -85,7 +85,10 @@ class Database:
|
||||
return False
|
||||
|
||||
self.__pending_metadata.append((info_hash, name, sum(f[1] for f in files), discovered_on))
|
||||
self.__pending_files += files
|
||||
# MYPY BUG: error: Argument 1 to "__iadd__" of "list" has incompatible type List[Tuple[bytes, Any, str]];
|
||||
# expected Iterable[Tuple[bytes, int, bytes]]
|
||||
# List is an Iterable man...
|
||||
self.__pending_files += files # type: ignore
|
||||
|
||||
logging.info("Added: `%s`", name)
|
||||
|
||||
|
@ -44,7 +44,7 @@ def main() -> int:
|
||||
return 1
|
||||
|
||||
|
||||
def parse_args() -> dict:
|
||||
def parse_args() -> argparse.Namespace:
|
||||
parser = argparse.ArgumentParser(
|
||||
description="Lightweight web interface for magnetico.",
|
||||
epilog=textwrap.dedent("""\
|
||||
|
Loading…
Reference in New Issue
Block a user