Introduce requested changes, log when malicious / empty metadata received
parent 9b12c25966
commit 0bb1a98415
@@ -67,8 +67,11 @@ def main():
 
     complete_info_hashes = database.get_complete_info_hashes()
 
-    node = dht.SybilNode(arguments.node_addr, max_metadata_size=arguments.metadata_size_limit)
-    node.when_peer_found = on_peer_found
+    node = dht.SybilNode(arguments.node_addr)
+    node.when_peer_found = lambda info_hash, peer_address: on_peer_found(info_hash=info_hash,
+                                                                         peer_address=peer_address,
+                                                                         max_metadata_size=arguments.max_metadata_size)
 
     selector.register(node, selectors.EVENT_READ)
 
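
The lambda above is a callback adapter: SybilNode keeps its two-argument when_peer_found interface, and the value from --max-metadata-size is bound at wiring time in main(). A minimal standalone sketch of that pattern, with illustrative stand-in names rather than the project's modules:

# Sketch of the callback-adapter idea (stand-in names, not magneticod's code).
def on_peer_found(info_hash: bytes, peer_address: tuple, max_metadata_size: int) -> None:
    print("peer", peer_address, "announced", info_hash.hex(), "limit", max_metadata_size)

max_metadata_size = 10 * 1024 * 1024  # stand-in for arguments.max_metadata_size

# `notify` plays the role of SybilNode.when_peer_found: it only ever sees two arguments,
# while the adapter binds the extra size limit for it.
notify = lambda info_hash, peer_address: on_peer_found(info_hash=info_hash,
                                                       peer_address=peer_address,
                                                       max_metadata_size=max_metadata_size)

notify(b"\x00" * 20, ("203.0.113.7", 6881))
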
@@ -223,8 +226,8 @@ def parse_cmdline_arguments() -> typing.Optional[argparse.Namespace]:
     )
 
     parser.add_argument(
-        "--metadata-size-limit", type=parse_size, default=DEFAULT_MAX_METADATA_SIZE,
-        help="Limit metadata size to protect memory overflow"
+        "--max-metadata-size", type=parse_size, default=DEFAULT_MAX_METADATA_SIZE,
+        help="Limit metadata size to protect memory overflow. Provide in human friendly format eg. 1 M, 1 GB"
     )
 
     default_database_dir = os.path.join(appdirs.user_data_dir("magneticod"), "database.sqlite3")
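
The new --max-metadata-size flag is converted by parse_size, whose implementation is outside this diff. A hedged sketch of what a human-friendly size parser along those lines might look like; the regex, unit table and binary multiples below are assumptions, not magneticod's actual parse_size:

import argparse
import re

# Hypothetical parser accepting values such as "1 M", "1 GB" or plain byte counts.
_UNITS = {"": 1, "K": 1024, "KB": 1024, "M": 1024 ** 2, "MB": 1024 ** 2, "G": 1024 ** 3, "GB": 1024 ** 3}

def parse_size_sketch(value: str) -> int:
    match = re.fullmatch(r"\s*(\d+)\s*([KMGkmg]?[Bb]?)\s*", value)
    if not match:
        raise argparse.ArgumentTypeError("invalid size: {!r}".format(value))
    number, unit = match.group(1), match.group(2).upper()
    return int(number) * _UNITS[unit]

assert parse_size_sketch("1 M") == 1024 ** 2
assert parse_size_sketch("1 GB") == 1024 ** 3
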
@@ -40,6 +40,7 @@ class DisposablePeer:
         if res != errno.EINPROGRESS:
             raise ConnectionError()
 
+        self.__peer_addr = peer_addr
         self.__info_hash = info_hash
 
         self.__max_metadata_size = max_metadata_size
@@ -211,10 +212,16 @@ class DisposablePeer:
             # Just to make sure that the remote peer supports ut_metadata extension:
             ut_metadata = msg_dict[b"m"][b"ut_metadata"]
             metadata_size = msg_dict[b"metadata_size"]
-            assert metadata_size > 0, "Invalid (empty) metada size"
-            assert metadata_size < self.__max_metadata_size, "Malicious or malfunctioning peer tried send above " \
-                                                             "{} limit metadata size".format(self.__max_metadata_size)
-        except (AssertionError, KeyError):
+            assert metadata_size > 0, "Invalid (empty) metadata size"
+            assert metadata_size < self.__max_metadata_size, "Malicious or malfunctioning peer {}:{} tried send above" \
+                                                             " {} max metadata size".format(self.__peer_addr[0],
+                                                                                            self.__peer_addr[1],
+                                                                                            self.__max_metadata_size)
+        except KeyError:
+            self.when_error()
+            return
+        except AssertionError as e:
+            logging.debug(str(e))
             self.when_error()
             return
 
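
Splitting the except clause is what implements the commit message: a KeyError (a peer whose extension handshake lacks the ut_metadata keys) still just triggers when_error(), while a failed assertion now carries a message naming the peer and the limit, and logging.debug(str(e)) records it. A small standalone illustration of the AssertionError path, using made-up values:

import logging

logging.basicConfig(level=logging.DEBUG)

max_metadata_size = 10 * 1024 * 1024   # hypothetical limit, e.g. from --max-metadata-size
peer_addr = ("203.0.113.7", 6881)      # hypothetical remote peer
metadata_size = 50 * 1024 * 1024       # oversized value announced by that peer

try:
    assert metadata_size > 0, "Invalid (empty) metadata size"
    assert metadata_size < max_metadata_size, \
        "Malicious or malfunctioning peer {}:{} tried send above {} max metadata size".format(
            peer_addr[0], peer_addr[1], max_metadata_size)
except AssertionError as e:
    # str(e) is the assertion's message, so the reason ends up in the debug log.
    logging.debug(str(e))
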
@@ -30,7 +30,7 @@ InfoHash = bytes
 
 
 class SybilNode:
-    def __init__(self, address: typing.Tuple[str, int], max_metadata_size: int=DEFAULT_MAX_METADATA_SIZE):
+    def __init__(self, address: typing.Tuple[str, int]):
         self.__true_id = self.__random_bytes(20)
 
         self.__socket = socket.socket(type=socket.SOCK_DGRAM)
@@ -43,7 +43,6 @@ class SybilNode:
         self.__routing_table = {}  # type: typing.Dict[NodeID, NodeAddress]
 
         self.__token_secret = self.__random_bytes(4)
-        self.__max_metadata_size = max_metadata_size
         # Maximum number of neighbours (this is a THRESHOLD where, once reached, the search for new neighbours will
         # stop; but until then, the total number of neighbours might exceed the threshold).
         self.__n_max_neighbours = 2000
@@ -51,8 +50,7 @@ class SybilNode:
         logging.info("SybilNode %s on %s initialized!", self.__true_id.hex().upper(), address)
 
     @staticmethod
-    def when_peer_found(info_hash: InfoHash, peer_addr: PeerAddress,
-                        max_metadata_size: int=DEFAULT_MAX_METADATA_SIZE) -> None:
+    def when_peer_found(info_hash: InfoHash, peer_addr: PeerAddress) -> None:
         raise NotImplementedError()
 
     def on_tick(self) -> None:
@@ -204,7 +202,7 @@ class SybilNode:
         else:
             peer_addr = (addr[0], port)
 
-        self.when_peer_found(info_hash, peer_addr, self.__max_metadata_size)
+        self.when_peer_found(info_hash, peer_addr)
 
     def fileno(self) -> int:
         return self.__socket.fileno()