Skip to content

Commit

Permalink
dht_crawler: clean in memory set for expired peers
Browse the repository at this point in the history
  • Loading branch information
shyba committed Sep 7, 2022
1 parent d0497cf commit 0e7a1ae
Showing 1 changed file with 10 additions and 1 deletion.
11 changes: 10 additions & 1 deletion scripts/dht_crawler.py
Original file line number Diff line number Diff line change
Expand Up @@ -71,7 +71,10 @@ async def open(self):
self.db.writer_connection.row_factory = dict_row_factory

async def all_peers(self):
    """Load peers from the DB, skipping dead entries.

    Returns a list of ``DHTPeer`` built from rows that either responded
    at least once (``latency > 0``) or were last seen more than an hour
    ago (kept so expired peers can still be re-checked/cleaned).
    """
    # NOTE: the unfiltered "select * from peer" variant was removed —
    # it shadowed this filtered query and made it unreachable.
    return [
        DHTPeer(**peer) for peer in await self.db.execute_fetchall(
            "select * from peer where latency > 0 or last_seen < datetime('now', '-1h')")
    ]

async def save_peers(self, *peers):
log.info("Saving graph nodes (peers) to DB")
Expand Down Expand Up @@ -263,6 +266,11 @@ def get_peers_needing_check(self):
to_check = [peer for peer in self.all_peers if peer.last_check is None or peer.last_check < self.refresh_limit]
return to_check

def remove_expired_peers(self):
    """Evict stale entries from the in-memory peer set.

    A peer is dropped when it has no measured latency (never answered,
    ``latency`` falsy or below 1) AND it was last seen before the
    refresh cutoff.
    """
    # Collect the stale keys first, then delete — never mutate the
    # dict while iterating it.
    stale_keys = [
        key for key, peer in self._memory_peers.items()
        if (peer.latency or 0) < 1 and peer.last_seen < self.refresh_limit
    ]
    for key in stale_keys:
        del self._memory_peers[key]

def add_peers(self, *peers):
for peer in peers:
db_peer = self.get_from_peer(peer)
Expand All @@ -278,6 +286,7 @@ async def flush_to_db(self):
connections_to_save = self._connections
self._connections = {}
# await self.db.save_connections(connections_to_save) heavy call
self.remove_expired_peers()

def get_from_peer(self, peer):
    """Return the in-memory record for *peer*, or ``None`` if unknown.

    Peers are keyed by their ``(address, udp_port)`` pair.
    """
    key = (peer.address, peer.udp_port)
    return self._memory_peers.get(key)
Expand Down

0 comments on commit 0e7a1ae

Please sign in to comment.