return list of dictionaries from peer_list, include peer node ids

Jack Robison 2018-04-03 13:06:16 -04:00
parent 2903ccaeb4
commit 5cea031f38
3 changed files with 33 additions and 9 deletions


@@ -42,6 +42,7 @@ at any time.
 * if the `use_authentication` setting is configured, use authentication for all api methods instead of only those with the `auth_required` decorator
 * regenerate api keys on startup if using authentication
 * support both positional and keyword args for api calls
+* `peer_list` to return a list of dictionaries instead of a list of lists, with peer node ids added to the results
 ### Added
 * virtual kademlia network and mock udp transport for dht integration tests
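
With this change, a `peer_list` result that was previously a `[host, port, is_available]` triple per peer becomes one dictionary per peer, with the peer's hex-encoded DHT node id included. An illustrative before/after of the RPC result, with hypothetical values:

    # before: list of lists, built from [c.host, c.port, c.is_available()]
    [["10.0.2.15", 3333, True]]

    # after: list of dictionaries, one per peer found in the DHT
    [{"host": "10.0.2.15", "port": 3333, "node_id": "a1b2c3..."}]

Note the availability flag is gone: the result now comes straight from a DHT query rather than from the peer finder.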


@@ -2850,6 +2850,7 @@ class Daemon(AuthJSONRPCServer):
         response = yield self._render_response("Deleted %s" % blob_hash)
         defer.returnValue(response)
 
+    @defer.inlineCallbacks
     def jsonrpc_peer_list(self, blob_hash, timeout=None):
         """
         Get peers for blob hash
@@ -2862,15 +2863,32 @@ class Daemon(AuthJSONRPCServer):
             --timeout=<timeout> : (int) peer search timeout in seconds
 
         Returns:
-            (list) List of contacts
+            (list) List of contact dictionaries {'host': <peer ip>, 'port': <peer port>, 'node_id': <peer node id>}
         """
-        timeout = timeout or conf.settings['peer_search_timeout']
-
-        d = self.session.peer_finder.find_peers_for_blob(blob_hash, timeout=timeout)
-        d.addCallback(lambda r: [[c.host, c.port, c.is_available()] for c in r])
-        d.addCallback(lambda r: self._render_response(r))
-        return d
+        if not utils.is_valid_blobhash(blob_hash):
+            raise Exception("invalid blob hash")
+
+        finished_deferred = self.session.dht_node.getPeersForBlob(binascii.unhexlify(blob_hash), True)
+
+        def _trigger_timeout():
+            if not finished_deferred.called:
+                log.debug("Peer search for %s timed out", blob_hash)
+                finished_deferred.cancel()
+
+        timeout = timeout or conf.settings['peer_search_timeout']
+        self.session.dht_node.reactor_callLater(timeout, _trigger_timeout)
+
+        peers = yield finished_deferred
+        results = [
+            {
+                "host": host,
+                "port": port,
+                "node_id": node_id
+            }
+            for host, port, node_id in peers
+        ]
+        defer.returnValue(results)
 
     @defer.inlineCallbacks
     def jsonrpc_blob_announce(self, blob_hash=None, stream_hash=None, sd_hash=None):
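
The new implementation bounds the DHT search with a cancellable deferred: `reactor_callLater` (the daemon's indirection over the reactor's `callLater`, presumably so the dht integration tests can substitute a clock) schedules `_trigger_timeout`, and cancelling the deferred makes the pending `yield` fail instead of hanging forever. A minimal standalone sketch of the same pattern, assuming a plain Twisted reactor and a hypothetical `search_with_timeout` helper:

    from twisted.internet import defer, reactor

    def search_with_timeout(finished_deferred, timeout):
        # mirrors jsonrpc_peer_list: cancel the search if it hasn't fired in time
        def _trigger_timeout():
            if not finished_deferred.called:
                finished_deferred.cancel()

        reactor.callLater(timeout, _trigger_timeout)
        # a cancelled deferred errbacks with defer.CancelledError;
        # trap it so a timeout surfaces as an empty result rather than a crash
        finished_deferred.addErrback(lambda err: err.trap(defer.CancelledError))
        return finished_deferred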


@@ -249,7 +249,7 @@ class Node(object):
         )
 
     @defer.inlineCallbacks
-    def getPeersForBlob(self, blob_hash):
+    def getPeersForBlob(self, blob_hash, include_node_ids=False):
         result = yield self.iterativeFindValue(blob_hash)
         expanded_peers = []
         if result:
@@ -257,8 +257,13 @@ class Node(object):
             for peer in result[blob_hash]:
                 host = ".".join([str(ord(d)) for d in peer[:4]])
                 port, = struct.unpack('>H', peer[4:6])
-                if (host, port) not in expanded_peers:
-                    expanded_peers.append((host, port))
+                if not include_node_ids:
+                    if (host, port) not in expanded_peers:
+                        expanded_peers.append((host, port))
+                else:
+                    peer_node_id = peer[6:].encode('hex')
+                    if (host, port, peer_node_id) not in expanded_peers:
+                        expanded_peers.append((host, port, peer_node_id))
         defer.returnValue(expanded_peers)
 
     def get_most_popular_hashes(self, num_to_return):
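
For reference, each `peer` value returned by `iterativeFindValue` is a compact peer string: four bytes of IPv4 address, a two-byte big-endian port, and the remaining bytes are the peer's raw node id. A sketch of the decoding performed above, assuming Python 2 byte strings as in the surrounding code (`decode_peer` is a hypothetical name):

    import struct

    def decode_peer(peer):
        # bytes 0-3: IPv4 address, one octet per byte
        host = ".".join([str(ord(d)) for d in peer[:4]])
        # bytes 4-5: port, big-endian unsigned short
        port, = struct.unpack('>H', peer[4:6])
        # bytes 6+: the peer's node id, hex-encoded for display
        node_id = peer[6:].encode('hex')
        return host, port, node_id

    # e.g. decode_peer('\x7f\x00\x00\x01\x0d\x05' + '\xab' * 48)
    # -> ('127.0.0.1', 3333, 'abab...')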