diff --git a/.bumpversion.cfg b/.bumpversion.cfg
index 4fee28c85..8cfce0048 100644
--- a/.bumpversion.cfg
+++ b/.bumpversion.cfg
@@ -19,6 +19,6 @@ values =
 [bumpversion:file:CHANGELOG.md]
 search = [Unreleased]
 replace = [Unreleased]
-
-	\#\# [{new_version}] - {now:%Y-%m-%d}
+
+	\#\# [{new_version}] - {now:%Y-%m-%d}
diff --git a/.gitattributes b/.gitattributes
new file mode 100644
index 000000000..ef9566b29
--- /dev/null
+++ b/.gitattributes
@@ -0,0 +1 @@
+/CHANGELOG.md merge=union
\ No newline at end of file
diff --git a/CHANGELOG.md b/CHANGELOG.md
index b7ad010a6..1d06c0a50 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -8,12 +8,36 @@
 can and probably will change functionality and break backwards compatability
 at anytime.
 
 ## [Unreleased]
+\#\# [0.8.6] - 2017-02-19
+\#\# [0.8.6rc0] - 2017-02-19
+### Changed
+  * Add `file_get` by stream hash
+  * Add utils.call_later to replace reactor.callLater
+### Fixed
+  * Fix unhandled error in `get`
+  * Fix sd blob timeout handling in `get_availability`, return 0.0
+
+
+## [0.8.5] - 2017-02-18
+
+## [0.8.5rc0] - 2017-02-18
+### Fixed
+  * Fix result expected by ui from file_get for missing files
+
+
+## [0.8.4] - 2017-02-17
+
+## [0.8.4rc0] - 2017-02-17
 ### Changed
   * Remove unused upload_allowed option
   * Remove code related to packaging as that step is now done in the electron client
   * Remove lbryum version check; use lbry-electron as version source
   * Include download url in version check
+### Fixed
+  * Add missing traceback to logging
+
+
 ## [0.8.3] - 2017-02-15
 ### Fixed
   * Get lbry files with pending claims
diff --git a/lbrynet/analytics/events.py b/lbrynet/analytics/events.py
index a15c5c50b..2c4eb498d 100644
--- a/lbrynet/analytics/events.py
+++ b/lbrynet/analytics/events.py
@@ -67,6 +67,7 @@ class Events(object):
             'module': log_record.module,
             'lineno': log_record.lineno,
             'name': log_record.name,
+            'traceback': log_record.exc_text,
         }
         return self._event('Error', properties)
diff --git a/lbrynet/analytics/logging_handler.py b/lbrynet/analytics/logging_handler.py
index 14dbf2e28..e7bdb4ac9 100644
--- a/lbrynet/analytics/logging_handler.py
+++ b/lbrynet/analytics/logging_handler.py
@@ -8,4 +8,7 @@ class Handler(logging.Handler):
         logging.Handler.__init__(self, level)
 
     def emit(self, record):
+        # We need to call format to ensure that record.message and
+        # record.exc_text attributes are populated
+        self.format(record)
         self.manager.send_error(record)
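
For context on the `self.format(record)` call above: `logging.Handler.format()` hands the record to a `logging.Formatter`, and `Formatter.format()` is what populates `record.message` and caches the rendered traceback on `record.exc_text` when exception info is present, which is what the new `'traceback'` property in events.py reads. A standalone illustration using only the standard library (not part of the patch):

    import logging
    import sys

    try:
        raise ValueError("oops")
    except ValueError:
        exc_info = sys.exc_info()

    record = logging.LogRecord(
        name="lbrynet", level=logging.ERROR, pathname=__file__, lineno=1,
        msg="boom: %s", args=("details",), exc_info=exc_info)

    handler = logging.Handler()       # bare handler; format() falls back to the default Formatter
    assert record.exc_text is None    # not populated until a formatter runs
    handler.format(record)            # sets record.message and record.exc_text
    print(record.message)                     # boom: details
    print(record.exc_text.splitlines()[-1])   # ValueError: oops
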
diff --git a/lbrynet/core/HashAnnouncer.py b/lbrynet/core/HashAnnouncer.py
index 841f685b6..5453eed15 100644
--- a/lbrynet/core/HashAnnouncer.py
+++ b/lbrynet/core/HashAnnouncer.py
@@ -11,5 +11,8 @@ class DummyHashAnnouncer(object):
     def add_supplier(self, *args):
         pass
 
+    def hash_queue_size(self):
+        return 0
+
     def immediate_announce(self, *args):
         pass
diff --git a/lbrynet/core/client/ConnectionManager.py b/lbrynet/core/client/ConnectionManager.py
index bb06a645b..2bbce9164 100644
--- a/lbrynet/core/client/ConnectionManager.py
+++ b/lbrynet/core/client/ConnectionManager.py
@@ -5,7 +5,7 @@ from lbrynet import interfaces
 from lbrynet import conf
 from lbrynet.core.client.ClientProtocol import ClientProtocolFactory
 from lbrynet.core.Error import InsufficientFundsError
-
+from lbrynet.core import utils
 
 log = logging.getLogger(__name__)
 
@@ -19,7 +19,6 @@ class PeerConnectionHandler(object):
 
 class ConnectionManager(object):
     implements(interfaces.IConnectionManager)
-    callLater = reactor.callLater
     MANAGE_CALL_INTERVAL_SEC = 1
 
     def __init__(self, downloader, rate_limiter,
@@ -54,7 +53,7 @@ class ConnectionManager(object):
     def start(self):
         log.debug("%s starting", self._get_log_name())
         self._start()
-        self._next_manage_call = self.callLater(0, self.manage)
+        self._next_manage_call = utils.call_later(0, self.manage)
         return defer.succeed(True)
@@ -156,7 +155,7 @@ class ConnectionManager(object):
             self._manage_deferred.callback(None)
             self._manage_deferred = None
         if not self.stopped and schedule_next_call:
-            self._next_manage_call = self.callLater(self.MANAGE_CALL_INTERVAL_SEC, self.manage)
+            self._next_manage_call = utils.call_later(self.MANAGE_CALL_INTERVAL_SEC, self.manage)
 
     def _rank_request_creator_connections(self):
         """Returns an ordered list of our request creators, ranked according
diff --git a/lbrynet/core/client/DHTPeerFinder.py b/lbrynet/core/client/DHTPeerFinder.py
index 7970a194a..19b3ac39a 100644
--- a/lbrynet/core/client/DHTPeerFinder.py
+++ b/lbrynet/core/client/DHTPeerFinder.py
@@ -40,7 +40,7 @@ class DHTPeerFinder(object):
     def find_peers_for_blob(self, blob_hash, timeout=None):
         def _trigger_timeout():
             if not finished_deferred.called:
-                log.warning("Peer search for %s timed out", short_hash(blob_hash))
+                log.debug("Peer search for %s timed out", short_hash(blob_hash))
                 finished_deferred.cancel()
 
         bin_hash = binascii.unhexlify(blob_hash)
diff --git a/lbrynet/core/server/DHTHashAnnouncer.py b/lbrynet/core/server/DHTHashAnnouncer.py
index a68a97d0f..69e5123aa 100644
--- a/lbrynet/core/server/DHTHashAnnouncer.py
+++ b/lbrynet/core/server/DHTHashAnnouncer.py
@@ -3,14 +3,13 @@ import collections
 import logging
 import time
 
-from twisted.internet import defer, reactor
-
+from twisted.internet import defer
+from lbrynet.core import utils
 
 log = logging.getLogger(__name__)
 
 
 class DHTHashAnnouncer(object):
-    callLater = reactor.callLater
     ANNOUNCE_CHECK_INTERVAL = 60
     CONCURRENT_ANNOUNCERS = 5
@@ -26,7 +25,7 @@ class DHTHashAnnouncer(object):
     def run_manage_loop(self):
         if self.peer_port is not None:
             self._announce_available_hashes()
-        self.next_manage_call = self.callLater(self.ANNOUNCE_CHECK_INTERVAL, self.run_manage_loop)
+        self.next_manage_call = utils.call_later(self.ANNOUNCE_CHECK_INTERVAL, self.run_manage_loop)
 
     def stop(self):
         log.info("Stopping %s", self)
@@ -79,7 +78,7 @@ class DHTHashAnnouncer(object):
                 log.debug('Announcing blob %s to dht', h)
                 d = self.dht_node.announceHaveBlob(binascii.unhexlify(h), self.peer_port)
                 d.chainDeferred(announce_deferred)
-                d.addBoth(lambda _: self.callLater(0, announce))
+                d.addBoth(lambda _: utils.call_later(0, announce))
             else:
                 self._concurrent_announcers -= 1
diff --git a/lbrynet/core/utils.py b/lbrynet/core/utils.py
index 9aa6deae0..b304baf99 100644
--- a/lbrynet/core/utils.py
+++ b/lbrynet/core/utils.py
@@ -43,6 +43,13 @@ def datetime_obj(*args, **kwargs):
     return datetime.datetime(*args, **kwargs)
 
 
+def call_later(delay, func, *args, **kwargs):
+    # Import here to ensure that it gets called after installing a reactor
+    # see: http://twistedmatrix.com/documents/current/core/howto/choosing-reactor.html
+    from twisted.internet import reactor
+    return reactor.callLater(delay, func, *args, **kwargs)
+
+
 def generate_id(num=None):
     h = get_lbry_hash_obj()
     if num is not None:
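
Routing delayed calls through `utils.call_later` does two things: it defers the reactor import until after a reactor has been installed (see the linked howto), and it gives tests a single seam to replace with `twisted.internet.task.Clock`, which is what the updated unit tests further down do. A minimal sketch of that pattern; the `tick` function and the 5-second delay are illustrative only:

    from twisted.internet import task

    from lbrynet.core import utils

    def tick():
        print("tick")

    clock = task.Clock()
    utils.call_later = clock.callLater   # swap in a deterministic clock before anything schedules a call

    delayed_call = utils.call_later(5, tick)
    clock.advance(5)                     # fires tick() synchronously, no running reactor needed

The tests apply the same substitution before importing the module under test, which is why the class-level `callLater = reactor.callLater` attribute they previously patched is no longer needed.
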
diff --git a/lbrynet/lbrynet_daemon/Daemon.py b/lbrynet/lbrynet_daemon/Daemon.py
index b40d0c9d1..b62f9495d 100644
--- a/lbrynet/lbrynet_daemon/Daemon.py
+++ b/lbrynet/lbrynet_daemon/Daemon.py
@@ -105,6 +105,7 @@ class FileID:
     NAME = 'name'
     SD_HASH = 'sd_hash'
     FILE_NAME = 'file_name'
+    STREAM_HASH = 'stream_hash'
 
 
 # TODO add login credentials in a conf file
@@ -819,17 +820,13 @@
         """
         timeout = timeout if timeout is not None else conf.settings['download_timeout']
-        try:
-            helper = _DownloadNameHelper(self, name, timeout, download_directory, file_name,
-                                         wait_for_write)
-        except Exception as err:
-            log.exception(err)
-            raise err
+        helper = _DownloadNameHelper(self, name, timeout, download_directory, file_name,
+                                     wait_for_write)
 
         if not stream_info:
             self.waiting_on[name] = True
             stream_info = yield self._resolve_name(name)
-            del self.waiting_on[name]
+
         lbry_file = yield helper.setup_stream(stream_info)
         sd_hash, file_path = yield helper.wait_or_get_stream(stream_info, lbry_file)
         defer.returnValue((sd_hash, file_path))
@@ -1029,6 +1026,12 @@ class Daemon(AuthJSONRPCServer):
                 return lbry_file
         raise Exception("File %s not found" % file_name)
 
+    def _find_lbry_file_by_stream_hash(self, stream_hash):
+        for lbry_file in self.lbry_file_manager.lbry_files:
+            if lbry_file.stream_hash == stream_hash:
+                return lbry_file
+        raise NoSuchStreamHash(stream_hash)
+
     @defer.inlineCallbacks
     def _get_lbry_file_by_uri(self, name):
         try:
@@ -1049,6 +1052,11 @@
         lbry_file = yield self._get_lbry_file_by_file_name(file_name)
         defer.returnValue(lbry_file)
 
+    @defer.inlineCallbacks
+    def _get_lbry_file_by_stream_hash(self, stream_hash):
+        lbry_file = yield self._find_lbry_file_by_stream_hash(stream_hash)
+        defer.returnValue(lbry_file)
+
     @defer.inlineCallbacks
     def _get_lbry_file(self, search_by, val, return_json=True):
         helper = _GetFileHelper(self, search_by, val, return_json)
@@ -1057,7 +1065,7 @@
             defer.returnValue(lbry_file)
         except Exception as err:
             # TODO: do something with the error, don't return None when a file isn't found
-            defer.returnValue(None)
+            defer.returnValue(False)
 
     def _get_lbry_files(self):
         def safe_get(sd_hash):
@@ -1376,7 +1384,10 @@
         Returns:
             list
         """
-        return self._render_response(sorted(self.callable_methods.keys()))
+        return self._render_response(sorted(
+            [command for command in self.callable_methods.keys()
+             if 'DEPRECATED' not in getattr(self, "jsonrpc_" + command).__doc__]
+        ))
 
     def jsonrpc_get_balance(self):
         """
@@ -1453,22 +1464,35 @@
     def jsonrpc_file_get(self, **kwargs):
         """
-        Get a file
+        Get a file; returns False if no matching file exists
 
         Args:
             'name': get file by lbry uri,
             'sd_hash': get file by the hash in the name claim,
             'file_name': get file by its name in the downloads folder,
+            'stream_hash': get file by its stream hash
         Returns:
-            'completed': bool
-            'file_name': string
-            'key': hex string
-            'points_paid': float
-            'stopped': bool
-            'stream_hash': base 58 string
-            'stream_name': string
-            'suggested_file_name': string
-            'sd_hash': string
+            'completed': bool,
+            'file_name': str,
+            'download_directory': str,
+            'points_paid': float,
+            'stopped': bool,
+            'stream_hash': str (hex),
+            'stream_name': str,
+            'suggested_file_name': str,
+            'sd_hash': str (hex),
+            'lbry_uri': str,
+            'txid': str (b58),
+            'claim_id': str (b58),
+            'download_path': str,
+            'mime_type': str,
+            'key': str (hex),
+            'total_bytes': int,
+            'written_bytes': int,
+            'code': str,
+            'message': str,
+            'metadata': Metadata dict if claim is valid, otherwise status str
+        }
         """
         d = self._get_deferred_for_lbry_file(kwargs)
         d.addCallback(lambda r: self._render_response(r))
@@ -1557,6 +1581,7 @@
             'stream_hash': hex string
             'path': path of download
         """
+        timeout = timeout if timeout is not None else self.download_timeout
         download_directory = download_directory or self.download_directory
         sd_hash = get_sd_hash(stream_info)
@@ -2451,7 +2476,11 @@
         blob_hashes = [blob.blob_hash for blob in blobs]
         if need_sd_blob:
             # we don't want to use self._download_descriptor here because it would create a stream
-            sd_blob = yield self._download_blob(sd_hash, timeout=sd_timeout)
+            try:
+                sd_blob = yield self._download_blob(sd_hash, timeout=sd_timeout)
+            except Exception as err:
+                response = yield self._render_response(0.0)
+                defer.returnValue(response)
             decoded = read_sd_blob(sd_blob)
             blob_hashes = [blob.get("blob_hash") for blob in decoded['blobs']
                            if blob.get("blob_hash")]
@@ -2488,10 +2517,7 @@
 def get_sd_hash(stream_info):
     if not stream_info:
         return None
-    try:
-        return stream_info['sources']['lbry_sd_hash']
-    except KeyError:
-        return stream_info.get('stream_hash')
+    return stream_info['sources']['lbry_sd_hash']
 
 
 class _DownloadNameHelper(object):
@@ -2510,7 +2536,7 @@
     @defer.inlineCallbacks
     def setup_stream(self, stream_info):
         sd_hash = get_sd_hash(stream_info)
-        lbry_file = yield self.daemon._get_lbry_file_by_sd_hash(sd_hash)
+        lbry_file = yield self.daemon._get_lbry_file(FileID.SD_HASH, sd_hash)
         if self._does_lbry_file_exists(lbry_file):
             defer.returnValue(lbry_file)
         else:
@@ -2673,6 +2699,8 @@
             return self.daemon._get_lbry_file_by_sd_hash(self.val)
         elif self.search_by == FileID.FILE_NAME:
             return self.daemon._get_lbry_file_by_file_name(self.val)
+        elif self.search_by == FileID.STREAM_HASH:
+            return self.daemon._get_lbry_file_by_stream_hash(self.val)
         raise Exception('{} is not a valid search operation'.format(self.search_by))
 
     def _get_json(self, lbry_file):
@@ -2806,7 +2834,7 @@ def report_bug_to_slack(message, installation_id, platform_name, app_version):
 
 def get_lbry_file_search_value(search_fields):
-    for searchtype in (FileID.SD_HASH, FileID.NAME, FileID.FILE_NAME):
+    for searchtype in (FileID.SD_HASH, FileID.NAME, FileID.FILE_NAME, FileID.STREAM_HASH):
         value = search_fields.get(searchtype)
         if value:
             return searchtype, value
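
With `STREAM_HASH` added to `FileID`, `get_lbry_file_search_value`, and `_GetFileHelper`, `file_get` can now be queried by stream hash over the daemon's JSON-RPC interface. A rough sketch of such a call; the endpoint URL, the use of `requests`, the placeholder hash, and the shape of the response envelope are assumptions rather than something this patch defines:

    import json

    import requests  # any HTTP client works; requests is assumed here

    payload = {
        "method": "file_get",
        "params": {"stream_hash": "0123abcd..."},   # placeholder stream hash (hex)
    }
    reply = requests.post("http://localhost:5279/lbryapi", data=json.dumps(payload)).json()

    result = reply.get("result")
    if result is False:
        # per the updated docstring, a missing file now yields False instead of None
        print("no matching file")
    else:
        print(result["file_name"], result["completed"], result["written_bytes"])
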
diff --git a/tests/unit/core/client/test_ConnectionManager.py b/tests/unit/core/client/test_ConnectionManager.py
index 0aee113d3..81d69429e 100644
--- a/tests/unit/core/client/test_ConnectionManager.py
+++ b/tests/unit/core/client/test_ConnectionManager.py
@@ -3,7 +3,7 @@ import time
 import logging
 
 from lbrynet.core import log_support
-from lbrynet.core.client.ConnectionManager import ConnectionManager
+#from lbrynet.core.client.ConnectionManager import ConnectionManager
 from lbrynet.core.client.ClientRequest import ClientRequest
 from lbrynet.core.server.ServerProtocol import ServerProtocol
 from lbrynet.core.RateLimiter import RateLimiter
@@ -16,6 +16,7 @@ from twisted.internet import defer, reactor, task
 from twisted.internet.task import deferLater
 from twisted.internet.protocol import Protocol, ServerFactory
 from lbrynet import conf
+from lbrynet.core import utils
 from lbrynet.interfaces import IQueryHandlerFactory, IQueryHandler, IRequestCreator
 from zope.interface import implements
 
@@ -122,11 +123,12 @@ class TestIntegrationConnectionManager(unittest.TestCase):
         self.downloader = MocDownloader()
         self.rate_limiter = RateLimiter()
         self.primary_request_creator = MocRequestCreator([self.TEST_PEER])
+        self.clock = task.Clock()
+        utils.call_later = self.clock.callLater
+        from lbrynet.core.client.ConnectionManager import ConnectionManager
         self.connection_manager = ConnectionManager(self.downloader, self.rate_limiter,
                                                     [self.primary_request_creator], [])
-        self.clock = task.Clock()
-        self.connection_manager.callLater = self.clock.callLater
         self.connection_manager._start()
         self.server_port = None
diff --git a/tests/unit/core/server/test_DHTHashAnnouncer.py b/tests/unit/core/server/test_DHTHashAnnouncer.py
index c6bb05967..fdeea9664 100644
--- a/tests/unit/core/server/test_DHTHashAnnouncer.py
+++ b/tests/unit/core/server/test_DHTHashAnnouncer.py
@@ -2,9 +2,7 @@ import os
 import binascii
 from twisted.trial import unittest
 from twisted.internet import defer,task
-from lbrynet.core.server.DHTHashAnnouncer import DHTHashAnnouncer,DHTHashSupplier
-from lbrynet.core.utils import random_string
-from lbrynet.core import log_support
+from lbrynet.core import log_support, utils
 
 
 class MocDHTNode(object):
@@ -35,8 +33,9 @@ class DHTHashAnnouncerTest(unittest.TestCase):
             self.blobs_to_announce.append(binascii.b2a_hex(os.urandom(32)))
         self.clock = task.Clock()
         self.dht_node = MocDHTNode()
+        utils.call_later = self.clock.callLater
+        from lbrynet.core.server.DHTHashAnnouncer import DHTHashAnnouncer,DHTHashSupplier
        self.announcer = DHTHashAnnouncer(self.dht_node, peer_port=3333)
-        self.announcer.callLater = self.clock.callLater
         self.supplier = MocSupplier(self.blobs_to_announce)
         self.announcer.add_supplier(self.supplier)