Mirror of https://github.com/LBRYFoundation/lbry-sdk.git, synced 2025-09-14 22:49:49 +00:00

DHT py3 compatibility, mostly commenting out implements() and fixing imports

cryptstream py3 support, mostly commenting out implements()
lbry_file py3 support, mostly commenting out implements()
file_manager py3 support, mostly commenting out implements()
core py3 support, mostly commenting out implements() and fixing imports

Commit 5520d518b5, parent b3e9240aa8
28 changed files with 60 additions and 61 deletions
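Most of the diff below is the same mechanical change repeated across modules: zope.interface's `implements()` relies on Python 2 "class advice" and raises a TypeError when the class body executes under Python 3, so this commit simply comments those calls out. For reference, a minimal sketch of the Python-3-compatible form, the `@implementer` class decorator, is shown below; it is not part of this commit, and the `IRateLimiter`/`RateLimiter` bodies are illustrative stand-ins.

```python
# A minimal sketch, not part of this commit: zope.interface's @implementer class
# decorator is the form that works on both Python 2 and 3, replacing the
# implements() calls that the diff comments out. Bodies here are illustrative.
from zope.interface import Interface, implementer


class IRateLimiter(Interface):
    """Illustrative stand-in for the project's IRateLimiter interface."""


@implementer(IRateLimiter)   # instead of `implements(IRateLimiter)` inside the class body
class RateLimiter(object):
    """Placeholder body for illustration only."""
```

Commenting the calls out only drops the interface declarations, which are declarative metadata; the runtime behaviour of the classes themselves is unchanged.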
@@ -49,7 +49,7 @@ class DummyRateLimiter(object):
 class RateLimiter(object):
     """This class ensures that upload and download rates don't exceed specified maximums"""

-    implements(IRateLimiter)
+    #implements(IRateLimiter)

     #called by main application

@@ -40,7 +40,7 @@ def cache(fn):


 class BlobRequester(object):
-    implements(IRequestCreator)
+    #implements(IRequestCreator)

     def __init__(self, blob_manager, peer_finder, payment_rate_manager, wallet, download_manager):
         self.blob_manager = blob_manager
@@ -24,7 +24,7 @@ def encode_decimal(obj):


 class ClientProtocol(Protocol, TimeoutMixin):
-    implements(IRequestSender, IRateLimited)
+    #implements(IRequestSender, IRateLimited)
     ######### Protocol #########
     PROTOCOL_TIMEOUT = 30

@@ -19,7 +19,7 @@ class PeerConnectionHandler(object):


 class ConnectionManager(object):
-    implements(interfaces.IConnectionManager)
+    #implements(interfaces.IConnectionManager)
     MANAGE_CALL_INTERVAL_SEC = 5
     TCP_CONNECT_TIMEOUT = 15

@@ -8,7 +8,7 @@ log = logging.getLogger(__name__)


 class DownloadManager(object):
-    implements(interfaces.IDownloadManager)
+    #implements(interfaces.IDownloadManager)

     def __init__(self, blob_manager):
         self.blob_manager = blob_manager
@@ -15,7 +15,7 @@ log = logging.getLogger(__name__)


 class SingleBlobMetadataHandler(object):
-    implements(interfaces.IMetadataHandler)
+    #implements(interfaces.IMetadataHandler)

     def __init__(self, blob_hash, download_manager):
         self.blob_hash = blob_hash
@@ -8,7 +8,7 @@ log = logging.getLogger(__name__)


 class StreamProgressManager(object):
-    implements(IProgressManager)
+    #implements(IProgressManager)

     def __init__(self, finished_callback, blob_manager,
                  download_manager, delete_blob_after_finished=False):
@@ -13,7 +13,7 @@ log = logging.getLogger(__name__)


 class BlobRequestHandlerFactory(object):
-    implements(IQueryHandlerFactory)
+    #implements(IQueryHandlerFactory)

     def __init__(self, blob_manager, wallet, payment_rate_manager, analytics_manager):
         self.blob_manager = blob_manager
@@ -36,7 +36,7 @@ class BlobRequestHandlerFactory(object):


 class BlobRequestHandler(object):
-    implements(IQueryHandler, IBlobSender)
+    #implements(IQueryHandler, IBlobSender)
     PAYMENT_RATE_QUERY = 'blob_data_payment_rate'
     BLOB_QUERY = 'requested_blob'
     AVAILABILITY_QUERY = 'requested_blobs'
@@ -24,7 +24,7 @@ class ServerProtocol(Protocol):
     10) Pause/resume production when told by the rate limiter
     """

-    implements(interfaces.IConsumer)
+    #implements(interfaces.IConsumer)

     #Protocol stuff

@@ -13,7 +13,7 @@ class ServerRequestHandler(object):
     return request for information about more blobs that are
     associated with streams.
     """
-    implements(interfaces.IPushProducer, interfaces.IConsumer, IRequestHandler)
+    #implements(interfaces.IPushProducer, interfaces.IConsumer, IRequestHandler)

     def __init__(self, consumer):
         self.consumer = consumer
@@ -3,7 +3,8 @@ import json
 import subprocess
 import os

-from urllib2 import urlopen, URLError
+from six.moves.urllib.request import urlopen
+from six.moves.urllib.error import URLError
 from lbryschema import __version__ as lbryschema_version
 from lbrynet import build_type, __version__ as lbrynet_version
 from lbrynet.conf import ROOT_DIR
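The urllib change above is the standard six shim: Python 3 split urllib2 into urllib.request and urllib.error, and six.moves resolves to the right module on either interpreter. A small usage sketch follows; the `fetch_page` helper is hypothetical and not part of this file.

```python
# Sketch of the six.moves urllib shim used above; it runs unchanged on Python 2
# (where it resolves to urllib2) and Python 3 (urllib.request / urllib.error).
from six.moves.urllib.request import urlopen
from six.moves.urllib.error import URLError


def fetch_page(url, timeout=5):
    # hypothetical helper, for illustration only
    try:
        return urlopen(url, timeout=timeout).read()
    except URLError:
        return None
```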
@@ -19,7 +20,7 @@ def get_lbrynet_version():
                 stderr=devnull
             ).strip().lstrip('v')
         except (subprocess.CalledProcessError, OSError):
-            print "failed to get version from git"
+            print("failed to get version from git")
     return lbrynet_version


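The line above is the usual py2-to-py3 change: `print` becomes a function. A module that still has to run on Python 2 as well can opt into the function form explicitly, as sketched below; the `__future__` import is an assumption about how one might keep dual-version support, not something this commit adds.

```python
# Sketch: with this __future__ import, print() is a function on Python 2 too,
# so the corrected line behaves identically on both interpreters.
from __future__ import print_function

print("failed to get version from git")
```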
@@ -22,7 +22,7 @@ class CryptStreamCreator(object):
     the blob is associated with the stream.
     """

-    implements(interfaces.IConsumer)
+    #implements(interfaces.IConsumer)

     def __init__(self, blob_manager, name=None, key=None, iv_generator=None):
         """@param blob_manager: Object that stores and provides access to blobs.
@@ -6,7 +6,7 @@ from lbrynet.interfaces import IBlobHandler


 class CryptBlobHandler(object):
-    implements(IBlobHandler)
+    #implements(IBlobHandler)

     def __init__(self, key, write_func):
         self.key = key
@@ -36,7 +36,7 @@ class CurrentlyStartingError(Exception):

 class CryptStreamDownloader(object):

-    implements(IStreamDownloader)
+    #implements(IStreamDownloader)

     def __init__(self, peer_finder, rate_limiter, blob_manager, payment_rate_manager, wallet,
                  key, stream_name):
@@ -1,12 +1,10 @@
-import UserDict
-import constants
-from interface import IDataStore
-from zope.interface import implements
+from collections import UserDict
+from . import constants


-class DictDataStore(UserDict.DictMixin):
+class DictDataStore(UserDict):
     """ A datastore using an in-memory Python dictionary """
-    implements(IDataStore)
+    #implements(IDataStore)

     def __init__(self, getTime=None):
         # Dictionary format:
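Python 3 drops `UserDict.DictMixin`, so the datastore now subclasses `collections.UserDict`, which keeps its contents in `self.data` and already provides the full mapping protocol. The sketch below shows the shape such a subclass takes; the method body is illustrative only, not the real DictDataStore logic. Note that `__init__` must call the parent initializer so `self.data` exists.

```python
# Minimal sketch of subclassing collections.UserDict, the replacement used above
# for the removed UserDict.DictMixin. The __init__ body here is illustrative only.
from collections import UserDict


class DictDataStore(UserDict):
    def __init__(self, getTime=None):
        super(DictDataStore, self).__init__()   # sets up the backing dict in self.data
        self._getTime = getTime


store = DictDataStore()
store["blob_hash"] = ["peer"]
print(store["blob_hash"])                       # -> ['peer']
```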
@@ -1,4 +1,4 @@
-from error import DecodeError
+from .error import DecodeError


 class Encoding(object):
@@ -68,7 +68,7 @@ class Bencode(Encoding):
                 encodedDictItems += self.encode(data[key])
             return 'd%se' % encodedDictItems
         else:
-            print data
+            print(data)
             raise TypeError("Cannot bencode '%s' object" % type(data))

     def decode(self, data):
@@ -126,8 +126,8 @@ class Bencode(Encoding):
         splitPos = data[startIndex:].find(':') + startIndex
         try:
             length = int(data[startIndex:splitPos])
-        except ValueError, e:
-            raise DecodeError, e
+        except ValueError:
+            raise DecodeError()
         startIndex = splitPos + 1
         endPos = startIndex + length
         bytes = data[startIndex:endPos]
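The two-line change above is forced by syntax: `except ValueError, e` and `raise DecodeError, e` are Python-2-only forms and are SyntaxErrors on Python 3. The commit drops the captured exception entirely; a sketch of an alternative that keeps the original cause is shown below. `parse_length` and this `DecodeError` stub are illustrative, not the module's real code.

```python
# Sketch of the Python 3 exception syntax involved above. DecodeError is stubbed
# and parse_length is a hypothetical helper; only the except/raise forms matter.
class DecodeError(Exception):
    pass


def parse_length(raw):
    try:
        return int(raw)
    except ValueError as e:
        # `raise DecodeError() from e` would also preserve the original traceback chain
        raise DecodeError(str(e))
```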
@@ -1,10 +1,10 @@
 import binascii
-import exceptions
+#import exceptions

 # this is a dict of {"exceptions.<exception class name>": exception class} items used to raise
 # remote built-in exceptions locally
 BUILTIN_EXCEPTIONS = {
-    "exceptions.%s" % e: getattr(exceptions, e) for e in dir(exceptions) if not e.startswith("_")
+    # "exceptions.%s" % e: getattr(exceptions, e) for e in dir(exceptions) if not e.startswith("_")
 }


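Python 3 removes the py2 `exceptions` module (built-in exception classes live on `builtins` instead), which is why both the import and the dict comprehension are commented out above, leaving BUILTIN_EXCEPTIONS empty. The sketch below shows how the same map could be rebuilt on Python 3; it is an assumption about a possible follow-up, not something this commit does.

```python
# Sketch (not from this commit): rebuilding the BUILTIN_EXCEPTIONS map on Python 3,
# where built-in exception classes are attributes of the builtins module.
import builtins

BUILTIN_EXCEPTIONS = {
    "exceptions.%s" % name: obj
    for name, obj in vars(builtins).items()
    if isinstance(obj, type) and issubclass(obj, BaseException)
}

assert BUILTIN_EXCEPTIONS["exceptions.ValueError"] is ValueError
```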
@@ -1,9 +1,9 @@
 import logging
-from twisted.internet import defer
-from distance import Distance
-from error import TimeoutError
-import constants
 import struct
+from twisted.internet import defer
+from .distance import Distance
+from .error import TimeoutError
+from . import constants

 log = logging.getLogger(__name__)

@@ -1,7 +1,7 @@
 import logging
-import constants
-from distance import Distance
-from error import BucketFull
+from . import constants
+from .distance import Distance
+from .error import BucketFull

 log = logging.getLogger(__name__)

@@ -7,7 +7,7 @@
 # The docstrings in this module contain epytext markup; API documentation
 # may be created by processing this file with epydoc: http://epydoc.sf.net

-import msgtypes
+from . import msgtypes


 class MessageTranslator(object):
@@ -8,7 +8,7 @@
 # may be created by processing this file with epydoc: http://epydoc.sf.net

 from lbrynet.core.utils import generate_id
-import constants
+from . import constants


 class Message(object):
@@ -15,14 +15,14 @@ from twisted.internet import defer, error, task
 from lbrynet.core.utils import generate_id, DeferredDict
 from lbrynet.core.call_later_manager import CallLaterManager
 from lbrynet.core.PeerManager import PeerManager
-from error import TimeoutError
-import constants
-import routingtable
-import datastore
-import protocol
-from peerfinder import DHTPeerFinder
-from contact import ContactManager
-from iterativefind import iterativeFind
+from .error import TimeoutError
+from . import constants
+from . import routingtable
+from . import datastore
+from . import protocol
+from .peerfinder import DHTPeerFinder
+from .contact import ContactManager
+from .iterativefind import iterativeFind


 log = logging.getLogger(__name__)
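All of the import rewrites in the DHT package, including the block above, come from the same rule: Python 3 makes imports absolute by default, so a bare `import constants` no longer finds the sibling module inside the package and must become `from . import constants`. The self-contained demo below shows the pattern working under Python 3; `demo_pkg` and its contents are a throwaway package built in a temp directory purely for illustration.

```python
# Illustrative demo (not from the commit): a tiny throwaway package showing why
# sibling modules need explicit relative imports under Python 3.
import os
import sys
import tempfile

root = tempfile.mkdtemp()
pkg = os.path.join(root, "demo_pkg")
os.mkdir(pkg)
open(os.path.join(pkg, "__init__.py"), "w").close()
with open(os.path.join(pkg, "constants.py"), "w") as f:
    f.write("K = 8\n")
with open(os.path.join(pkg, "routing.py"), "w") as f:
    # a bare `import constants` here would raise ModuleNotFoundError on Python 3
    f.write("from . import constants\nBUCKET_SIZE = constants.K\n")

sys.path.insert(0, root)
from demo_pkg import routing

print(routing.BUCKET_SIZE)  # -> 8
```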
@@ -19,7 +19,7 @@ class DummyPeerFinder(object):

 class DHTPeerFinder(DummyPeerFinder):
     """This class finds peers which have announced to the DHT that they have certain blobs"""
-    implements(IPeerFinder)
+    #implements(IPeerFinder)

     def __init__(self, dht_node, peer_manager):
         """
@@ -4,12 +4,12 @@ import errno
 from collections import deque

 from twisted.internet import protocol, defer
-from error import BUILTIN_EXCEPTIONS, UnknownRemoteException, TimeoutError, TransportNotConnected
+from .error import BUILTIN_EXCEPTIONS, UnknownRemoteException, TimeoutError, TransportNotConnected

-import constants
-import encoding
-import msgtypes
-import msgformat
+from . import constants
+from . import encoding
+from . import msgtypes
+from . import msgformat

 log = logging.getLogger(__name__)

@@ -436,7 +436,7 @@ class KademliaProtocol(protocol.DatagramProtocol):
                 result = func(senderContact, *a)
             else:
                 result = func()
-        except Exception, e:
+        except Exception as e:
             log.exception("error handling request for %s:%i %s", senderContact.address, senderContact.port, method)
             df.errback(e)
         else:
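Same syntax fix as in the bencode decoder: `except Exception, e` is a py2-only form. One behavioural note that applies here as general Python 3 semantics, not anything specific to this file: the name bound by `as` is deleted when the except block ends, so anything needed afterwards must be referenced inside the block or stored under another name, as in the sketch below (`handle_request` and `errback` are illustrative names).

```python
# Sketch of the `except ... as e` form adopted above; names are illustrative.
def handle_request(func, errback):
    try:
        return func()
    except Exception as e:
        errback(e)   # fine: `e` is still bound inside the except block
    # on Python 3, `e` is no longer defined here, after the block ends
```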
@@ -8,11 +8,11 @@
 import random
 from zope.interface import implements
 from twisted.internet import defer
-import constants
-import kbucket
-from error import TimeoutError
-from distance import Distance
-from interface import IRoutingTable
+from . import constants
+from . import kbucket
+from .error import TimeoutError
+from .distance import Distance
+from .interface import IRoutingTable
 import logging

 log = logging.getLogger(__name__)
@@ -33,7 +33,7 @@ class TreeRoutingTable(object):
     C{PING} RPC-based k-bucket eviction algorithm described in section 2.2 of
     that paper.
     """
-    implements(IRoutingTable)
+    #implements(IRoutingTable)

     def __init__(self, parentNodeID, getTime=None):
         """
@@ -163,7 +163,7 @@ class ManagedEncryptedFileDownloader(EncryptedFileSaver):


 class ManagedEncryptedFileDownloaderFactory(object):
-    implements(IStreamDownloaderFactory)
+    #implements(IStreamDownloaderFactory)

     def __init__(self, lbry_file_manager, blob_manager):
         self.lbry_file_manager = lbry_file_manager
@@ -91,7 +91,7 @@ class EncryptedFileDownloader(CryptStreamDownloader):


 class EncryptedFileDownloaderFactory(object):
-    implements(IStreamDownloaderFactory)
+    #implements(IStreamDownloaderFactory)

     def __init__(self, peer_finder, rate_limiter, blob_manager, storage, wallet):
         self.peer_finder = peer_finder
@@ -8,7 +8,7 @@ log = logging.getLogger(__name__)


 class EncryptedFileMetadataHandler(object):
-    implements(IMetadataHandler)
+    #implements(IMetadataHandler)

     def __init__(self, stream_hash, storage, download_manager):
         self.stream_hash = stream_hash