Mirror of https://github.com/LBRYFoundation/lbry-sdk.git (synced 2025-08-23 17:27:25 +00:00)

Compare commits
14 commits
Commits (SHA1):
502e627ec8
e0104e7c16
6a6e3cfe07
78b6cf26c2
08f083c5d9
5543630802
678a475427
5f768406e8
96977f803b
23cffdc066
45a14f54fb
f97e788d44
da9e6a8274
d3984c7b7a
13 changed files with 304 additions and 165 deletions
.gitignore (vendored): 3 lines changed
@@ -6,6 +6,7 @@
/.coverage*
/lbry-venv
/venv
/lbry/blockchain

lbry.egg-info
__pycache__
@@ -17,4 +18,4 @@ _trial_temp/
/lbry/wallet/bin

/.vscode
/.gitignore
/.gitignore
docs/api.json: 294 lines changed
File diff suppressed because one or more lines are too long
@@ -557,6 +557,14 @@ class Daemon(metaclass=JSONRPCServerType):
                await self.component_manager.stop()
            else:
                self.component_startup_task.cancel()
+                # the wallet component might have not started
+                try:
+                    wallet_component = self.component_manager.get_actual_component('wallet')
+                except NameError:
+                    pass
+                else:
+                    await wallet_component.stop()
+                await self.component_manager.stop()
        log.info("stopped api components")
        await self.rpc_runner.cleanup()
        await self.streaming_runner.cleanup()
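The hunk above makes the daemon's stop path tolerate a startup that was cancelled midway: the startup task is cancelled, the wallet component is stopped if it happened to come up, and then the component manager shuts down the rest. A generic sketch of that cancel-then-clean-up pattern (names are illustrative, not the SDK's component-manager API):

    import asyncio

    async def shutdown(startup_task: asyncio.Task, components: dict):
        # 'components' maps names to objects exposing an async stop(); this only
        # loosely mirrors the shape of the daemon's component manager.
        if not startup_task.done():
            startup_task.cancel()
        wallet = components.get('wallet')
        if wallet is not None:  # the wallet may never have been constructed
            await wallet.stop()
        for component in components.values():
            if component is not wallet:
                await component.stop()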
@@ -2295,6 +2303,41 @@ class Daemon(metaclass=JSONRPCServerType):
            kwargs['is_not_spent'] = True
        return self.jsonrpc_txo_list(**kwargs)

+    async def jsonrpc_support_sum(self, claim_id, new_sdk_server, include_channel_content=False, **kwargs):
+        """
+        List total staked supports for a claim, grouped by the channel that signed the support.
+
+        If claim_id is a channel claim, you can use --include_channel_content to also include supports for
+        content claims in the channel.
+
+        !!!! NOTE: PAGINATION DOES NOT DO ANYTHING AT THE MOMENT !!!!!
+
+        Usage:
+            support_sum <claim_id> <new_sdk_server>
+                        [--include_channel_content]
+                        [--page=<page>] [--page_size=<page_size>]
+
+        Options:
+            --claim_id=<claim_id>             : (str) claim id
+            --new_sdk_server=<new_sdk_server> : (str) URL of the new SDK server (EXPERIMENTAL)
+            --include_channel_content         : (bool) if claim_id is for a channel, include supports for claims in
+                                                       that channel
+            --page=<page>                     : (int) page to return during paginating
+            --page_size=<page_size>           : (int) number of items on page during pagination
+
+        Returns: {Paginated[Dict]}
+        """
+        page_num, page_size = abs(kwargs.pop('page', 1)), min(abs(kwargs.pop('page_size', DEFAULT_PAGE_SIZE)), 50)
+        kwargs.update({'offset': page_size * (page_num - 1), 'limit': page_size})
+        support_sums = await self.ledger.sum_supports(
+            new_sdk_server, claim_id=claim_id, include_channel_content=include_channel_content, **kwargs
+        )
+        return {
+            "items": support_sums,
+            "page": page_num,
+            "page_size": page_size
+        }
+
    @requires(WALLET_COMPONENT)
    async def jsonrpc_claim_search(self, **kwargs):
        """
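For reference, the pagination clamp at the top of jsonrpc_support_sum maps a 1-based page number onto the offset/limit passed down to the ledger call, capping page_size at 50. A small standalone sketch of just that arithmetic (DEFAULT_PAGE_SIZE is the daemon's own constant; 20 is only a stand-in here):

    DEFAULT_PAGE_SIZE = 20  # stand-in value; the daemon module defines its own constant

    def clamp_pagination(page=1, page_size=DEFAULT_PAGE_SIZE, max_page_size=50):
        page_num = abs(page)
        page_size = min(abs(page_size), max_page_size)
        # offset/limit are what ends up in the kwargs forwarded to sum_supports
        return {'offset': page_size * (page_num - 1), 'limit': page_size}

    assert clamp_pagination() == {'offset': 0, 'limit': 20}
    assert clamp_pagination(page=3, page_size=100) == {'offset': 100, 'limit': 50}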
@@ -22,8 +22,7 @@ def _create_url_regex():
        return _group(
            _named(name+"_name", prefix + invalid_names_regex) +
            _oneof(
-                _group('#' + _named(name+"_claim_id", "[0-9a-f]{1,40}")),
-                _group(':' + _named(name+"_sequence", '[1-9][0-9]*')),
+                _group('[:#]' + _named(name+"_claim_id", "[0-9a-f]{1,40}")),
                _group(r'\$' + _named(name+"_amount_order", '[1-9][0-9]*'))
            ) + '?'
        )
@@ -50,7 +49,6 @@ def normalize_name(name):
class PathSegment(NamedTuple):
    name: str
    claim_id: str = None
-    sequence: int = None
    amount_order: int = None

    @property
@@ -61,17 +59,13 @@ class PathSegment(NamedTuple):
        q = {'name': self.name}
        if self.claim_id is not None:
            q['claim_id'] = self.claim_id
-        if self.sequence is not None:
-            q['sequence'] = self.sequence
        if self.amount_order is not None:
            q['amount_order'] = self.amount_order
        return q

    def __str__(self):
        if self.claim_id is not None:
-            return f"{self.name}#{self.claim_id}"
-        elif self.sequence is not None:
-            return f"{self.name}:{self.sequence}"
+            return f"{self.name}:{self.claim_id}"
        elif self.amount_order is not None:
            return f"{self.name}${self.amount_order}"
        return self.name
@@ -118,7 +112,6 @@ class URL(NamedTuple):
            segments[segment] = PathSegment(
                parts[f'{segment}_name'],
                parts[f'{segment}_claim_id'],
-                parts[f'{segment}_sequence'],
                parts[f'{segment}_amount_order']
            )
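The net effect of these URL changes: sequence segments (name:N meaning the Nth claim by that name) are removed, and ':' becomes an accepted alias for '#' in front of a claim id, which PathSegment.__str__ now prints as the canonical separator. A rough sketch of the separator change using a simplified stand-in pattern, not the SDK's full generated regex:

    import re

    # Simplified stand-in: a name followed by an optional claim id introduced
    # by either ':' or '#'.
    pattern = re.compile(r"^(?P<stream_name>[^:#$/]+)(?:[:#](?P<stream_claim_id>[0-9a-f]{1,40}))?$")

    print(pattern.match("what#6a7").groupdict())  # {'stream_name': 'what', 'stream_claim_id': '6a7'}
    print(pattern.match("what:6a7").groupdict())  # {'stream_name': 'what', 'stream_claim_id': '6a7'}
    print(pattern.match("what:1").groupdict())    # ':1' now parses as a short claim id, not a sequence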
@@ -121,7 +121,12 @@ class AIOSQLite:
        if self._closing:
            return
        self._closing = True
-        await asyncio.get_event_loop().run_in_executor(self.writer_executor, self.writer_connection.close)
+
+        def __checkpoint_and_close(conn: sqlite3.Connection):
+            conn.execute("PRAGMA WAL_CHECKPOINT(FULL);")
+            conn.close()
+        await asyncio.get_event_loop().run_in_executor(
+            self.writer_executor, __checkpoint_and_close, self.writer_connection)
        self.writer_executor.shutdown(wait=True)
        self.reader_executor.shutdown(wait=True)
        self.read_ready.clear()
@@ -145,7 +150,7 @@ class AIOSQLite:
        self.waiting_reads_metric.inc()
        self.read_count_metric.inc()
        try:
-            while self.writers:  # more writes can come in while we are waiting for the first
+            while self.writers and not self._closing:  # more writes can come in while we are waiting for the first
                if not urgent_read and still_waiting and self.urgent_read_done.is_set():
                    # throttle the writes if they pile up
                    self.urgent_read_done.clear()
@@ -153,6 +158,8 @@ class AIOSQLite:
                # wait until the running writes have finished
                await self.read_ready.wait()
                still_waiting = True
+            if self._closing:
+                raise asyncio.CancelledError
            return await asyncio.get_event_loop().run_in_executor(
                self.reader_executor, read_only_fn, sql, parameters
            )
@@ -195,6 +202,8 @@ class AIOSQLite:
        self.read_ready.clear()
        try:
            async with self.write_lock:
+                if self._closing:
+                    raise asyncio.CancelledError
                return await asyncio.get_event_loop().run_in_executor(
                    self.writer_executor, lambda: self.__run_transaction(fun, *args, **kwargs)
                )
@@ -230,6 +239,8 @@ class AIOSQLite:
        self.read_ready.clear()
        try:
            async with self.write_lock:
+                if self._closing:
+                    raise asyncio.CancelledError
                return await asyncio.get_event_loop().run_in_executor(
                    self.writer_executor, self.__run_transaction_with_foreign_keys_disabled, fun, args, kwargs
                )
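The close() change runs a full WAL checkpoint on the writer connection before closing it, so the write-ahead log is folded back into the main database file rather than being left behind in the -wal/-shm sidecar files; the _closing checks then abort reads and writes that race with shutdown. A standalone sqlite3 sketch of the checkpoint-on-close idea (throwaway /tmp path, WAL mode enabled explicitly for the demo):

    import sqlite3

    conn = sqlite3.connect("/tmp/wal_demo.db")
    conn.execute("PRAGMA journal_mode=WAL;")
    conn.execute("CREATE TABLE IF NOT EXISTS kv (k TEXT PRIMARY KEY, v TEXT)")
    conn.execute("INSERT OR REPLACE INTO kv VALUES ('a', '1')")
    conn.commit()

    # Fold the write-ahead log back into wal_demo.db before closing, so no stale
    # wal_demo.db-wal / wal_demo.db-shm files linger on disk.
    conn.execute("PRAGMA wal_checkpoint(FULL);")
    conn.close()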
@@ -945,6 +945,9 @@ class Ledger(metaclass=LedgerRegistry):
            result[url] = txo
        return result

+    async def sum_supports(self, new_sdk_server, **kwargs) -> List[Dict]:
+        return await self.network.sum_supports(new_sdk_server, **kwargs)
+
    async def claim_search(
            self, accounts, include_purchase_receipt=False, include_is_my_output=False,
            new_sdk_server=None, **kwargs) -> Tuple[List[Output], dict, int, int]:
@@ -335,6 +335,12 @@ class Network:
            result = await r.json()
            return result['result']

+    async def sum_supports(self, server, **kwargs):
+        message = {"method": "support_sum", "params": kwargs}
+        async with self.aiohttp_session.post(server, json=message) as r:
+            result = await r.json()
+            return result['result']
+

class SessionPool:
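Network.sum_supports is a plain JSON-RPC style POST to the new SDK server, following the same post-and-unwrap pattern as the method shown just above it in this hunk. A standalone aiohttp sketch of the same request (the server URL is a placeholder; the claim id is the one used by the URL tests further down):

    import asyncio
    import aiohttp

    async def sum_supports(server_url, **params):
        message = {"method": "support_sum", "params": params}
        async with aiohttp.ClientSession() as session:
            async with session.post(server_url, json=message) as response:
                body = await response.json()
                return body["result"]

    # Example (requires a reachable server; the URL is a placeholder):
    # asyncio.run(sum_supports("http://localhost:5279",
    #                          claim_id="63f2da17b0d90042c559cc73b6b17f853945c43e"))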
@@ -145,14 +145,15 @@ class Wallet:
        elif not self.is_locked:
            log.warning(
                "Disk encryption requested but no password available for encryption. "
-                "Saving wallet in an unencrypted state."
+                "Resetting encryption preferences and saving wallet in an unencrypted state."
            )
+            self.preferences[ENCRYPT_ON_DISK] = False
        return self.storage.write(self.to_dict())

    @property
    def hash(self) -> bytes:
        h = sha256()
-        if self.preferences.get(ENCRYPT_ON_DISK, False):
+        if self.is_encrypted:
            assert self.encryption_password is not None, \
                "Encryption is enabled but no password is available, cannot generate hash."
            h.update(self.encryption_password.encode())
@@ -219,7 +220,11 @@ class Wallet:

    @property
    def is_encrypted(self) -> bool:
-        return self.is_locked or self.preferences.get(ENCRYPT_ON_DISK, False)
+        # either its locked or it was unlocked using a password.
+        # if its set to encrypt on preferences but isnt encrypted and no password was given so far,
+        # then its not encrypted
+        return self.is_locked or (
+            self.preferences.get(ENCRYPT_ON_DISK, False) and self.encryption_password is not None)

    def decrypt(self):
        assert not self.is_locked, "Cannot decrypt a locked wallet, unlock first."
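With the new definition, the encrypt-on-disk preference alone no longer counts as "encrypted": a password must actually be available, or the wallet must be locked. A quick check of that logic with a stub class (the flattened attribute names here are for illustration only, not the Wallet API):

    class WalletStub:
        def __init__(self, is_locked, prefers_encrypt_on_disk, encryption_password):
            self.is_locked = is_locked
            self.prefers_encrypt_on_disk = prefers_encrypt_on_disk
            self.encryption_password = encryption_password

        @property
        def is_encrypted(self):
            return self.is_locked or (
                self.prefers_encrypt_on_disk and self.encryption_password is not None)

    assert WalletStub(True, False, None).is_encrypted        # locked implies encrypted
    assert not WalletStub(False, True, None).is_encrypted    # preference without a password: not encrypted
    assert WalletStub(False, True, "hunter2").is_encrypted   # preference plus a password: encrypted

The middle case is exactly what the test_no_password_but_encryption_preferred unit test at the end of this compare exercises.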
@@ -25,7 +25,8 @@ RETURN_DOCS = {
    'File': encode_file_doc(),
    'Transaction': encode_tx_doc(),
    'Output': encode_txo_doc(),
-    'Address': 'an address in base58'
+    'Address': 'an address in base58',
+    'Dict': 'glorious data in dictionary',
}
setup.py: 2 lines changed
@@ -34,7 +34,7 @@ setup(
    },
    install_requires=[
        'aiohttp==3.5.4',
-        'aioupnp==0.0.17',
+        'aioupnp==0.0.18',
        'appdirs==1.4.3',
        'certifi>=2018.11.29',
        'colorama==0.3.7',
@@ -117,11 +117,6 @@ class ResolveCommand(BaseResolveTestCase):
            await self.stream_create('foo', '0.9', allow_duplicate_name=True))
        # plain winning claim
        await self.assertResolvesToClaimId('foo', claim_id3)
-        # sequence resolution
-        await self.assertResolvesToClaimId('foo:1', claim_id1)
-        await self.assertResolvesToClaimId('foo:2', claim_id2)
-        await self.assertResolvesToClaimId('foo:3', claim_id3)
-        await self.assertResolvesToClaimId('foo:4', None)
        # amount order resolution
        await self.assertResolvesToClaimId('foo$1', claim_id3)
        await self.assertResolvesToClaimId('foo$2', claim_id2)
@@ -9,14 +9,15 @@ claim_id = "63f2da17b0d90042c559cc73b6b17f853945c43e"
class TestURLParsing(unittest.TestCase):

    segments = 'stream', 'channel'
-    fields = 'name', 'claim_id', 'sequence', 'amount_order'
+    fields = 'name', 'claim_id', 'amount_order'

-    def _assert_url(self, url_string, **kwargs):
+    def _assert_url(self, url_string, strictly=True, **kwargs):
        url = URL.parse(url_string)
-        if url_string.startswith('lbry://'):
-            self.assertEqual(url_string, str(url))
-        else:
-            self.assertEqual(f'lbry://{url_string}', str(url))
+        if strictly:
+            if url_string.startswith('lbry://'):
+                self.assertEqual(url_string, str(url))
+            else:
+                self.assertEqual(f'lbry://{url_string}', str(url))
        present = {}
        for key in kwargs:
            for segment_name in self.segments:
@@ -42,19 +43,22 @@ class TestURLParsing(unittest.TestCase):
        url = self._assert_url
        # stream
        url('test', stream_name='test')
-        url('test:1', stream_name='test', stream_sequence='1')
+        url('test*1', stream_name='test*1')
        url('test$1', stream_name='test', stream_amount_order='1')
-        url(f'test#{claim_id}', stream_name='test', stream_claim_id=claim_id)
+        url(f'test#{claim_id}', stream_name='test', stream_claim_id=claim_id, strictly=False)
+        url(f'test:{claim_id}', stream_name='test', stream_claim_id=claim_id)
        # channel
        url('@test', channel_name='@test')
-        url('@test:1', channel_name='@test', channel_sequence='1')
        url('@test$1', channel_name='@test', channel_amount_order='1')
-        url(f'@test#{claim_id}', channel_name='@test', channel_claim_id=claim_id)
+        url(f'@test#{claim_id}', channel_name='@test', channel_claim_id=claim_id, strictly=False)
+        url(f'@test:{claim_id}', channel_name='@test', channel_claim_id=claim_id)
        # channel/stream
        url('lbry://@test/stuff', channel_name='@test', stream_name='stuff')
-        url('lbry://@test:1/stuff', channel_name='@test', channel_sequence='1', stream_name='stuff')
        url('lbry://@test$1/stuff', channel_name='@test', channel_amount_order='1', stream_name='stuff')
-        url(f'lbry://@test#{claim_id}/stuff', channel_name='@test', channel_claim_id=claim_id, stream_name='stuff')
+        url(f'lbry://@test#{claim_id}/stuff', channel_name='@test', channel_claim_id=claim_id, stream_name='stuff', strictly=False)
+        url(f'lbry://@test:{claim_id}/stuff', channel_name='@test', channel_claim_id=claim_id, stream_name='stuff')
+        # combined legacy and new
+        url('@test:1/stuff#2', channel_claim_id='1', stream_claim_id='2', channel_name='@test', stream_name='stuff', strictly=False)
        # unicode regex edges
        _url = lambda name: url(name, stream_name=name)
        _url('\uD799')
@@ -104,10 +108,8 @@ class TestURLParsing(unittest.TestCase):
        fail("lbry://test@")
        fail("lbry://tes@t")
        fail(f"lbry://test:1#{claim_id}")
-        fail("lbry://test:0")
        fail("lbry://test$0")
        fail("lbry://test/path")
-        fail("lbry://@test1:1ab/fakepath")
        fail("lbry://test:1:1:1")
        fail("whatever/lbry://test")
        fail("lbry://lbry://test")
@@ -115,5 +117,4 @@
        fail("lbry://abc:0x123")
        fail("lbry://abc:0x123/page")
        fail("lbry://@test1#ABCDEF/fakepath")
-        fail("test:0001")
        fail("lbry://@test1$1/fakepath?arg1&arg2&arg3")
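The strictly=False flag exists because PathSegment.__str__ now prints ':' as the canonical separator: a URL parsed from the legacy '#' spelling still carries the same claim id, but it no longer round-trips to the exact input string. A hedged sketch, assuming the lbry.schema.url module path these tests import from and the URL named-tuple's stream attribute:

    from lbry.schema.url import URL

    claim_id = "63f2da17b0d90042c559cc73b6b17f853945c43e"

    legacy = URL.parse(f"lbry://test#{claim_id}")
    modern = URL.parse(f"lbry://test:{claim_id}")

    print(legacy.stream.claim_id == modern.stream.claim_id)  # True: both carry the same claim id
    print(str(legacy))  # prints the ':' form, not the '#' that went in, hence strictly=False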
@@ -74,6 +74,48 @@ class TestWalletCreation(AsyncioTestCase):
        decrypted = Wallet.unpack('password', encrypted)
        self.assertEqual(decrypted['accounts'][0]['name'], 'An Account')

+    def test_no_password_but_encryption_preferred(self):
+        wallet_dict = {
+            'version': 1,
+            'name': 'Main Wallet',
+            'preferences': {
+                "encrypt-on-disk": {
+                    "ts": 1571762543.351794,
+                    "value": True
+                },
+            },
+            'accounts': [
+                {
+                    'certificates': {},
+                    'name': 'An Account',
+                    'ledger': 'lbc_mainnet',
+                    'modified_on': 123,
+                    'seed':
+                        "carbon smart garage balance margin twelve chest sword toast envelope bottom stomac"
+                        "h absent",
+                    'encrypted': False,
+                    'private_key':
+                        'xprv9s21ZrQH143K42ovpZygnjfHdAqSd9jo7zceDfPRogM7bkkoNVv7'
+                        'DRNLEoB8HoirMgH969NrgL8jNzLEegqFzPRWM37GXd4uE8uuRkx4LAe',
+                    'public_key':
+                        'xpub661MyMwAqRbcGWtPvbWh9sc2BCfw2cTeVDYF23o3N1t6UZ5wv3EMm'
+                        'Dgp66FxHuDtWdft3B5eL5xQtyzAtkdmhhC95gjRjLzSTdkho95asu9',
+                    'address_generator': {
+                        'name': 'deterministic-chain',
+                        'receiving': {'gap': 17, 'maximum_uses_per_address': 3},
+                        'change': {'gap': 10, 'maximum_uses_per_address': 3}
+                    }
+                }
+            ]
+        }
+
+        storage = WalletStorage(default=wallet_dict)
+        wallet = Wallet.from_storage(storage, self.manager)
+        self.assertEqual(
+            hexlify(wallet.hash), b'8cc6341885e6ad46f72a17364c65f8441f09e79996c55202196b399c75f8d751'
+        )
+        self.assertFalse(wallet.is_encrypted)
+
    def test_read_write(self):
        manager = WalletManager()
        config = {'data_path': '/tmp/wallet'}