mirror of
https://github.com/LBRYFoundation/LBRY-Vault.git
synced 2025-08-23 17:47:31 +00:00
A user has a wallet file whose history includes some txid, but the corresponding raw tx is missing from the "transactions" dict in the file. When the synchronizer starts up, it requests this "missing" txn from the server... but what if the server does not know about it? Perhaps it was reorged out and is in neither the new best chain nor the mempool. This case was not handled previously. Fixes #5122.
266 lines
11 KiB
Python
266 lines
11 KiB
Python
#!/usr/bin/env python
|
|
#
|
|
# Electrum - lightweight Bitcoin client
|
|
# Copyright (C) 2014 Thomas Voegtlin
|
|
#
|
|
# Permission is hereby granted, free of charge, to any person
|
|
# obtaining a copy of this software and associated documentation files
|
|
# (the "Software"), to deal in the Software without restriction,
|
|
# including without limitation the rights to use, copy, modify, merge,
|
|
# publish, distribute, sublicense, and/or sell copies of the Software,
|
|
# and to permit persons to whom the Software is furnished to do so,
|
|
# subject to the following conditions:
|
|
#
|
|
# The above copyright notice and this permission notice shall be
|
|
# included in all copies or substantial portions of the Software.
|
|
#
|
|
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
|
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
|
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
|
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
|
|
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
|
|
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
|
|
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
# SOFTWARE.
|
|
import asyncio
|
|
import hashlib
|
|
from typing import Dict, List, TYPE_CHECKING
|
|
from collections import defaultdict
|
|
|
|
from aiorpcx import TaskGroup, run_in_thread
|
|
|
|
from .transaction import Transaction
|
|
from .util import bh2u, make_aiohttp_session, NetworkJobOnDefaultServer
|
|
from .bitcoin import address_to_scripthash, is_address
|
|
from .network import UntrustedServerReturnedError
|
|
|
|
if TYPE_CHECKING:
|
|
from .network import Network
|
|
from .address_synchronizer import AddressSynchronizer
|
|
|
|
|
|
def history_status(h):
    """Compute the Electrum-protocol status of an address history.

    *h* is a sequence of (tx_hash, height) pairs.  Returns None for an
    empty history; otherwise the hex-encoded sha256 digest of the
    concatenated "<tx_hash>:<height>:" entries.
    """
    if not h:
        return None
    parts = ['%s:%d:' % (tx_hash, height) for tx_hash, height in h]
    return bh2u(hashlib.sha256(''.join(parts).encode('ascii')).digest())
|
|
|
|
|
class SynchronizerBase(NetworkJobOnDefaultServer):
    """Subscribe over the network to a set of addresses, and monitor their statuses.
    Every time a status changes, run a coroutine provided by the subclass.
    """
    def __init__(self, network: 'Network'):
        self.asyncio_loop = network.asyncio_loop
        NetworkJobOnDefaultServer.__init__(self, network)

    def _reset(self):
        # Reset per-server state; the base class calls this when (re)connecting.
        super()._reset()
        self.requested_addrs = set()     # addresses queued or awaiting subscription
        self.scripthash_to_address = {}  # scripthash -> address, to map server notifications back
        self._processed_some_notifications = False  # so that we don't miss them
        # Queues
        self.add_queue = asyncio.Queue()     # addresses waiting to be subscribed to
        self.status_queue = asyncio.Queue()  # (scripthash, status) notifications from the server

    async def _start_tasks(self):
        # Run the three long-lived coroutines inside the shared task group;
        # when the group is cancelled, drop the server-side subscription.
        try:
            async with self.group as group:
                await group.spawn(self.send_subscriptions())
                await group.spawn(self.handle_status())
                await group.spawn(self.main())
        finally:
            # we are being cancelled now
            self.session.unsubscribe(self.status_queue)

    def add(self, addr):
        """Thread-safe entry point: start watching a new address."""
        asyncio.run_coroutine_threadsafe(self._add_address(addr), self.asyncio_loop)

    async def _add_address(self, addr: str):
        # Queue an address for subscription, deduplicating repeated requests.
        if not is_address(addr): raise ValueError(f"invalid bitcoin address {addr}")
        if addr in self.requested_addrs: return
        self.requested_addrs.add(addr)
        await self.add_queue.put(addr)

    async def _on_address_status(self, addr, status):
        """Handle the change of the status of an address."""
        raise NotImplementedError()  # implemented by subclasses

    async def send_subscriptions(self):
        # Drain add_queue forever, subscribing to each address on the server.
        async def subscribe_to_address(addr):
            h = address_to_scripthash(addr)
            self.scripthash_to_address[h] = addr
            await self.session.subscribe('blockchain.scripthash.subscribe', [h], self.status_queue)
            # only remove from requested_addrs once the subscription succeeded
            self.requested_addrs.remove(addr)

        while True:
            addr = await self.add_queue.get()
            await self.group.spawn(subscribe_to_address, addr)

    async def handle_status(self):
        # Dispatch each (scripthash, status) notification to the subclass hook.
        while True:
            h, status = await self.status_queue.get()
            addr = self.scripthash_to_address[h]
            await self.group.spawn(self._on_address_status, addr, status)
            self._processed_some_notifications = True

    async def main(self):
        raise NotImplementedError()  # implemented by subclasses
|
class Synchronizer(SynchronizerBase):
    '''The synchronizer keeps the wallet up-to-date with its set of
    addresses and their transactions. It subscribes over the network
    to wallet addresses, gets the wallet to generate new addresses
    when necessary, requests the transaction history of any addresses
    we don't have the full history of, and requests binary transaction
    data of any transactions the wallet doesn't have.
    '''
    def __init__(self, wallet: 'AddressSynchronizer'):
        self.wallet = wallet
        SynchronizerBase.__init__(self, wallet.network)

    def _reset(self):
        # Clear request bookkeeping; also called when the default server changes.
        super()._reset()
        self.requested_tx = {}         # tx_hash -> tx_height: raw txs requested from server
        self.requested_histories = {}  # addr -> status: history requests in flight

    def diagnostic_name(self):
        return '{}:{}'.format(self.__class__.__name__, self.wallet.diagnostic_name())

    def is_up_to_date(self):
        # Up to date iff nothing is pending: no address subscriptions,
        # no history requests, and no raw-tx requests.
        return (not self.requested_addrs
                and not self.requested_histories
                and not self.requested_tx)

    async def _on_address_status(self, addr, status):
        """Handle the change of the status of an address.

        Fetch the address history from the server, sanity-check it against
        the announced status, store it, and request any txs we lack.
        """
        history = self.wallet.history.get(addr, [])
        if history_status(history) == status:
            return
        if addr in self.requested_histories:
            return
        # request address history
        self.requested_histories[addr] = status
        h = address_to_scripthash(addr)
        result = await self.network.get_history_for_scripthash(h)
        self.print_error("receiving history", addr, len(result))
        hashes = set(map(lambda item: item['tx_hash'], result))
        hist = list(map(lambda item: (item['tx_hash'], item['height']), result))
        # tx_fees
        tx_fees = [(item['tx_hash'], item.get('fee')) for item in result]
        tx_fees = dict(filter(lambda x:x[1] is not None, tx_fees))
        # Check that txids are unique
        if len(hashes) != len(result):
            self.print_error("error: server history has non-unique txids: %s"% addr)
        # Check that the status corresponds to what was announced
        elif history_status(hist) != status:
            self.print_error("error: status mismatch: %s" % addr)
        else:
            # Store received history
            self.wallet.receive_history_callback(addr, hist, tx_fees)
            # Request transactions we don't have
            await self._request_missing_txs(hist)

        # Remove request; this allows up_to_date to be True
        self.requested_histories.pop(addr)

    async def _request_missing_txs(self, hist, *, allow_server_not_finding_tx=False):
        # "hist" is a list of [tx_hash, tx_height] lists
        transaction_hashes = []
        for tx_hash, tx_height in hist:
            if tx_hash in self.requested_tx:
                continue
            if tx_hash in self.wallet.transactions:
                continue
            transaction_hashes.append(tx_hash)
            self.requested_tx[tx_hash] = tx_height

        if not transaction_hashes: return
        # fetch all missing txs concurrently
        async with TaskGroup() as group:
            for tx_hash in transaction_hashes:
                await group.spawn(self._get_transaction(tx_hash, allow_server_not_finding_tx=allow_server_not_finding_tx))

    async def _get_transaction(self, tx_hash, *, allow_server_not_finding_tx=False):
        """Fetch one raw tx from the server, verify it, and hand it to the wallet.

        If allow_server_not_finding_tx is true, tolerate the server not
        knowing the tx (e.g. it was reorged out of the best chain and is no
        longer in the mempool): drop the request instead of raising.
        """
        try:
            result = await self.network.get_transaction(tx_hash)
        except UntrustedServerReturnedError as e:
            # most likely, "No such mempool or blockchain transaction"
            if allow_server_not_finding_tx:
                self.requested_tx.pop(tx_hash)
                return
            else:
                raise
        tx = Transaction(result)
        try:
            tx.deserialize()
        except Exception:
            self.print_msg("cannot deserialize transaction, skipping", tx_hash)
            return
        # guard against a malicious/buggy server returning a different tx
        if tx_hash != tx.txid():
            self.print_error("received tx does not match expected txid ({} != {})"
                             .format(tx_hash, tx.txid()))
            return
        tx_height = self.requested_tx.pop(tx_hash)
        self.wallet.receive_tx_callback(tx_hash, tx, tx_height)
        self.print_error("received tx %s height: %d bytes: %d" %
                         (tx_hash, tx_height, len(tx.raw)))
        # callbacks
        self.wallet.network.trigger_callback('new_transaction', self.wallet, tx)

    async def main(self):
        self.wallet.set_up_to_date(False)
        # request missing txns, if any
        for history in self.wallet.history.values():
            # Old electrum servers returned ['*'] when all history for the address
            # was pruned. This no longer happens but may remain in old wallets.
            if history == ['*']: continue
            # The wallet file may reference txids the server no longer knows
            # about (reorged out, not in mempool) -- tolerate that here (#5122).
            await self._request_missing_txs(history, allow_server_not_finding_tx=True)
        # add addresses to bootstrap
        for addr in self.wallet.get_addresses():
            await self._add_address(addr)
        # main loop
        while True:
            await asyncio.sleep(0.1)
            await run_in_thread(self.wallet.synchronize)
            up_to_date = self.is_up_to_date()
            # propagate up_to_date transitions (and notification activity
            # while already up to date) to the wallet and the GUI callback
            if (up_to_date != self.wallet.is_up_to_date()
                    or up_to_date and self._processed_some_notifications):
                self._processed_some_notifications = False
                self.wallet.set_up_to_date(up_to_date)
                self.wallet.network.trigger_callback('wallet_updated', self.wallet)
|
class Notifier(SynchronizerBase):
    """Watch a set of addresses; every time the status of an address
    changes, send an HTTP POST to each URL registered for it.
    """
    def __init__(self, network):
        super().__init__(network)
        # address -> list of callback URLs to notify on status change
        self.watched_addresses = defaultdict(list)  # type: Dict[str, List[str]]
        self.start_watching_queue = asyncio.Queue()

    async def main(self):
        # resend existing subscriptions if we were restarted
        for watched_addr in self.watched_addresses:
            await self._add_address(watched_addr)
        # main loop: accept new (address, url) registrations forever
        while True:
            new_addr, callback_url = await self.start_watching_queue.get()
            self.watched_addresses[new_addr].append(callback_url)
            await self._add_address(new_addr)

    async def _on_address_status(self, addr, status):
        # POST the new status as JSON to every URL watching this address.
        self.print_error('new status for addr {}'.format(addr))
        post_headers = {'content-type': 'application/json'}
        payload = {'address': addr, 'status': status}
        for callback_url in self.watched_addresses[addr]:
            try:
                session_cm = make_aiohttp_session(proxy=self.network.proxy, headers=post_headers)
                async with session_cm as http_session:
                    async with http_session.post(callback_url, json=payload, headers=post_headers) as response:
                        # drain the body; the content itself is ignored
                        await response.text()
            except Exception as e:
                # best-effort delivery: log the failure and keep going
                self.print_error(str(e))
            else:
                self.print_error('Got Response for {}'.format(addr))