mirror of https://github.com/LBRYFoundation/LBRY-Vault.git
synced 2025-08-28 07:51:27 +00:00

lightning: remove hub based approach, port qt gui to lnbase

This commit is contained in:
parent  67dd377a05
commit  98f73348c1

14 changed files with 129 additions and 4314 deletions
@@ -23,7 +23,6 @@
 # CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 # SOFTWARE.

-import queue
 import sys
 import datetime
 import copy
@@ -47,7 +46,6 @@ from .storage import WalletStorage
 from . import keystore
 from .wallet import Wallet, Imported_Wallet, Abstract_Wallet
 from .mnemonic import Mnemonic
-from .import lightning

 if TYPE_CHECKING:
     from .network import Network
@@ -746,22 +744,6 @@ class Commands:
         # for the python console
         return sorted(known_commands.keys())

-    @command("wn")
-    def lightning(self, lcmd, lightningargs=None):
-        q = queue.Queue()
-        class FakeQtSignal:
-            def emit(self, data):
-                q.put(data)
-        class MyConsole:
-            new_lightning_result = FakeQtSignal()
-        self.wallet.network.lightningrpc.setConsole(MyConsole())
-        if lightningargs:
-            lightningargs = json_decode(lightningargs)
-        else:
-            lightningargs = []
-        lightning.lightningCall(self.wallet.network.lightningrpc, lcmd)(*lightningargs)
-        return q.get(block=True, timeout=600)
-
 def eval_bool(x: str) -> bool:
     if x == 'false': return False
     if x == 'true': return True
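The removed `lightning` command above bridged the asynchronous hub-based `lightningrpc` to the synchronous CLI by blocking on a `queue.Queue` that a fake Qt signal fed. A minimal sketch of that bridging pattern, with hypothetical names standing in for the Electrum objects (this is an illustration of the technique being removed, not code from the repository):

import queue
import threading

def call_and_wait(rpc_call, timeout=600):
    # rpc_call is assumed to deliver its result by calling emit() on the
    # console-like object it is handed, as the hub-based lightningrpc did.
    q = queue.Queue()

    class FakeQtSignal:
        # mimics a Qt signal: emit() just hands the payload to the queue
        def emit(self, data):
            q.put(data)

    class MyConsole:
        new_lightning_result = FakeQtSignal()

    # the worker thread runs the call and "emits" the result
    threading.Thread(target=rpc_call, args=(MyConsole(),), daemon=True).start()
    # the CLI thread blocks here until the result arrives or times out
    return q.get(block=True, timeout=timeout)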
@@ -825,8 +807,7 @@ command_options = {
     'show_fiat': (None, "Show fiat value of transactions"),
     'year': (None, "Show history for a given year"),
     'fee_method': (None, "Fee estimation method to use"),
-    'fee_level': (None, "Float between 0.0 and 1.0, representing fee slider position"),
-    'lightningargs':(None, "Arguments for an lncli subcommand, encoded as a JSON array"),
+    'fee_level': (None, "Float between 0.0 and 1.0, representing fee slider position")
 }

@@ -933,7 +914,6 @@ def add_global_options(parser):
     group.add_argument("--testnet", action="store_true", dest="testnet", default=False, help="Use Testnet")
     group.add_argument("--regtest", action="store_true", dest="regtest", default=False, help="Use Regtest")
     group.add_argument("--simnet", action="store_true", dest="simnet", default=False, help="Use Simnet")
-    group.add_argument("--lightning", action="store_true", dest="lightning", default=False, help="Enable Lightning support via hub")
     group.add_argument("--lnbase", action="store_true", dest="lnbase", default=False, help="Enable Lightning support")

 def get_parser():
@@ -47,8 +47,6 @@ from electrum.util import (UserCancelled, PrintError,

 from .installwizard import InstallWizard

-from electrum.lightning import LightningUI
-
 try:
     from . import icons_rc
 except Exception as e:
@@ -130,11 +128,6 @@ class ElectrumGui(PrintError):
         # the OS/window manager/etc might set *a dark theme*.
         # Hence, try to choose colors accordingly:
         ColorScheme.update_from_widget(QWidget(), force_dark=use_dark_theme)
-        self.lightning = LightningUI(self.set_console_and_return_lightning)
-
-    def set_console_and_return_lightning(self):
-        self.windows[0].wallet.network.lightningrpc.setConsole(self.windows[0].console)
-        return self.windows[0].wallet.network.lightningrpc

     def build_tray_menu(self):
         # Avoid immediate GC of old menu when window closed via its action
@@ -161,7 +161,7 @@ class ElectrumWindow(QMainWindow, MessageBoxMixin, PrintError):
         tabs.addTab(self.create_history_tab(), QIcon(":icons/tab_history.png"), _('History'))
         tabs.addTab(self.send_tab, QIcon(":icons/tab_send.png"), _('Send'))
         tabs.addTab(self.receive_tab, QIcon(":icons/tab_receive.png"), _('Receive'))
-        if config.get("lightning", False):
+        if config.get("lnbase", False):
             self.lightning_invoices_tab = self.create_lightning_invoices_tab(wallet)
             tabs.addTab(self.lightning_invoices_tab, _("Lightning Invoices"))

@@ -810,11 +810,11 @@ class ElectrumWindow(QMainWindow, MessageBoxMixin, PrintError):
         self.update_completions()

     def create_lightning_invoices_tab(self, wallet):
-        self.lightning_invoice_list = LightningInvoiceList(self, wallet.network.lightningworker, wallet.network.lightningrpc)
+        self.lightning_invoice_list = LightningInvoiceList(self, wallet.lnworker)
         return self.lightning_invoice_list

     def create_lightning_channels_tab(self, wallet):
-        self.lightning_channels_list = LightningChannelsList(self, wallet.network.lightningworker, wallet.network.lightningrpc)
+        self.lightning_channels_list = LightningChannelsList(self, wallet.lnworker)
         return self.lightning_channels_list

     def create_history_tab(self):
@@ -1974,7 +1974,7 @@ class ElectrumWindow(QMainWindow, MessageBoxMixin, PrintError):
             'wallet': self.wallet,
             'network': self.network,
             'plugins': self.gui_object.plugins,
-            'l': self.gui_object.lightning,
+            'lightning' : self.wallet.lnworker.console_interface,
             'window': self,
             'config': self.config,
             'electrum': electrum,
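After this change the Qt GUI talks to lnbase only through the wallet's `lnworker` object. The calls visible in this commit imply a small thread-safe surface; the stub below is a hypothetical summary of that interface, with signatures inferred from the call sites in the diff (a sketch, not the actual lnbase class):

class LNWorkerInterface:
    """Hypothetical summary of the lnworker surface the Qt code relies on."""

    # Widgets hand in bound Qt signal .emit callables; the worker thread
    # invokes them whenever its state changes, so updates cross threads safely.
    def subscribe_channel_list_updates_from_other_thread(self, emit_function): ...
    def subscribe_single_channel_update_from_other_thread(self, emit_function): ...
    def subscribe_invoice_added_from_other_thread(self, emit_function): ...
    def subscribe_payment_received_from_other_thread(self, emit_function): ...

    # Entry points called from the GUI thread.
    def open_channel_from_other_thread(self, node_id, local_amt, push_amt,
                                       emit_function, get_password): ...
    def close_channel_from_other_thread(self, chan_id): ...
    def add_invoice_from_other_thread(self, amt): ...

    # Object exposed to the Qt console as the 'lightning' variable.
    console_interface = None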
@@ -67,9 +67,6 @@ if TYPE_CHECKING:
     from .simple_config import SimpleConfig


-from .lightning import LightningRPC
-from .lightning import LightningWorker
-
 TX_STATUS = [
     _('Unconfirmed'),
     _('Unconfirmed parent'),
@@ -3,11 +3,11 @@ import binascii, base64
 from PyQt5 import QtCore, QtWidgets
 from collections import OrderedDict
 import logging
-from electrum.lightning import lightningCall
 import traceback

-mapping = {0: "channel_point"}
-revMapp = {"channel_point": 0}
+# https://api.lightning.community/#listchannels
+mapping = {0: "chan_id"}
+revMapp = {"chan_id": 0}
 datatable = OrderedDict([])

 class MyTableRow(QtWidgets.QTreeWidgetItem):
@@ -29,78 +29,64 @@ class MyTableRow(QtWidgets.QTreeWidgetItem):

 def addChannelRow(new):
     made = MyTableRow(new)
-    datatable[new["channel_point"]] = made
-    datatable.move_to_end(new["channel_point"], last=False)
+    datatable[new["chan_id"]] = made
+    datatable.move_to_end(new["chan_id"], last=False)
     return made

-def clickHandler(nodeIdInput, local_amt_inp, push_amt_inp, lightningRpc):
-    nodeId = nodeIdInput.text()
-    print("creating channel with connstr {}".format(nodeId))
-    lightningCall(lightningRpc, "openchannel")(str(nodeId), local_amt_inp.text(), push_amt_inp.text())
-
 class LightningChannelsList(QtWidgets.QWidget):
-    update_rows = QtCore.pyqtSignal(str, dict)
+    update_rows = QtCore.pyqtSignal(dict)
+    update_single_row = QtCore.pyqtSignal(dict)

-    def create_menu(self, position):
-        menu = QtWidgets.QMenu()
-        cur = self._tv.currentItem()
-        channel_point = cur["channel_point"]
-        def close():
-            params = [str(channel_point)] + (["--force"] if not cur["active"] else []) # TODO test if force is being used correctly
-            lightningCall(self.lightningRpc, "closechannel")(*params)
-        menu.addAction("Close channel", close)
-        menu.exec_(self._tv.viewport().mapToGlobal(position))
-    def lightningWorkerHandler(self, sourceClassName, obj):
-        new = {}
-        for k, v in obj.items():
-            try:
-                v = binascii.hexlify(base64.b64decode(v)).decode("ascii")
-            except:
-                pass
-            new[k] = v
+    def clickHandler(self, nodeIdInput, local_amt_inp, push_amt_inp, lnworker):
+        nodeId = nodeIdInput.text()
+        print("creating channel with connstr {}".format(nodeId))
+        local_amt = int(local_amt_inp.text())
         try:
-            obj = datatable[new["channel_point"]]
+            push_amt = int(push_amt_inp.text())
+        except ValueError:
+            push_amt = 0
+        assert local_amt >= 200000
+        assert local_amt >= push_amt
+        obj = lnworker.open_channel_from_other_thread(node_id=str(nodeId), local_amt=local_amt, push_amt=push_amt, emit_function=self.update_rows.emit, get_password=self.main_window.password_dialog)
+
+    @QtCore.pyqtSlot(dict)
+    def do_update_single_row(self, new):
+        try:
+            obj = datatable[new["chan_id"]]
         except KeyError:
-            print("lightning channel_point {} unknown!".format(new["channel_point"]))
+            print("lightning chan_id {} unknown!".format(new["chan_id"]))
         else:
             for k, v in new.items():
                 try:
                     if obj[k] != v: obj[k] = v
                 except KeyError:
                     obj[k] = v
-    def lightningRpcHandler(self, methodName, obj):
-        if isinstance(obj, Exception):
-            try:
-                raise obj
-            except:
-                traceback.print_exc()
-        else:
-            self.update_rows.emit(methodName, obj)

-    def do_update_rows(self, methodName, obj):
-        if methodName != "listchannels":
-            print("channel list ignoring reply {} to {}".format(obj, methodName))
-            return
+    def create_menu(self, position):
+        menu = QtWidgets.QMenu()
+        cur = self._tv.currentItem()
+        def close():
+            print("closechannel result", lnworker.close_channel_from_other_thread(cur.di))
+        menu.addAction("Close channel", close)
+        menu.exec_(self._tv.viewport().mapToGlobal(position))
+
+    @QtCore.pyqtSlot(dict)
+    def do_update_rows(self, obj):
         self._tv.clear()
         for i in obj["channels"]:
             self._tv.insertTopLevelItem(0, addChannelRow(i))

-    def __init__(self, parent, lightningWorker, lightningRpc):
+    def __init__(self, parent, lnworker):
         QtWidgets.QWidget.__init__(self, parent)
+        self.main_window = parent

         self.update_rows.connect(self.do_update_rows)
+        self.update_single_row.connect(self.do_update_single_row)

-        def tick():
-            lightningCall(lightningRpc, "listchannels")()
-
-        timer = QtCore.QTimer(self)
-        timer.timeout.connect(tick)
-        timer.start(5000)
-
-        lightningWorker.subscribe(self.lightningWorkerHandler)
-        lightningRpc.subscribe(self.lightningRpcHandler)
-        self.lightningRpc = lightningRpc
+        self.lnworker = lnworker
+
+        lnworker.subscribe_channel_list_updates_from_other_thread(self.update_rows.emit)
+        lnworker.subscribe_single_channel_update_from_other_thread(self.update_single_row.emit)

         self._tv=QtWidgets.QTreeWidget(self)
         self._tv.setHeaderLabels([mapping[i] for i in range(len(mapping))])
@@ -113,7 +99,7 @@ class LightningChannelsList(QtWidgets.QWidget):
         push_amt_inp = QtWidgets.QLineEdit(self)

         button = QtWidgets.QPushButton('Open channel', self)
-        button.clicked.connect(lambda: clickHandler(nodeid_inp, local_amt_inp, push_amt_inp, lightningRpc))
+        button.clicked.connect(lambda: self.clickHandler(nodeid_inp, local_amt_inp, push_amt_inp, lnworker))

         l=QtWidgets.QVBoxLayout(self)
         h=QtWidgets.QGridLayout(self)
@@ -139,71 +125,3 @@ class LightningChannelsList(QtWidgets.QWidget):
         l.addWidget(self._tv)

         self.resize(2500,1000)
-
-class MockLightningWorker:
-    def subscribe(self, handler):
-        pass
-
-if __name__=="__main__":
-    import queue, threading, asyncio
-    from sys import argv, exit
-    import signal , traceback, os
-
-    loop = asyncio.new_event_loop()
-
-    async def loopstop():
-        loop.stop()
-
-    def signal_handler(signal, frame):
-        asyncio.run_coroutine_threadsafe(loopstop(), loop)
-
-    signal.signal(signal.SIGINT, signal_handler)
-
-    a=QtWidgets.QApplication(argv)
-
-    gotReplyHandlerLock = threading.Lock()
-    gotReplyHandlerLock.acquire()
-    replyHandler = None
-
-    class MockLightningRPC:
-        def __init__(self, q):
-            self.queue = q
-        def subscribe(self, handler):
-            global replyHandler
-            replyHandler = handler
-            gotReplyHandlerLock.release()
-
-    q = queue.Queue()
-    w=LightningChannelsList(None, MockLightningWorker(), MockLightningRPC(q))
-    w.show()
-    w.raise_()
-
-    async def the_job():
-        try:
-            acquired_once = False
-            while loop.is_running():
-                try:
-                    cmd = q.get_nowait()
-                except queue.Empty:
-                    await asyncio.sleep(1)
-                    continue
-                if not acquired_once:
-                    gotReplyHandlerLock.acquire()
-                    acquired_once = True
-                if cmd[0] == "listchannels":
-                    #replyHandler("listchannels", Exception("Test exception"))
-                    replyHandler("listchannels", {"channels": [{"channel_point": binascii.hexlify(os.urandom(32)).decode("ascii"), "active": True}]})
-                elif cmd[0] == "openchannel":
-                    replyHandler("openchannel", {})
-                else:
-                    print("mock rpc server ignoring", cmd[0])
-        except:
-            traceback.print_exc()
-
-    def asyncioThread():
-        loop.create_task(the_job())
-        loop.run_forever()
-
-    threading.Thread(target=asyncioThread).start()
-
-    exit(a.exec_())
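The pattern the ported widgets rely on is worth spelling out: background (lnworker) threads never touch Qt widgets directly, they only call the bound `.emit` of a `pyqtSignal`, and Qt queues the call so the matching `@pyqtSlot` runs in the GUI thread. A minimal self-contained sketch of that technique (hypothetical names, not LBRY-Vault code):

import threading, time
from PyQt5 import QtCore, QtWidgets

class ChannelView(QtWidgets.QListWidget):
    # emitted from a worker thread, delivered in the GUI thread
    update_rows = QtCore.pyqtSignal(dict)

    def __init__(self):
        super().__init__()
        self.update_rows.connect(self.do_update_rows)

    @QtCore.pyqtSlot(dict)
    def do_update_rows(self, obj):
        self.clear()
        for chan in obj["channels"]:
            self.addItem(chan["chan_id"])

def fake_worker(emit_function):
    # stands in for the callback handed to lnworker's subscribe_*_from_other_thread
    time.sleep(1)
    emit_function({"channels": [{"chan_id": "deadbeef"}]})

if __name__ == "__main__":
    app = QtWidgets.QApplication([])
    view = ChannelView()
    view.show()
    threading.Thread(target=fake_worker, args=(view.update_rows.emit,), daemon=True).start()
    app.exec_()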
@@ -4,8 +4,8 @@ import binascii
 from PyQt5 import QtCore, QtWidgets
 from collections import OrderedDict
 import logging
-from electrum.lightning import lightningCall
 from .qrcodewidget import QRDialog
+from PyQt5.QtCore import pyqtSignal, pyqtSlot

 mapping = {0: "r_hash", 1: "pay_req", 2: "settled"}
 revMapp = {"r_hash": 0, "pay_req": 1, "settled": 2}
@@ -38,23 +38,29 @@ def addInvoiceRow(new):
     datatable.move_to_end(new["r_hash"], last=False)
     return made

-def clickHandler(numInput, treeView, lightningRpc):
-    amt = numInput.value()
-    if amt < 1:
-        print("value too small")
-        return
-    print("creating invoice with value {}".format(amt))
-    global idx
-    #obj = {
-    #    "r_hash": binascii.hexlify((int.from_bytes(bytearray.fromhex("9500edb0994b7bc23349193486b25c82097045db641f35fa988c0e849acdec29"), "big")+idx).to_bytes(byteorder="big", length=32)).decode("ascii"),
-    #    "pay_req": "lntb81920n1pdf258s" + str(idx),
-    #    "settled": False
-    #}
-    #treeView.insertTopLevelItem(0, addInvoiceRow(obj))
-    idx += 1
-    lightningCall(lightningRpc, "addinvoice")("--amt=" + str(amt))
-
 class LightningInvoiceList(QtWidgets.QWidget):
+    invoice_added_signal = QtCore.pyqtSignal(dict)
+
+    @QtCore.pyqtSlot(dict)
+    def invoice_added_handler(self, di):
+        self._tv.insertTopLevelItem(0, addInvoiceRow(invoice))
+
+    def clickHandler(self, numInput, treeView, lnworker):
+        amt = numInput.value()
+        if amt < 1:
+            print("value too small")
+            return
+        print("creating invoice with value {}".format(amt))
+        global idx
+        #obj = {
+        #    "r_hash": binascii.hexlify((int.from_bytes(bytearray.fromhex("9500edb0994b7bc23349193486b25c82097045db641f35fa988c0e849acdec29"), "big")+idx).to_bytes(byteorder="big", length=32)).decode("ascii"),
+        #    "pay_req": "lntb81920n1pdf258s" + str(idx),
+        #    "settled": False
+        #}
+        #treeView.insertTopLevelItem(0, addInvoiceRow(obj))
+        idx += 1
+        lnworker.add_invoice_from_other_thread(amt)

     def create_menu(self, position):
         menu = QtWidgets.QMenu()
         pay_req = self._tv.currentItem()["pay_req"]
@@ -68,14 +74,11 @@ class LightningInvoiceList(QtWidgets.QWidget):
         menu.addAction("Copy payment request", copy)
         menu.addAction("Show payment request as QR code", qr)
         menu.exec_(self._tv.viewport().mapToGlobal(position))
-    def lightningWorkerHandler(self, sourceClassName, obj):
-        new = {}
-        for k, v in obj.items():
-            try:
-                v = binascii.hexlify(base64.b64decode(v)).decode("ascii")
-            except:
-                pass
-            new[k] = v
+
+    payment_received_signal = pyqtSignal(dict)
+
+    @pyqtSlot(dict)
+    def paymentReceived(self, new):
         try:
             obj = datatable[new["r_hash"]]
         except KeyError:
@@ -86,17 +89,15 @@ class LightningInvoiceList(QtWidgets.QWidget):
                 if obj[k] != v: obj[k] = v
             except KeyError:
                 obj[k] = v
-    def lightningRpcHandler(self, methodName, obj):
-        if methodName != "addinvoice":
-            print("ignoring reply {} to {}".format(obj, methodName))
-            return
-        self._tv.insertTopLevelItem(0, addInvoiceRow(obj))
-
-    def __init__(self, parent, lightningWorker, lightningRpc):
+
+    def __init__(self, parent, lnworker):
         QtWidgets.QWidget.__init__(self, parent)

-        lightningWorker.subscribe(self.lightningWorkerHandler)
-        lightningRpc.subscribe(self.lightningRpcHandler)
+        self.payment_received_signal.connect(self.paymentReceived)
+        self.invoice_added_signal.connect(self.invoice_added_handler)
+
+        lnworker.subscribe_payment_received_from_other_thread(self.payment_received_signal.emit)
+        lnworker.subscribe_invoice_added_from_other_thread(self.invoice_added_signal.emit)

         self._tv=QtWidgets.QTreeWidget(self)
         self._tv.setHeaderLabels([mapping[i] for i in range(len(mapping))])
@@ -108,12 +109,12 @@ class LightningInvoiceList(QtWidgets.QWidget):
             def keyPressEvent(self2, e):
                 super(SatoshiCountSpinBox, self2).keyPressEvent(e)
                 if QtCore.Qt.Key_Return == e.key():
-                    clickHandler(self2, self._tv, lightningRpc)
+                    self.clickHandler(self2, self._tv, lnworker)

         numInput = SatoshiCountSpinBox(self)

         button = QtWidgets.QPushButton('Add invoice', self)
-        button.clicked.connect(lambda: clickHandler(numInput, self._tv, lightningRpc))
+        button.clicked.connect(lambda: self.clickHandler(numInput, self._tv, lnworker))

         l=QtWidgets.QVBoxLayout(self)
         h=QtWidgets.QGridLayout(self)
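With the hub mock removed from the channels file, standalone testing of these widgets would now have to fake the lnworker surface instead. A hypothetical sketch of what that could look like for LightningInvoiceList (the mock class and the wiring are assumptions, not part of this commit):

# hypothetical stand-in for wallet.lnworker, for GUI-only experiments
class MockLNWorker:
    def __init__(self):
        self._invoice_added = None
        self._payment_received = None

    def subscribe_invoice_added_from_other_thread(self, emit_function):
        self._invoice_added = emit_function

    def subscribe_payment_received_from_other_thread(self, emit_function):
        self._payment_received = emit_function

    def add_invoice_from_other_thread(self, amt):
        # pretend the daemon created an invoice and report it back with dummy data
        if self._invoice_added:
            self._invoice_added({"r_hash": "00" * 32,
                                 "pay_req": "lntb1...",
                                 "settled": False})

# usage (inside a running QApplication):
#   invoice_list = LightningInvoiceList(main_window, MockLNWorker())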
919
lib/lightning.py
919
lib/lightning.py
|
@ -1,919 +0,0 @@
|
||||||
-import functools
-import datetime
-import sys
-import struct
-import traceback
-import os.path
-from .ln import rpc_pb2
-
-from jsonrpclib import Server
-from google.protobuf import json_format
-import binascii
-import ecdsa.util
-import hashlib
-from .bitcoin import EC_KEY, MySigningKey
-from ecdsa.curves import SECP256k1
-from . import bitcoin
-from . import transaction
-from . import keystore
-
-import queue
-
-import threading
-import json
-import base64
-
-import asyncio
-
-from concurrent.futures import TimeoutError
-
-WALLET = None
-NETWORK = None
-CONFIG = None
-locked = set()
-
-
-def WriteDb(json):
-    req = rpc_pb2.WriteDbRequest()
-    json_format.Parse(json, req)
-    print("writedb unimplemented", req.dbData)
-    m = rpc_pb2.WriteDbResponse()
-    msg = json_format.MessageToJson(m)
-    return msg
-
-
-def ConfirmedBalance(json):
-    request = rpc_pb2.ConfirmedBalanceRequest()
-    json_format.Parse(json, request)
-    m = rpc_pb2.ConfirmedBalanceResponse()
-    confs = request.confirmations
-    #witness = request.witness # bool
-
-    m.amount = sum(WALLET.get_balance())
-    msg = json_format.MessageToJson(m)
-    return msg
-
-
-def NewAddress(json):
-    request = rpc_pb2.NewAddressRequest()
-    json_format.Parse(json, request)
-    m = rpc_pb2.NewAddressResponse()
-    if request.type == rpc_pb2.WITNESS_PUBKEY_HASH:
-        m.address = WALLET.get_unused_address()
-    elif request.type == rpc_pb2.NESTED_PUBKEY_HASH:
-        assert False, "cannot handle nested-pubkey-hash address type generation yet"
-    elif request.type == rpc_pb2.PUBKEY_HASH:
-        assert False, "cannot handle pubkey_hash generation yet"
-    else:
-        assert False, "unknown address type"
-    msg = json_format.MessageToJson(m)
-    return msg
-
-
-#def FetchRootKey(json):
-#    request = rpc_pb2.FetchRootKeyRequest()
-#    json_format.Parse(json, request)
-#    m = rpc_pb2.FetchRootKeyResponse()
-#    m.rootKey = WALLET.keystore.get_private_key([151,151,151,151], None)[0]
-#    msg = json_format.MessageToJson(m)
-#    return msg
-
-
-cl = rpc_pb2.ListUnspentWitnessRequest
-
-assert rpc_pb2.WITNESS_PUBKEY_HASH is not None
-
-
-def ListUnspentWitness(json):
-    req = cl()
-    json_format.Parse(json, req)
-    confs = req.minConfirmations #TODO regard this
-
-    unspent = WALLET.get_utxos()
-    m = rpc_pb2.ListUnspentWitnessResponse()
-    for utxo in unspent:
-        # print(utxo)
-        # example:
-        # {'prevout_n': 0,
-        # 'address': 'sb1qt52ccplvtpehz7qvvqft2udf2eaqvfsal08xre',
-        # 'prevout_hash': '0d4caccd6e8a906c8ca22badf597c4dedc6dd7839f3cac3137f8f29212099882',
-        # 'coinbase': False,
-        # 'height': 326,
-        # 'value': 400000000}
-
-        global locked
-        if (utxo["prevout_hash"], utxo["prevout_n"]) in locked:
-            print("SKIPPING LOCKED OUTPOINT", utxo["prevout_hash"])
-            continue
-        towire = m.utxos.add()
-        towire.addressType = rpc_pb2.WITNESS_PUBKEY_HASH
-        towire.redeemScript = b""
-        towire.pkScript = b""
-        towire.witnessScript = bytes(bytearray.fromhex(
-            bitcoin.address_to_script(utxo["address"])))
-        towire.value = utxo["value"]
-        towire.outPoint.hash = utxo["prevout_hash"]
-        towire.outPoint.index = utxo["prevout_n"]
-    return json_format.MessageToJson(m)
-
-def LockOutpoint(json):
-    req = rpc_pb2.LockOutpointRequest()
-    json_format.Parse(json, req)
-    global locked
-    locked.add((req.outpoint.hash, req.outpoint.index))
-
-
-def UnlockOutpoint(json):
-    req = rpc_pb2.UnlockOutpointRequest()
-    json_format.Parse(json, req)
-    global locked
-    # throws KeyError if not existing. Use .discard() if we do not care
-    locked.remove((req.outpoint.hash, req.outpoint.index))
-
-def ListTransactionDetails(json):
-    global WALLET
-    global NETWORK
-    m = rpc_pb2.ListTransactionDetailsResponse()
-    for tx_hash, height, conf, timestamp, delta, balance in WALLET.get_history():
-        if height == 0:
-            print("WARNING", tx_hash, "has zero height!")
-        detail = m.details.add()
-        detail.hash = tx_hash
-        detail.value = delta
-        detail.numConfirmations = conf
-        detail.blockHash = NETWORK.blockchain().get_hash(height)
-        detail.blockHeight = height
-        detail.timestamp = timestamp
-        detail.totalFees = 1337 # TODO
-    return json_format.MessageToJson(m)
-
-def FetchInputInfo(json):
-    req = rpc_pb2.FetchInputInfoRequest()
-    json_format.Parse(json, req)
-    has = req.outPoint.hash
-    idx = req.outPoint.index
-    txoinfo = WALLET.txo.get(has, {})
-    m = rpc_pb2.FetchInputInfoResponse()
-    if has in WALLET.transactions:
-        tx = WALLET.transactions[has]
-        m.mine = True
-    else:
-        tx = WALLET.get_input_tx(has)
-        print("did not find tx with hash", has)
-        print("tx", tx)
-
-        m.mine = False
-        return json_format.MessageToJson(m)
-    outputs = tx.outputs()
-    assert {bitcoin.TYPE_SCRIPT: "SCRIPT", bitcoin.TYPE_ADDRESS: "ADDRESS",
-            bitcoin.TYPE_PUBKEY: "PUBKEY"}[outputs[idx][0]] == "ADDRESS"
-    scr = transaction.Transaction.pay_script(outputs[idx][0], outputs[idx][1])
-    m.txOut.value = outputs[idx][2] # type, addr, val
-    m.txOut.pkScript = bytes(bytearray.fromhex(scr))
-    msg = json_format.MessageToJson(m)
-    return msg
-
-def SendOutputs(json):
-    global NETWORK, WALLET, CONFIG
-
-    req = rpc_pb2.SendOutputsRequest()
-    json_format.Parse(json, req)
-
-    m = rpc_pb2.SendOutputsResponse()
-
-    elecOutputs = [(bitcoin.TYPE_SCRIPT, binascii.hexlify(txout.pkScript).decode("utf-8"), txout.value) for txout in req.outputs]
-
-    print("ignoring feeSatPerByte", req.feeSatPerByte) # TODO
-
-    tx = None
-    try:
-        # outputs, password, config, fee
-        tx = WALLET.mktx(elecOutputs, None, CONFIG, 1000)
-    except Exception as e:
-        m.success = False
-        m.error = str(e)
-        m.resultHash = ""
-        return json_format.MessageToJson(m)
-
-    publishTxThread(tx)
-    m.success = True
-    m.error = ""
-    m.resultHash = tx.txid()
-    return json_format.MessageToJson(m)
-
-def isSynced():
-    global NETWORK
-    local_height, server_height = NETWORK.get_status_value("updated")
-    synced = server_height != 0 and NETWORK.is_up_to_date() and local_height >= server_height
-    return synced, local_height, server_height
-
-def IsSynced(json):
-    m = rpc_pb2.IsSyncedResponse()
-    m.synced, localHeight, _ = isSynced()
-    block = NETWORK.blockchain().read_header(localHeight)
-    m.lastBlockTimeStamp = block["timestamp"]
-    return json_format.MessageToJson(m)
-
-def SignMessage(json):
-    req = rpc_pb2.SignMessageRequest()
-    json_format.Parse(json, req)
-    m = rpc_pb2.SignMessageResponse()
-
-    pri = privKeyForPubKey(req.pubKey)
-
-    m.signature = pri.sign(bitcoin.Hash(req.messageToBeSigned), ecdsa.util.sigencode_der)
-    m.error = ""
-    m.success = True
-    return json_format.MessageToJson(m)
-
-def LEtobytes(x, l):
-    if l == 2:
-        fmt = "<H"
-    elif l == 4:
-        fmt = "<I"
-    elif l == 8:
-        fmt = "<Q"
-    else:
-        assert False, "invalid format for LEtobytes"
-    return struct.pack(fmt, x)
-
-
-def toint(x):
-    if len(x) == 1:
-        return ord(x)
-    elif len(x) == 2:
-        fmt = ">H"
-    elif len(x) == 4:
-        fmt = ">I"
-    elif len(x) == 8:
-        fmt = ">Q"
-    else:
-        assert False, "invalid length for toint(): " + str(len(x))
-    return struct.unpack(fmt, x)[0]
-
-class TxSigHashes(object):
-    def __init__(self, hashOutputs=None, hashSequence=None, hashPrevOuts=None):
-        self.hashOutputs = hashOutputs
-        self.hashSequence = hashSequence
-        self.hashPrevOuts = hashPrevOuts
-
-
-class Output(object):
-    def __init__(self, value=None, pkScript=None):
-        assert value is not None and pkScript is not None
-        self.value = value
-        self.pkScript = pkScript
-
-
-class InputScript(object):
-    def __init__(self, scriptSig, witness):
-        assert witness is None or type(witness[0]) is type(bytes([]))
-        assert type(scriptSig) is type(bytes([]))
-        self.scriptSig = scriptSig
-        self.witness = witness
-
-
-def tweakPrivKey(basePriv, commitTweak):
-    tweakInt = int.from_bytes(commitTweak, byteorder="big")
-    tweakInt += basePriv.secret # D is secret
-    tweakInt %= SECP256k1.generator.order()
-    return EC_KEY(tweakInt.to_bytes(32, 'big'))
-
-def singleTweakBytes(commitPoint, basePoint):
-    m = hashlib.sha256()
-    m.update(bytearray.fromhex(commitPoint))
-    m.update(bytearray.fromhex(basePoint))
-    return m.digest()
-
-def deriveRevocationPrivKey(revokeBasePriv, commitSecret):
-    revokeTweakBytes = singleTweakBytes(revokeBasePriv.get_public_key(True),
-                                        commitSecret.get_public_key(True))
-    revokeTweakInt = int.from_bytes(revokeTweakBytes, byteorder="big")
-
-    commitTweakBytes = singleTweakBytes(commitSecret.get_public_key(True),
-                                        revokeBasePriv.get_public_key(True))
-    commitTweakInt = int.from_bytes(commitTweakBytes, byteorder="big")
-
-    revokeHalfPriv = revokeTweakInt * revokeBasePriv.secret # D is secret
-    commitHalfPriv = commitTweakInt * commitSecret.secret
-
-    revocationPriv = revokeHalfPriv + commitHalfPriv
-    revocationPriv %= SECP256k1.generator.order()
-
-    return EC_KEY(revocationPriv.to_bytes(32, byteorder="big"))
-
-
-def maybeTweakPrivKey(signdesc, pri):
-    if len(signdesc.singleTweak) > 0:
-        pri2 = tweakPrivKey(pri, signdesc.singleTweak)
-    elif len(signdesc.doubleTweak) > 0:
-        pri2 = deriveRevocationPrivKey(pri, EC_KEY(signdesc.doubleTweak))
-    else:
-        pri2 = pri
-
-    if pri2 != pri:
-        have_keys = WALLET.storage.get("lightning_extra_keys", [])
-        if pri2.secret not in have_keys:
-            WALLET.storage.put("lightning_extra_keys", have_keys + [pri2.secret])
-            WALLET.storage.write()
-            print("saved new tweaked key", pri2.secret)
-
-    return pri2
-
-
-def isWitnessPubKeyHash(script):
-    if len(script) != 2:
-        return False
-    haveop0 = (transaction.opcodes.OP_0 == script[0][0])
-    haveopdata20 = (20 == script[1][0])
-    return haveop0 and haveopdata20
-
-#// calcWitnessSignatureHash computes the sighash digest of a transaction's
-#// segwit input using the new, optimized digest calculation algorithm defined
-#// in BIP0143: https://github.com/bitcoin/bips/blob/master/bip-0143.mediawiki.
-#// This function makes use of pre-calculated sighash fragments stored within
-#// the passed HashCache to eliminate duplicate hashing computations when
-#// calculating the final digest, reducing the complexity from O(N^2) to O(N).
-#// Additionally, signatures now cover the input value of the referenced unspent
-#// output. This allows offline, or hardware wallets to compute the exact amount
-#// being spent, in addition to the final transaction fee. In the case the
-#// wallet if fed an invalid input amount, the real sighash will differ causing
-#// the produced signature to be invalid.
-
-
-def calcWitnessSignatureHash(original, sigHashes, hashType, tx, idx, amt):
-    assert len(original) != 0
-    decoded = transaction.deserialize(binascii.hexlify(tx).decode("utf-8"))
-    if idx > len(decoded["inputs"]) - 1:
-        raise Exception("invalid inputIndex")
-    txin = decoded["inputs"][idx]
-    #tohash = transaction.Transaction.serialize_witness(txin)
-    sigHash = LEtobytes(decoded["version"], 4)
-    if toint(hashType) & toint(sigHashAnyOneCanPay) == 0:
-        sigHash += bytes(bytearray.fromhex(sigHashes.hashPrevOuts))[::-1]
-    else:
-        sigHash += b"\x00" * 32
-
-    if toint(hashType) & toint(sigHashAnyOneCanPay) == 0 and toint(hashType) & toint(sigHashMask) != toint(sigHashSingle) and toint(hashType) & toint(sigHashMask) != toint(sigHashNone):
-        sigHash += bytes(bytearray.fromhex(sigHashes.hashSequence))[::-1]
-    else:
-        sigHash += b"\x00" * 32
-
-    sigHash += bytes(bytearray.fromhex(txin["prevout_hash"]))[::-1]
-    sigHash += LEtobytes(txin["prevout_n"], 4)
-    # byte 72
-
-    subscript = list(transaction.script_GetOp(original))
-    if isWitnessPubKeyHash(subscript):
-        sigHash += b"\x19"
-        sigHash += bytes([transaction.opcodes.OP_DUP])
-        sigHash += bytes([transaction.opcodes.OP_HASH160])
-        sigHash += b"\x14" # 20 bytes
-        assert len(subscript) == 2, subscript
-        opcode, data, length = subscript[1]
-        sigHash += data
-        sigHash += bytes([transaction.opcodes.OP_EQUALVERIFY])
-        sigHash += bytes([transaction.opcodes.OP_CHECKSIG])
-    else:
-        # For p2wsh outputs, and future outputs, the script code is
-        # the original script, with all code separators removed,
-        # serialized with a var int length prefix.
-
-        assert len(sigHash) == 104, len(sigHash)
-        sigHash += bytes(bytearray.fromhex(bitcoin.var_int(len(original))))
-        assert len(sigHash) == 105, len(sigHash)
-
-        sigHash += original
-
-    sigHash += LEtobytes(amt, 8)
-    sigHash += LEtobytes(txin["sequence"], 4)
-
-    if toint(hashType) & toint(sigHashSingle) != toint(sigHashSingle) and toint(hashType) & toint(sigHashNone) != toint(sigHashNone):
-        sigHash += bytes(bytearray.fromhex(sigHashes.hashOutputs))[::-1]
-    elif toint(hashtype) & toint(sigHashMask) == toint(sigHashSingle) and idx < len(decoded["outputs"]):
-        raise Exception("TODO 1")
-    else:
-        raise Exception("TODO 2")
-
-    sigHash += LEtobytes(decoded["lockTime"], 4)
-    sigHash += LEtobytes(toint(hashType), 4)
-
-    return transaction.Hash(sigHash)
-
-#// RawTxInWitnessSignature returns the serialized ECDA signature for the input
-#// idx of the given transaction, with the hashType appended to it. This
-#// function is identical to RawTxInSignature, however the signature generated
-#// signs a new sighash digest defined in BIP0143.
-# func RawTxInWitnessSignature(tx *MsgTx, sigHashes *TxSigHashes, idx int,
-#   amt int64, subScript []byte, hashType SigHashType,
-#   key *btcec.PrivateKey) ([]byte, error) {
-
-
-def rawTxInWitnessSignature(tx, sigHashes, idx, amt, subscript, hashType, key):
-    digest = calcWitnessSignatureHash(
-        subscript, sigHashes, hashType, tx, idx, amt)
-    return key.sign(digest, sigencode=ecdsa.util.sigencode_der) + hashType
-
-# WitnessSignature creates an input witness stack for tx to spend BTC sent
-# from a previous output to the owner of privKey using the p2wkh script
-# template. The passed transaction must contain all the inputs and outputs as
-# dictated by the passed hashType. The signature generated observes the new
-# transaction digest algorithm defined within BIP0143.
-def witnessSignature(tx, sigHashes, idx, amt, subscript, hashType, privKey, compress):
-    sig = rawTxInWitnessSignature(
-        tx, sigHashes, idx, amt, subscript, hashType, privKey)
-
-    pkData = bytes(bytearray.fromhex(
-        privKey.get_public_key(compressed=compress)))
-
-    return sig, pkData
-
-
-sigHashMask = b"\x1f"
-
-sigHashAll = b"\x01"
-sigHashNone = b"\x02"
-sigHashSingle = b"\x03"
-sigHashAnyOneCanPay = b"\x80"
-
-test = rpc_pb2.ComputeInputScriptResponse()
-
-test.witnessScript.append(b"\x01")
-test.witnessScript.append(b"\x02")
-
-
-def SignOutputRaw(json):
-    req = rpc_pb2.SignOutputRawRequest()
-    json_format.Parse(json, req)
-
-    #assert len(req.signDesc.pubKey) in [33, 0]
-    assert len(req.signDesc.doubleTweak) in [32, 0]
-    assert len(req.signDesc.sigHashes.hashPrevOuts) == 64
-    assert len(req.signDesc.sigHashes.hashSequence) == 64
-    assert len(req.signDesc.sigHashes.hashOutputs) == 64
-
-    m = rpc_pb2.SignOutputRawResponse()
-
-    m.signature = signOutputRaw(req.tx, req.signDesc)
-
-    msg = json_format.MessageToJson(m)
-    return msg
-
-
-def signOutputRaw(tx, signDesc):
-    pri = derivePrivKey(signDesc.keyDescriptor)
-    assert pri is not None
-    pri2 = maybeTweakPrivKey(signDesc, pri)
-    sig = rawTxInWitnessSignature(tx, signDesc.sigHashes, signDesc.inputIndex,
-        signDesc.output.value, signDesc.witnessScript, sigHashAll, pri2)
-    return sig[:len(sig) - 1]
-
-def publishTxThread(tx):
-    global NETWORK
-    def target(tx, NETWORK):
-        try:
-            res = NETWORK.broadcast(tx)
-            print("PUBLISH TRANSACTION IN SEPARATE THREAD PRODUCED", res)
-        except:
-            traceback.print_exc()
-    threading.Thread(target=target, args=(tx, NETWORK)).start()
-
-async def PublishTransaction(json):
-    req = rpc_pb2.PublishTransactionRequest()
-    json_format.Parse(json, req)
-    tx = transaction.Transaction(binascii.hexlify(req.tx).decode("utf-8"))
-    publishTxThread(tx)
-    m = rpc_pb2.PublishTransactionResponse()
-    m.success = True
-    m.error = ""
-    if m.error:
-        print("PublishTransaction", m.error)
-        if "Missing inputs" in m.error:
-            print("inputs", tx.inputs())
-    return json_format.MessageToJson(m)
-
-
-def ComputeInputScript(json):
-    req = rpc_pb2.ComputeInputScriptRequest()
-    json_format.Parse(json, req)
-
-    #assert len(req.signDesc.pubKey) in [33, 0]
-    assert len(req.signDesc.doubleTweak) in [32, 0]
-    assert len(req.signDesc.sigHashes.hashPrevOuts) == 64
-    assert len(req.signDesc.sigHashes.hashSequence) == 64
-    assert len(req.signDesc.sigHashes.hashOutputs) == 64
-    # singleTweak , witnessScript variable length
-
-    try:
-        inpscr = computeInputScript(req.tx, req.signDesc)
-    except:
-        print("catched!")
-        traceback.print_exc()
-        return None
-
-    m = rpc_pb2.ComputeInputScriptResponse()
-
-    m.witnessScript.append(inpscr.witness[0])
-    m.witnessScript.append(inpscr.witness[1])
-    m.scriptSig = inpscr.scriptSig
-
-    msg = json_format.MessageToJson(m)
-    return msg
-
-
-def fetchPrivKey(str_address, keyLocatorFamily, keyLocatorIndex):
-    pri = None
-
-    if str_address is not None:
-        pri, redeem_script = WALLET.export_private_key(str_address, None)
-
-        if redeem_script:
-            print("ignoring redeem script", redeem_script)
-
-        typ, pri, compressed = bitcoin.deserialize_privkey(pri)
-        if keyLocatorFamily == 0 and keyLocatorIndex == 0: return EC_KEY(pri)
-
-        ks = keystore.BIP32_KeyStore({})
-        der = "m/0'/"
-        xtype = 'p2wpkh'
-        ks.add_xprv_from_seed(pri, xtype, der)
-    else:
-        ks = WALLET.keystore
-
-    if keyLocatorFamily != 0 or keyLocatorIndex != 0:
-        pri = ks.get_private_key([1017, keyLocatorFamily, keyLocatorIndex], password=None)[0]
-        pri = EC_KEY(pri)
-
-    assert pri is not None
-
-    return pri
-
-
-def computeInputScript(tx, signdesc):
-    typ, str_address = transaction.get_address_from_output_script(
-        signdesc.output.pkScript)
-    assert typ != bitcoin.TYPE_SCRIPT
-
-    assert len(signdesc.keyDescriptor.pubKey) == 0
-    pri = fetchPrivKey(str_address, signdesc.keyDescriptor.keyLocator.family, signdesc.keyDescriptor.keyLocator.index)
-
-    isNestedWitness = False # because NewAddress only does native addresses
-
-    witnessProgram = None
-    ourScriptSig = None
-
-    if isNestedWitness:
-        pub = pri.get_public_key()
-
-        scr = bitcoin.hash_160(pub)
-
-        witnessProgram = b"\x00\x14" + scr
-
-        # \x14 is OP_20
-        ourScriptSig = b"\x16\x00\x14" + scr
-    else:
-        # TODO TEST
-        witnessProgram = signdesc.output.pkScript
-        ourScriptSig = b""
-        print("set empty ourScriptSig")
-        print("witnessProgram", witnessProgram)
-
-    # If a tweak (single or double) is specified, then we'll need to use
-    # this tweak to derive the final private key to be used for signing
-    # this output.
-    pri2 = maybeTweakPrivKey(signdesc, pri)
-
-    #
-    # Generate a valid witness stack for the input.
-    # TODO(roasbeef): adhere to passed HashType
-    witnessScript, pkData = witnessSignature(tx, signdesc.sigHashes,
-        signdesc.inputIndex, signdesc.output.value, witnessProgram,
-        sigHashAll, pri2, True)
-    return InputScript(witness=(witnessScript, pkData), scriptSig=ourScriptSig)
-
-from collections import namedtuple
-QueueItem = namedtuple("QueueItem", ["methodName", "args"])
-
-class LightningRPC:
-    def __init__(self):
-        super(LightningRPC, self).__init__()
-        self.queue = queue.Queue()
-        self.subscribers = []
-        self.console = None
-
-    # overridden
-    async def run(self, netAndWalLock):
-        while asyncio.get_event_loop().is_running():
-            try:
-                qitem = self.queue.get(block=False)
-            except queue.Empty:
-                await asyncio.sleep(5)
-                pass
-            else:
-                def lightningRpcNetworkRequestThreadTarget(qitem):
-                    machine = CONFIG.get('lndhost', '127.0.0.1')
-                    applyMethodName = lambda x: functools.partial(x, qitem.methodName)
-                    client = Server("http://" + machine + ":8090")
-                    argumentStrings = [str(x) for x in qitem.args]
-                    lightningSessionKey = base64.b64encode(privateKeyHash[:6]).decode("ascii")
-                    resolvedMethod = getattr(client, qitem.methodName)
-                    try:
-                        result = resolvedMethod(lightningSessionKey, *argumentStrings)
-                    except Exception as e:
-                        traceback.print_exc()
-                        for i in self.subscribers: applyMethodName(i)(e)
-                        raise
-                    toprint = result
-                    try:
-                        assert type(result) is not str, result
-                        assert result["stderr"] == "" and result["returncode"] == 0, "LightningRPC detected error: " + result["stderr"]
-                        toprint = json.loads(result["stdout"])
-                        for i in self.subscribers: applyMethodName(i)(toprint)
-                    except Exception as e:
-                        traceback.print_exc()
-                        for i in self.subscribers: applyMethodName(i)(e)
-                    if self.console:
-                        self.console.new_lightning_result.emit(json.dumps(toprint, indent=4))
-                threading.Thread(target=lightningRpcNetworkRequestThreadTarget, args=(qitem, )).start()
-    def setConsole(self, console):
-        self.console = console
-    def subscribe(self, notifyFunction):
-        self.subscribers.append(notifyFunction)
-    def clearSubscribers(self):
-        self.subscribers = []
-
-def lightningCall(rpc, methodName):
-    def fun(*args):
-        rpc.queue.put(QueueItem(methodName, args))
-    return fun
-
-class LightningUI():
-    def __init__(self, lightningGetter):
-        self.rpc = lightningGetter
-    def __getattr__(self, nam):
-        synced, local, server = isSynced()
-        if not synced:
-            return lambda *args: "Not synced yet: local/server: {}/{}".format(local, server)
-        return lightningCall(self.rpc(), nam)
-
-privateKeyHash = None
-
-class LightningWorker:
-    def __init__(self, wallet, network, config):
-        global privateKeyHash
-        super(LightningWorker, self).__init__()
-        self.server = None
-        self.wallet = wallet
-        self.network = network
-        self.config = config
-        ks = self.wallet.keystore
-        assert hasattr(ks, "xprv"), "Wallet must have xprv, can't be e.g. imported"
-        try:
-            xprv = ks.get_master_private_key(None)
-            xprv, xpub = bitcoin.bip32_private_derivation(xprv, "m/", "m/152/152/152/152")
-        except:
-            raise Exception("Could not get master private key, is the wallet password protected?")
-        tupl = bitcoin.deserialize_xprv(xprv)
-        privKey = tupl[-1]
-        assert type(privKey) is type(bytes([]))
-        privateKeyHash = bitcoin.Hash(privKey)
-        deser = bitcoin.deserialize_xpub(wallet.keystore.xpub)
-        assert deser[0] == "p2wpkh", deser
-        self.subscribers = []
-
-    async def run(self, netAndWalLock):
-        global WALLET, NETWORK
-        global CONFIG
-        global globalIdx
-
-        wasAlreadyUpToDate = False
-        while asyncio.get_event_loop().is_running():
-            WALLET = self.wallet
-            NETWORK = self.network
-            CONFIG = self.config
-            machine = CONFIG.get('lndhost', '127.0.0.1')
-            globalIdx = WALLET.storage.get("lightning_global_key_index", 0)
-            if globalIdx != 0:
-                print("initial lightning global key index", globalIdx)
-            writer = None
-            print("OPENING CONNECTION")
-            try:
-                reader, writer = await asyncio.wait_for(asyncio.open_connection(machine, 1080), 5)
-                writer.write(b"MAGIC")
-                writer.write(privateKeyHash[:6])
-                await asyncio.wait_for(writer.drain(), 5)
-                while asyncio.get_event_loop().is_running():
-                    print(datetime.datetime.now(), "READING REQUEST")
-                    obj = await readJson(reader)
-                    if not obj: continue
-                    if "id" not in obj:
-                        print("Invoice update?", obj)
-                        for i in self.subscribers: i(obj)
-                        continue
-                    print(datetime.datetime.now(), "making reply")
-                    await asyncio.wait_for(readReqAndReply(obj, writer, netAndWalLock), 10)
-            except:
-                traceback.print_exc()
-                await asyncio.sleep(5)
-                continue
-
-    def subscribe(self, notifyFunction):
-        self.subscribers.append(functools.partial(notifyFunction, "LightningWorker"))
-
-async def readJson(reader):
-    data = b""
-    while asyncio.get_event_loop().is_running():
-        newlines = sum(1 if x == b"\n"[0] else 0 for x in data)
-        if newlines > 1: print("Too many newlines in Electrum/lightning.py!", data)
-        try:
-            return json.loads(data.decode("ascii")) # decoding for python3.5 compat
-        except ValueError:
-            try:
-                data += await asyncio.wait_for(reader.read(1), 1)
-            except TimeoutError:
-                continue
-
-globLock = None
-
-async def readReqAndReply(obj, writer, netAndWalLock):
-    global globLock
-    methods = [
-        # SecretKeyRing
-        DerivePrivKey,
-        DeriveNextKey,
-        DeriveKey,
-        ScalarMult
-        # Signer / BlockchainIO
-        ,ConfirmedBalance
-        ,NewAddress
-        ,ListUnspentWitness
-        ,WriteDb
-        ,FetchInputInfo
-        ,ComputeInputScript
-        ,SignOutputRaw
-        ,PublishTransaction
-        ,LockOutpoint
-        ,UnlockOutpoint
-        ,ListTransactionDetails
-        ,SendOutputs
-        ,IsSynced
-        ,SignMessage]
-    result = None
-    found = False
-    try:
-        for method in methods:
-            if method.__name__ == obj["method"]:
-                params = obj["params"][0]
-                print("calling method", obj["method"], "with", params)
-                globLock = netAndWalLock
-                netAndWalLock.acquire()
-                if asyncio.iscoroutinefunction(method):
-                    result = await method(params)
-                else:
-                    result = method(params)
-                netAndWalLock.release()
-                found = True
-                break
-    except Exception as e:
-        traceback.print_exc()
-        print("exception while calling method", obj["method"])
-        writer.write(json.dumps({"id":obj["id"],"error": {"code": -32002, "message": traceback.format_exc()}}).encode("ascii") + b"\n")
-        await writer.drain()
-    else:
-        if not found:
-            # TODO assumes obj has id
-            writer.write(json.dumps({"id":obj["id"],"error": {"code": -32601, "message": "invalid method"}}).encode("ascii") + b"\n")
-        else:
-            print("result was", result)
-            if result is None:
-                result = "{}"
-            try:
-                assert type({}) is type(json.loads(result))
-            except:
-                traceback.print_exc()
-                print("wrong method implementation")
-                writer.write(json.dumps({"id":obj["id"],"error": {"code": -32000, "message": "wrong return type in electrum-lightning-hub"}}).encode("ascii") + b"\n")
-            else:
-                writer.write(json.dumps({"id":obj["id"],"result": result}).encode("ascii") + b"\n")
-        await writer.drain()
-
def privKeyForPubKey(pubKey):
    global globalIdx
    priv_keys = WALLET.storage.get("lightning_extra_keys", [])
    for i in priv_keys:
        candidate = EC_KEY(i.to_bytes(32, "big"))
        if pubkFromECKEY(candidate) == pubKey:
            return candidate

    attemptKeyIdx = globalIdx - 1
    while attemptKeyIdx >= 0:
        attemptPrivKey = fetchPrivKey(None, 9000, attemptKeyIdx)
        attempt = pubkFromECKEY(attemptPrivKey)
        if attempt == pubKey:
            return attemptPrivKey
        attemptKeyIdx -= 1

    adr = bitcoin.pubkey_to_address('p2wpkh', binascii.hexlify(pubKey).decode("utf-8"))
    pri, redeem_script = WALLET.export_private_key(adr, None)

    if redeem_script:
        print("ignoring redeem script", redeem_script)

    typ, pri, compressed = bitcoin.deserialize_privkey(pri)
    return EC_KEY(pri)

    #assert False, "could not find private key for pubkey {} hex={}".format(pubKey, binascii.hexlify(pubKey).decode("ascii"))

def derivePrivKey(keyDesc):
    keyDescFam = keyDesc.keyLocator.family
    keyDescIdx = keyDesc.keyLocator.index
    keyDescPubKey = keyDesc.pubKey
    privKey = None

    if len(keyDescPubKey) != 0:
        return privKeyForPubKey(keyDescPubKey)

    return fetchPrivKey(None, keyDescFam, keyDescIdx)

def DerivePrivKey(json):
    req = rpc_pb2.DerivePrivKeyRequest()
    json_format.Parse(json, req)

    m = rpc_pb2.DerivePrivKeyResponse()

    m.privKey = derivePrivKey(req.keyDescriptor).secret.to_bytes(32, "big")

    msg = json_format.MessageToJson(m)
    return msg

globalIdx = None

def DeriveNextKey(json):
    global globalIdx
    req = rpc_pb2.DeriveNextKeyRequest()
    json_format.Parse(json, req)

    family = req.keyFamily

    m = rpc_pb2.DeriveNextKeyResponse()

    # lnd leaves these unset:
    # source: https://github.com/lightningnetwork/lnd/pull/769/files#diff-c954f5135a8995b1a3dfa298101dd0efR160
    #m.keyDescriptor.keyLocator.family =
    #m.keyDescriptor.keyLocator.index =

    m.keyDescriptor.pubKey = pubkFromECKEY(fetchPrivKey(None, 9000, globalIdx))
    globalIdx += 1
    WALLET.storage.put("lightning_global_key_index", globalIdx)
    WALLET.storage.write()

    msg = json_format.MessageToJson(m)
    return msg

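globalIdx is written back to wallet storage after every derivation, but its initialisation is outside this excerpt. The counterpart presumably looks roughly like the following sketch (the storage key is the one used by the put() above; the function name and the default of 0 are assumptions):

def initGlobalKeyIndex():
    global globalIdx
    # mirror of the put() in DeriveNextKey; starting at 0 when nothing is stored is an assumption
    globalIdx = WALLET.storage.get("lightning_global_key_index", 0)
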
def DeriveKey(json):
    req = rpc_pb2.DeriveKeyRequest()
    json_format.Parse(json, req)

    family = req.keyLocator.family
    idx = req.keyLocator.index

    m = rpc_pb2.DeriveKeyResponse()

    # lnd sets these to parameter values
    m.keyDescriptor.keyLocator.family = family
    m.keyDescriptor.keyLocator.index = idx

    m.keyDescriptor.pubKey = pubkFromECKEY(fetchPrivKey(None, family, idx))

    msg = json_format.MessageToJson(m)
    return msg

#// ScalarMult performs a scalar multiplication (ECDH-like operation) between
#// the target key descriptor and remote public key. The output returned will be
#// the sha256 of the resulting shared point serialized in compressed format. If
#// k is our private key, and P is the public key, we perform the following
#// operation:
#//
#//  sx := k*P
#//  s := sha256(sx.SerializeCompressed())
def ScalarMult(json):
    req = rpc_pb2.ScalarMultRequest()
    json_format.Parse(json, req)

    privKey = derivePrivKey(req.keyDescriptor)

    point = bitcoin.ser_to_point(req.pubKey)

    point = point * privKey.secret

    c = hashlib.sha256()
    c.update(bitcoin.point_to_ser(point, True))

    m = rpc_pb2.ScalarMultResponse()

    m.hashResult = c.digest()

    msg = json_format.MessageToJson(m)
    return msg

def pubkFromECKEY(eckey):
    return bytes(bytearray.fromhex(eckey.get_public_key(True)))  # compressed=True
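The comment above ScalarMult is the usual ECDH construction: both ends hash the compressed encoding of k*P and obtain the same 32-byte secret, since k1*(k2*G) = k2*(k1*G). A small symmetry check reusing the helpers this module already has (EC_KEY, bitcoin.ser_to_point, bitcoin.point_to_ser, pubkFromECKEY); it is a sketch for illustration, not part of the bridge:

import hashlib, os

def shared_secret(my_key, their_pub_ser):
    # sha256 of the compressed serialization of my_priv * their_pub, exactly as ScalarMult does
    point = bitcoin.ser_to_point(their_pub_ser) * my_key.secret
    return hashlib.sha256(bitcoin.point_to_ser(point, True)).digest()

# alice, bob = EC_KEY(os.urandom(32)), EC_KEY(os.urandom(32))
# assert shared_secret(alice, pubkFromECKEY(bob)) == shared_secret(bob, pubkFromECKEY(alice))
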
@@ -1,46 +0,0 @@
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/api/annotations.proto

import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()


from . import http_pb2 as google_dot_api_dot_http__pb2
from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2


DESCRIPTOR = _descriptor.FileDescriptor(
  name='google/api/annotations.proto',
  package='google.api',
  syntax='proto3',
  serialized_pb=_b('\n\x1cgoogle/api/annotations.proto\x12\ngoogle.api\x1a\x15google/api/http.proto\x1a google/protobuf/descriptor.proto:E\n\x04http\x12\x1e.google.protobuf.MethodOptions\x18\xb0\xca\xbc\" \x01(\x0b\x32\x14.google.api.HttpRuleBn\n\x0e\x63om.google.apiB\x10\x41nnotationsProtoP\x01ZAgoogle.golang.org/genproto/googleapis/api/annotations;annotations\xa2\x02\x04GAPIb\x06proto3')
  ,
  dependencies=[google_dot_api_dot_http__pb2.DESCRIPTOR,google_dot_protobuf_dot_descriptor__pb2.DESCRIPTOR,])


HTTP_FIELD_NUMBER = 72295728
http = _descriptor.FieldDescriptor(
  name='http', full_name='google.api.http', index=0,
  number=72295728, type=11, cpp_type=10, label=1,
  has_default_value=False, default_value=None,
  message_type=None, enum_type=None, containing_type=None,
  is_extension=True, extension_scope=None,
  options=None, file=DESCRIPTOR)

DESCRIPTOR.extensions_by_name['http'] = http
_sym_db.RegisterFileDescriptor(DESCRIPTOR)

http.message_type = google_dot_api_dot_http__pb2._HTTPRULE
google_dot_protobuf_dot_descriptor__pb2.MethodOptions.RegisterExtension(http)

DESCRIPTOR.has_options = True
DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\016com.google.apiB\020AnnotationsProtoP\001ZAgoogle.golang.org/genproto/googleapis/api/annotations;annotations\242\002\004GAPI'))
# @@protoc_insertion_point(module_scope)
@@ -1,236 +0,0 @@
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/api/http.proto

import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()


DESCRIPTOR = _descriptor.FileDescriptor(
  name='google/api/http.proto',
  package='google.api',
  syntax='proto3',
  serialized_pb=_b('\n\x15google/api/http.proto\x12\ngoogle.api\"+\n\x04Http\x12#\n\x05rules\x18\x01 \x03(\x0b\x32\x14.google.api.HttpRule\"\xea\x01\n\x08HttpRule\x12\x10\n\x08selector\x18\x01 \x01(\t\x12\r\n\x03get\x18\x02 \x01(\tH\x00\x12\r\n\x03put\x18\x03 \x01(\tH\x00\x12\x0e\n\x04post\x18\x04 \x01(\tH\x00\x12\x10\n\x06\x64\x65lete\x18\x05 \x01(\tH\x00\x12\x0f\n\x05patch\x18\x06 \x01(\tH\x00\x12/\n\x06\x63ustom\x18\x08 \x01(\x0b\x32\x1d.google.api.CustomHttpPatternH\x00\x12\x0c\n\x04\x62ody\x18\x07 \x01(\t\x12\x31\n\x13\x61\x64\x64itional_bindings\x18\x0b \x03(\x0b\x32\x14.google.api.HttpRuleB\t\n\x07pattern\"/\n\x11\x43ustomHttpPattern\x12\x0c\n\x04kind\x18\x01 \x01(\t\x12\x0c\n\x04path\x18\x02 \x01(\tBj\n\x0e\x63om.google.apiB\tHttpProtoP\x01ZAgoogle.golang.org/genproto/googleapis/api/annotations;annotations\xf8\x01\x01\xa2\x02\x04GAPIb\x06proto3')
)


_HTTP = _descriptor.Descriptor(
  name='Http',
  full_name='google.api.Http',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='rules', full_name='google.api.Http.rules', index=0,
      number=1, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=37,
  serialized_end=80,
)


_HTTPRULE = _descriptor.Descriptor(
  name='HttpRule',
  full_name='google.api.HttpRule',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='selector', full_name='google.api.HttpRule.selector', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='get', full_name='google.api.HttpRule.get', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='put', full_name='google.api.HttpRule.put', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='post', full_name='google.api.HttpRule.post', index=3,
      number=4, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='delete', full_name='google.api.HttpRule.delete', index=4,
      number=5, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='patch', full_name='google.api.HttpRule.patch', index=5,
      number=6, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='custom', full_name='google.api.HttpRule.custom', index=6,
      number=8, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='body', full_name='google.api.HttpRule.body', index=7,
      number=7, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='additional_bindings', full_name='google.api.HttpRule.additional_bindings', index=8,
      number=11, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
    _descriptor.OneofDescriptor(
      name='pattern', full_name='google.api.HttpRule.pattern',
      index=0, containing_type=None, fields=[]),
  ],
  serialized_start=83,
  serialized_end=317,
)


_CUSTOMHTTPPATTERN = _descriptor.Descriptor(
  name='CustomHttpPattern',
  full_name='google.api.CustomHttpPattern',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='kind', full_name='google.api.CustomHttpPattern.kind', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='path', full_name='google.api.CustomHttpPattern.path', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=319,
  serialized_end=366,
)

_HTTP.fields_by_name['rules'].message_type = _HTTPRULE
_HTTPRULE.fields_by_name['custom'].message_type = _CUSTOMHTTPPATTERN
_HTTPRULE.fields_by_name['additional_bindings'].message_type = _HTTPRULE
_HTTPRULE.oneofs_by_name['pattern'].fields.append(
  _HTTPRULE.fields_by_name['get'])
_HTTPRULE.fields_by_name['get'].containing_oneof = _HTTPRULE.oneofs_by_name['pattern']
_HTTPRULE.oneofs_by_name['pattern'].fields.append(
  _HTTPRULE.fields_by_name['put'])
_HTTPRULE.fields_by_name['put'].containing_oneof = _HTTPRULE.oneofs_by_name['pattern']
_HTTPRULE.oneofs_by_name['pattern'].fields.append(
  _HTTPRULE.fields_by_name['post'])
_HTTPRULE.fields_by_name['post'].containing_oneof = _HTTPRULE.oneofs_by_name['pattern']
_HTTPRULE.oneofs_by_name['pattern'].fields.append(
  _HTTPRULE.fields_by_name['delete'])
_HTTPRULE.fields_by_name['delete'].containing_oneof = _HTTPRULE.oneofs_by_name['pattern']
_HTTPRULE.oneofs_by_name['pattern'].fields.append(
  _HTTPRULE.fields_by_name['patch'])
_HTTPRULE.fields_by_name['patch'].containing_oneof = _HTTPRULE.oneofs_by_name['pattern']
_HTTPRULE.oneofs_by_name['pattern'].fields.append(
  _HTTPRULE.fields_by_name['custom'])
_HTTPRULE.fields_by_name['custom'].containing_oneof = _HTTPRULE.oneofs_by_name['pattern']
DESCRIPTOR.message_types_by_name['Http'] = _HTTP
DESCRIPTOR.message_types_by_name['HttpRule'] = _HTTPRULE
DESCRIPTOR.message_types_by_name['CustomHttpPattern'] = _CUSTOMHTTPPATTERN
_sym_db.RegisterFileDescriptor(DESCRIPTOR)

Http = _reflection.GeneratedProtocolMessageType('Http', (_message.Message,), dict(
  DESCRIPTOR = _HTTP,
  __module__ = 'google.api.http_pb2'
  # @@protoc_insertion_point(class_scope:google.api.Http)
  ))
_sym_db.RegisterMessage(Http)

HttpRule = _reflection.GeneratedProtocolMessageType('HttpRule', (_message.Message,), dict(
  DESCRIPTOR = _HTTPRULE,
  __module__ = 'google.api.http_pb2'
  # @@protoc_insertion_point(class_scope:google.api.HttpRule)
  ))
_sym_db.RegisterMessage(HttpRule)

CustomHttpPattern = _reflection.GeneratedProtocolMessageType('CustomHttpPattern', (_message.Message,), dict(
  DESCRIPTOR = _CUSTOMHTTPPATTERN,
  __module__ = 'google.api.http_pb2'
  # @@protoc_insertion_point(class_scope:google.api.CustomHttpPattern)
  ))
_sym_db.RegisterMessage(CustomHttpPattern)


DESCRIPTOR.has_options = True
DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\016com.google.apiB\tHttpProtoP\001ZAgoogle.golang.org/genproto/googleapis/api/annotations;annotations\370\001\001\242\002\004GAPI'))
# @@protoc_insertion_point(module_scope)
2895	lib/ln/rpc_pb2.py
File diff suppressed because one or more lines are too long
@@ -569,7 +569,7 @@ def is_synced(network):
     return synced
 
 class Peer(PrintError):
-    def __init__(self, host, port, pubkey, privkey, request_initial_sync=False, network=None):
+    def __init__(self, host, port, pubkey, privkey, network, request_initial_sync=False):
         self.update_add_htlc_event = asyncio.Event()
         self.channel_update_event = asyncio.Event()
         self.host = host
@@ -648,7 +648,7 @@ class Peer(PrintError):
         # act 1
         self.writer.write(msg)
         rspns = await self.reader.read(2**10)
-        assert len(rspns) == 50
+        assert len(rspns) == 50, "Lightning handshake act 1 response has bad length, are you sure this is the right pubkey? " + str(bh2u(self.pubkey))
         hver, alice_epub, tag = rspns[0], rspns[1:34], rspns[34:]
         assert bytes([hver]) == hs.handshake_version
         # act 2
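For context on the 50-byte check: the reply to act one (act two of the BOLT-8 Noise handshake) is one version byte, a 33-byte compressed ephemeral public key and a 16-byte authentication tag, which is exactly how the next line slices it. A tiny helper making that layout explicit (illustrative only, not part of the diff):

def split_act_one_reply(rspns: bytes):
    # 1-byte handshake version + 33-byte compressed ephemeral pubkey + 16-byte tag = 50 bytes
    assert len(rspns) == 50
    return rspns[0], rspns[1:34], rspns[34:]
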
@@ -875,7 +875,7 @@ class Peer(PrintError):
         print('remote dust limit', remote_config.dust_limit_sat)
         assert remote_config.dust_limit_sat < 600
         assert int.from_bytes(payload['htlc_minimum_msat'], 'big') < 600 * 1000
-        assert remote_config.max_htlc_value_in_flight_msat >= 500 * 1000 * 1000, remote_config.max_htlc_value_in_flight_msat
+        assert remote_config.max_htlc_value_in_flight_msat >= 198 * 1000 * 1000, remote_config.max_htlc_value_in_flight_msat
         self.print_error('remote delay', remote_config.to_self_delay)
         self.print_error('funding_txn_minimum_depth', funding_txn_minimum_depth)
         # create funding tx
@@ -1370,32 +1370,69 @@ class Peer(PrintError):
         channel_id = int.from_bytes(payload["channel_id"], 'big')
         self.revoke_and_ack[channel_id].set_result(payload)
 
+class ConsoleInterface:
+    def __init__(self, lnworker):
+        self.lnworker = lnworker
+    def __repr__(self):
+        return str(dir(self))
+
 # replacement for lightningCall
 class LNWorker:
 
     def __init__(self, wallet, network):
-        self.privkey = H256(b"0123456789")
         self.wallet = wallet
         self.network = network
+        self.privkey = H256(b"0123456789")
         self.config = network.config
         self.peers = {}
         self.channels = {}
         peer_list = network.config.get('lightning_peers', node_list)
+        print("Adding", len(peer_list), "peers")
         for host, port, pubkey in peer_list:
             self.add_peer(host, port, pubkey)
+        self.console_interface = ConsoleInterface(self)
 
     def add_peer(self, host, port, pubkey):
-        peer = Peer(host, int(port), binascii.unhexlify(pubkey), self.privkey)
+        peer = Peer(host, int(port), binascii.unhexlify(pubkey), self.privkey, self.network)
         self.network.futures.append(asyncio.run_coroutine_threadsafe(peer.main_loop(), asyncio.get_event_loop()))
         self.peers[pubkey] = peer
 
-    def open_channel(self, pubkey, amount, push_msat, password):
-        keystore = self.wallet.keystore
-        peer = self.peers.get(pubkey)
+    def open_channel(self, peer, amount, push_msat, password):
         coro = peer.channel_establishment_flow(self.wallet, self.config, password, amount, push_msat, temp_channel_id=os.urandom(32))
-        fut = asyncio.run_coroutine_threadsafe(coro, self.network.asyncio_loop)
+        return asyncio.run_coroutine_threadsafe(coro, self.network.asyncio_loop)
+
+    def open_channel_from_other_thread(self, node_id, local_amt, push_amt, emit_function, get_password):
+        pw = get_password()
+        if pw is None:
+            # user pressed cancel
+            return
+        # TODO this could race on peers
+        peer = self.peers.get(node_id)
+        if peer is None:
+            if len(self.peers) != 1:
+                print("Peer not found, and peer list is empty or has multiple peers.")
+                return
+            peer = next(iter(self.peers.values()))
+        fut = self.open_channel(peer, local_amt, push_amt, None if pw == "" else pw)
+        chan = fut.result()
+        # https://api.lightning.community/#listchannels
+        std_chan = {"chan_id": chan.channel_id}
+        emit_function({"channels": [std_chan]})
+
+    def subscribe_payment_received_from_other_thread(self, emit_function):
+        pass
+
+    def subscribe_channel_list_updates_from_other_thread(self, emit_function):
+        pass
+
+    def subscribe_single_channel_update_from_other_thread(self, emit_function):
+        pass
+
+    def add_invoice_from_other_thread(self, amt):
+        pass
+
+    def subscribe_invoice_added_from_other_thread(self, emit_function):
+        pass
 
 class ChannelInfo(PrintError):
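The *_from_other_thread methods are the surface the ported Qt GUI calls into; open_channel_from_other_thread blocks on fut.result(), so it is meant to run off the GUI main thread. A rough usage sketch with stand-ins for the Qt password dialog and signal (the node id is the hex pubkey string used as key in self.peers; the amounts are illustrative):

def open_channel_clicked(lnworker, node_id_hex):
    def get_password():
        return ""        # "" -> unencrypted wallet, None -> user pressed cancel
    def emit_function(channel_list):
        print("channels updated:", channel_list)
    lnworker.open_channel_from_other_thread(
        node_id=node_id_hex,
        local_amt=200000,   # funding amount, illustrative
        push_amt=0,
        emit_function=emit_function,
        get_password=get_password)
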
@@ -106,7 +106,7 @@ if __name__ == "__main__":
     privkey = wallet.storage.get("channels_privkey", None)
     assert privkey is not None
     privkey = bfh(privkey)
-    peer = Peer(host, port, pubkey, privkey, request_initial_sync=True, network=network)
+    peer = Peer(host, port, pubkey, privkey, network, request_initial_sync=True)
     network.futures.append(asyncio.run_coroutine_threadsafe(peer.main_loop(), network.asyncio_loop))
 
     funding_satoshis = 2000000
@@ -1,15 +0,0 @@
#!/bin/sh -ex
if [ ! -d $HOME/go/src/github.com/grpc-ecosystem ]; then
  # from readme in https://github.com/grpc-ecosystem/grpc-gateway
  go get -u github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway
  go get -u github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger
  go get -u github.com/golang/protobuf/protoc-gen-go
fi
if [ ! -d $HOME/go/src/github.com/lightningnetwork/lnd ]; then
  echo "You need an lnd with electrum-bridge (ysangkok/lnd maybe?) checked out since we implement the interface from there, and need it to generate code"
  exit 1
fi
mkdir -p lib/ln || true
touch lib/__init__.py
~/go/bin/protoc -I$HOME/include -I$HOME/go/src/github.com/grpc-ecosystem/grpc-gateway/third_party/googleapis --python_out=lib/ln $HOME/go/src/github.com/grpc-ecosystem/grpc-gateway/third_party/googleapis/google/api/*.proto
python3 -m grpc_tools.protoc -I $HOME/go/src/github.com/grpc-ecosystem/grpc-gateway/third_party/googleapis --proto_path $HOME/go/src/github.com/lightningnetwork/lnd/electrum-bridge --python_out=lib/ln --grpc_python_out=lib/ln ~/go/src/github.com/lightningnetwork/lnd/electrum-bridge/rpc.proto
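The two protoc invocations above emit the google.api helper modules and the rpc_pb2 / rpc_pb2_grpc stubs into lib/ln; the hub-era lightning code earlier in this diff round-trips requests through them with json_format. A minimal sketch of that round-trip (the import path and the example field values are assumptions, not taken from this diff):

from google.protobuf import json_format
from lib.ln import rpc_pb2  # assumption: generated into lib/ln as --python_out above suggests

req = rpc_pb2.DeriveKeyRequest()
json_format.Parse('{"keyLocator": {"family": 9000, "index": 0}}', req)
print(req.keyLocator.family, req.keyLocator.index)
print(json_format.MessageToJson(req))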