Compare commits: master...3.2.3

No commits in common. "master" and "3.2.3" have entirely different histories.

396 changed files with 15395 additions and 45075 deletions.

.gitignore

@@ -4,39 +4,19 @@
build/
dist/
*.egg/
contrib/pyinstaller/
Electrum.egg-info/
electrum/locale/
electrum/gui/qt/icons_rc.py
locale/
.devlocaltmp/
*_trial_temp
packages
env/
.tox/
.buildozer/
bin/
/app.fil
.idea
.mypy_cache
.vscode
# icons
# tox files
electrum/gui/kivy/theming/light-0.png
electrum/gui/kivy/theming/light-1.png
electrum/gui/kivy/theming/light.atlas
# tests/tox
.tox/
.cache/
.coverage
.pytest_cache
# build workspaces
contrib/build-wine/tmp/
contrib/build-wine/fresh_clone/
contrib/build-linux/appimage/build/
contrib/build-linux/appimage/.cache/
contrib/android_debug.keystore
# shared objects
electrum/*.so
electrum/*.so.0
electrum/*.dll
electrum/*.dylib

.gitmodules

@@ -1,9 +1,6 @@
[submodule "contrib/deterministic-build/electrum-icons"]
path = contrib/deterministic-build/electrum-icons
url = https://github.com/spesmilo/electrum-icons
[submodule "contrib/deterministic-build/electrum-locale"]
path = contrib/deterministic-build/electrum-locale
url = https://github.com/spesmilo/electrum-locale
[submodule "contrib/CalinsQRReader"]
path = contrib/osx/CalinsQRReader
url = https://github.com/spesmilo/CalinsQRReader
[submodule "electrum/www"]
path = electrum/www
url = https://github.com/spesmilo/electrum-http.git

.travis.yml

@@ -1,19 +1,16 @@
sudo: true
dist: xenial
language: python
python:
- 3.5
- 3.6
- 3.7
- 3.8
git:
depth: false
addons:
apt:
sources:
- sourceline: 'ppa:tah83/secp256k1'
packages:
- libsecp256k1-0
before_install:
- git tag
install:
- pip install -r contrib/requirements/requirements-travis.txt
cache:
@@ -23,33 +20,12 @@ cache:
script:
- tox
after_success:
- if [ "$TRAVIS_BRANCH" = "master" ]; then pip install requests && contrib/push_locale; fi
- if [ "$TRAVIS_BRANCH" = "master" ]; then pip install pycurl requests && contrib/make_locale; fi
- coveralls
jobs:
include:
- name: "Regtest functional tests"
language: python
python: 3.7
before_install:
- sudo add-apt-repository -y ppa:bitcoin/bitcoin
- sudo apt-get -qq update
- sudo apt-get install -yq bitcoind
install:
- pip install -r contrib/requirements/requirements.txt
- pip install electrumx
before_script:
- electrum/tests/regtest/start_bitcoind.sh
- electrum/tests/regtest/start_electrumx.sh
script:
- python -m unittest electrum/tests/regtest.py
after_success: True
- name: "Flake8 linter tests"
language: python
install: pip install flake8
script: flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
- stage: binary builds
if: branch = master
sudo: true
name: "Windows build"
language: c
python: false
env:
@@ -57,56 +33,25 @@ jobs:
services:
- docker
install:
- sudo docker build --no-cache -t electrum-wine-builder-img ./contrib/build-wine/
- sudo docker build --no-cache -t electrum-wine-builder-img ./contrib/build-wine/docker/
script:
- sudo docker run --name electrum-wine-builder-cont -v $PWD:/opt/wine64/drive_c/electrum --rm --workdir /opt/wine64/drive_c/electrum/contrib/build-wine electrum-wine-builder-img ./build.sh
after_success: true
- if: branch = master
- os: osx
name: "Android build"
language: python
python: 3.7
services:
- docker
install:
- pip install requests && ./contrib/pull_locale
- ./contrib/make_packages
- sudo docker build --no-cache -t electrum-android-builder-img electrum/gui/kivy/tools
script:
- sudo chown -R 1000:1000 .
# Output something every minute or Travis kills the job
- while sleep 60; do echo "=====[ $SECONDS seconds still running ]====="; done &
- sudo docker run -it -u 1000:1000 --rm --name electrum-android-builder-cont --env CI=true -v $PWD:/home/user/wspace/electrum --workdir /home/user/wspace/electrum electrum-android-builder-img ./contrib/make_apk
# kill background sleep loop
- kill %1
- ls -la bin
- if [ $(ls bin | grep -c Electrum-*) -eq 0 ]; then exit 1; fi
after_success: true
- if: branch = master
name: "MacOS build"
os: osx
language: c
env:
- TARGET_OS=macOS
python: false
install:
- git fetch --all --tags
script: ./contrib/osx/make_osx
- git fetch origin --unshallow
script: ./contrib/build-osx/make_osx
after_script: ls -lah dist && md5 dist/*
after_success: true
- if: branch = master
name: "AppImage build"
language: c
python: false
services:
- docker
install:
- sudo docker build --no-cache -t electrum-appimage-builder-img ./contrib/build-linux/appimage/
script:
- sudo docker run --name electrum-appimage-builder-cont -v $PWD:/opt/electrum --rm --workdir /opt/electrum/contrib/build-linux/appimage electrum-appimage-builder-img ./build.sh
after_success: true
- stage: release check
install:
- git fetch --all --tags
- git fetch origin --unshallow
script:
- ./contrib/deterministic-build/check_submodules.sh
after_success: true

Info.plist (new file)

@@ -0,0 +1,22 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>CFBundleURLTypes</key>
<array>
<dict>
<key>CFBundleURLName</key>
<string>bitcoin</string>
<key>CFBundleURLSchemes</key>
<array>
<string>bitcoin</string>
</array>
</dict>
</array>
<key>LSArchitecturePriority</key>
<array>
<string>x86_64</string>
<string>i386</string>
</array>
</dict>
</plist>
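
For context, the CFBundleURLTypes entry above registers the bitcoin: URL scheme with macOS, so Launch Services hands such URIs to the bundled app. A minimal way to exercise the handler from a terminal, assuming a built Electrum.app is installed (the address and amount below are placeholders, not values from this repository), is:

```
# Ask Launch Services to dispatch a bitcoin: URI to whichever app
# has registered the scheme (placeholder URI for illustration only).
open "bitcoin:bc1qexampleplaceholderaddress?amount=0.001"
```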

MANIFEST.in

@@ -1,5 +1,6 @@
include LICENCE RELEASE-NOTES AUTHORS
include README.rst
include electrum.conf.sample
include electrum.desktop
include *.py
include run_electrum
@@ -7,15 +8,11 @@ include contrib/requirements/requirements.txt
include contrib/requirements/requirements-hw.txt
recursive-include packages *.py
recursive-include packages cacert.pem
include icons.qrc
graft icons
graft electrum
prune electrum/tests
graft contrib/udev
exclude electrum/*.so
exclude electrum/*.so.0
global-exclude __pycache__
global-exclude *.py[co~]
global-exclude *.py[co]
global-exclude *.py.orig
global-exclude *.py.rej

README.rst

@@ -1,37 +1,38 @@
LBRY Vault - Lightweight LBRY Credit client
Electrum - Lightweight Bitcoin client
=====================================
Guides
===============
::
Guide for Ledger devices -
https://kodxana.github.io/LBRY-Vault-website/
Licence: MIT Licence
Author: Thomas Voegtlin
Language: Python
Homepage: https://electrum.org/
.. image:: https://travis-ci.org/spesmilo/electrum.svg?branch=master
:target: https://travis-ci.org/spesmilo/electrum
:alt: Build Status
.. image:: https://coveralls.io/repos/github/spesmilo/electrum/badge.svg?branch=master
:target: https://coveralls.io/github/spesmilo/electrum?branch=master
:alt: Test coverage statistics
.. image:: https://d322cqt584bo4o.cloudfront.net/electrum/localized.svg
:target: https://crowdin.com/project/electrum
:alt: Help translate Electrum online
Getting started
===============
LBRY Vault itself is pure Python, and so are most of the required dependencies.
Electrum is a pure python application. If you want to use the
Qt interface, install the Qt dependencies::
Non-python dependencies
-----------------------
If you want to use the Qt interface, install the Qt dependencies::
sudo apt-get install python3-pyqt5
For elliptic curve operations, libsecp256k1 is a required dependency::
sudo apt-get install libsecp256k1-0
Alternatively, when running from a cloned repository, a script is provided to build
libsecp256k1 yourself::
./contrib/make_libsecp256k1.sh
Running from tar.gz
-------------------
If you downloaded the official package (tar.gz), you can run
LBRY Vault from its root directory without installing it on your
Electrum from its root directory, without installing it on your
system; all the python dependencies are included in the 'packages'
directory. To run Electrum from its root directory, just do::
@@ -39,30 +40,40 @@ directory. To run Electrum from its root directory, just do::
You can also install Electrum on your system, by running this command::
sudo apt-get install python3-setuptools python3-pip
sudo apt-get install python3-setuptools
python3 -m pip install --user .
pip3 install .[fast]
This will download and install the Python dependencies used by
LBRY Vault instead of using the 'packages' directory.
Electrum, instead of using the 'packages' directory.
The 'fast' extra contains some optional dependencies that we think
are often useful but they are not strictly needed.
If you cloned the git repository, you need to compile extra files
before you can run LBRY Vault. Read the next section, "Development
before you can run Electrum. Read the next section, "Development
version".
Version".
Development version
-------------------
===================
Check out the code from GitHub::
git clone git://github.com/kodxana/LBRY-Vault.git
git clone git://github.com/spesmilo/electrum.git
cd electrum
git submodule update --init
Run install (this should install dependencies)::
python3 -m pip install --user .
pip3 install .[fast]
Render the SVG icons to PNGs (optional)::
for i in lock unlock confirmed status_lagging status_disconnected status_connected_proxy status_connected status_waiting preferences; do convert -background none icons/$i.svg icons/$i.png; done
Compile the icons file for Qt::
sudo apt-get install pyqt5-dev-tools
pyrcc5 icons.qrc -o electrum/gui/qt/icons_rc.py
Compile the protobuf description file::
@@ -72,7 +83,7 @@ Compile the protobuf description file::
Create translations (optional)::
sudo apt-get install python-requests gettext
./contrib/pull_locale
./contrib/make_locale
@@ -80,31 +91,25 @@ Create translations (optional)::
Creating Binaries
=================
Linux (tarball)
---------------
See :code:`contrib/build-linux/README.md`.
To create binaries, create the 'packages' directory::
./contrib/make_packages
Linux (AppImage)
This directory contains the python dependencies used by Electrum.
----------------
See :code:`contrib/build-linux/appimage/README.md`.
Mac OS X / macOS
----------------
--------
See :code:`contrib/osx/README.md`.
See `contrib/build-osx/`.
Windows
-------
See :code:`contrib/build-wine/README.md`.
See `contrib/build-wine/`.
Android
-------
See :code:`electrum/gui/kivy/Readme.md`.
See `electrum/gui/kivy/Readme.md` file.

RELEASE-NOTES

@@ -1,174 +1,3 @@
# Release 4.0 - (Not released yet; release notes are incomplete)
* Lightning Network
* Qt GUI: Separation between output selection and transaction finalization.
* Http PayServer can be configured from GUI
# Release 3.3.8 - (July 11, 2019)
* fix some bugs with recent bump fee (RBF) improvements (#5483, #5502)
* fix #5491: watch-only wallets could not bump fee in some cases
* appimage: URLs could not be opened on some desktop environments (#5425)
* faster tx signing for segwit inputs for really large txns (#5494)
* A few other minor bugfixes and usability improvements.
# Release 3.3.7 - (July 3, 2019)
* The AppImage Linux x86_64 binary and the Windows setup.exe
(so now all Windows binaries) are now built reproducibly.
* Bump fee (RBF) improvements:
Implemented a new fee-bump strategy that can add new inputs,
so now any tx can be fee-bumped (d0a4366). The old strategy
was to decrease the value of outputs (starting with change).
We will now try the new strategy first, and only use the old
as a fallback (needed e.g. when spending "Max").
* CoinChooser improvements:
- more likely to construct txs without change (when possible)
- less likely to construct txs with really small change (e864fa5)
- will now only spend negative effective value coins when
beneficial for privacy (cb69aa8)
* fix long-standing bug that broke wallets with >65k addresses (#5366)
* Windows binaries: we now build the PyInstaller boot loader ourselves,
as this seems to reduce anti-virus false positives (1d0f679)
* Android: (fix) BIP70 payment requests could not be paid (#5376)
* Android: allow copy-pasting partial transactions from/to clipboard
* Fix a performance regression for large wallets (c6a54f0)
* Qt: fix some high DPI issues related to text fields (37809be)
* Trezor:
- allow bypassing "too old firmware" error (#5391)
- use only the Bridge to scan devices if it is available (#5420)
* hw wallets: (known issue) on Win10-1903, some hw devices
(that also have U2F functionality) can only be detected with
Administrator privileges. (see #5420 and #5437)
A workaround is to run as Admin, or for Trezor to install the Bridge.
* Several other minor bugfixes and usability improvements.
# Release 3.3.6 - (May 16, 2019)
* qt: fix crash during 2FA wallet creation (#5334)
* fix synchronizer not to keep resubscribing to addresses of
already closed wallets (e415c0d9)
* fix removing addresses/keys from imported wallets (#4481)
* kivy: fix crash when aborting 2FA wallet creation (#5333)
* kivy: fix rare crash when changing exchange rate settings (#5329)
* A few other minor bugfixes and usability improvements.
# Release 3.3.5 - (May 9, 2019)
* The logging system has been overhauled (#5296).
Logs can now also optionally be written to disk, disabled by default.
* Fix a bug in synchronizer (#5122) where client could get stuck.
Also, show the progress of history sync in the GUI. (#5319)
* fix Revealer in Windows and MacOS binaries (#5027)
* fiat rate providers:
- added CoinGecko.com and CoinCap.io
- BitcoinAverage now only provides historical exchange rates for
paying customers. Changed default provider to CoinGecko.com (#5188)
* hardware wallets:
- Ledger: Nano X is now recognized (#5140)
- KeepKey:
- device was not getting detected using Windows binary (#5165)
- support firmware 6.0.0+ (#5205)
- Trezor: implemented "seedless" mode (#5118)
* Coin Control in Qt: implemented freezing individual UTXOs
in addition to freezing addresses (#5152)
* TrustedCoin (2FA wallets):
- better error messages (#5184)
- longer signing timeout (#5221)
* Kivy:
- fix bug with local transactions (#5156)
- allow selecting fiat rate providers without historical data (#5162)
* fix CPFP: the fees already paid by the parent were not included in
the calculation, so it always overestimated (#5244)
* Testnet: there is now a warning when the client is started in
testnet mode as there were a number of reports of users getting
scammed through social engineering (#5295)
* CoinChooser: performance of creating transactions has been improved
significantly for large wallets. (d56917f4)
* Importing/sweeping WIF keys: stricter checks (#4638, #5290)
* Electrum protocol: the client's "user agent" has been changed from
"3.3.5" to "electrum/3.3.5". Other libraries connecting to servers
can consider not "spoofing" to be Electrum. (#5246)
* Several other minor bugfixes and usability improvements.
# Release 3.3.4 - (February 13, 2019)
* AppImage: we now also distribute self-contained binaries for x86_64
Linux in the form of an AppImage (#5042). The Python interpreter,
PyQt5, libsecp256k1, PyCryptodomex, zbar, hidapi/libusb (including
hardware wallet libraries) are all bundled. Note that users of
hw wallets still need to set udev rules themselves.
* hw wallets: fix a regression during transaction signing that prompts
the user too many times for confirmations (commit 2729909)
* transactions now set nVersion to 2, to mimic Bitcoin Core
* fix Qt bug that made all hw wallets unusable on Windows 8.1 (#4960)
* fix bugs in wallet creation wizard that resulted in corrupted
wallets being created in rare cases (#5082, #5057)
* fix compatibility with Qt 5.12 (#5109)
# Release 3.3.3 - (January 25, 2019)
* Do not expose users to server error messages (#4968)
* Notify users of new releases. Release announcements must be signed,
and they are verified by Electrum using a hardcoded Bitcoin address.
* Hardware wallet fixes (#4991, #4993, #5006)
* Display only QR code in QRcode Window
* Fixed code signing on MacOS
* Randomise locktime of transactions
# Release 3.3.2 - (December 21, 2018)
* Fix Qt history export bug
* Improve network timeouts
* Prepend server transaction_broadcast error messages with
explanatory message. Render error messages as plain text.
# Release 3.3.1 - (December 20, 2018)
* Qt: Fix invoices tab crash (#4941)
* Android: Minor GUI improvements
# Release 3.3.0 - Hodler's Edition (December 19, 2018)
* The network layer has been rewritten using asyncio and aiorpcx.
In addition to easier maintenance, this makes the client
more robust against misbehaving servers.
* The minimum python version was increased to 3.6
* The blockchain headers and fork handling logic has been generalized.
Clients by default now follow chain based on most work, not length.
* New wallet creation defaults to native segwit (bech32).
* Segwit 2FA: TrustedCoin now supports native segwit p2wsh
two-factor wallets.
* RBF batching (opt-in): If the wallet has an unconfirmed RBF
transaction, new payments will be added to that transaction,
instead of creating new transactions.
* MacOS: support QR code scanner in binaries.
* Android APK:
- build using Google NDK instead of Crystax NDK
- target API 28
- do not use external storage (previously for block headers)
* hardware wallets:
- Coldcard now supports spending from p2wpkh-p2sh,
fixed p2pkh signing for fw 1.1.0
- Archos Safe-T mini: fix #4726 signing issue
- KeepKey: full segwit support
- Trezor: refactoring and compat with python-trezor 0.11
- Digital BitBox: support firmware v5.0.0
* fix bitcoin URI handling when app already running (#4796)
* Qt listings rewritten:
the History tab now uses QAbstractItemModel, the other tabs use
QStandardItemModel. Performance should be better for large wallets.
* Several other minor bugfixes and usability improvements.
# Release 3.2.3 - (September 3, 2018)
* hardware wallet: the Safe-T mini from Archos is now supported.
@@ -394,7 +223,7 @@ issue #3374. Users should upgrade to 3.0.5.
* Qt GUI: sweeping now uses the Send tab, allowing fees to be set
* Windows: if using the installer binary, there is now a separate shortcut
for "Electrum Testnet"
* Digital Bitbox: added support for p2sh-segwit
* Digital Bitbox: added suport for p2sh-segwit
* OS notifications for incoming transactions
* better transaction size estimation:
- fees for segwit txns were somewhat underestimated (#3347)
@@ -622,7 +451,7 @@ issue #3374. Users should upgrade to 3.0.5.
# Release 2.7.7
* Fix utf8 encoding bug with old wallet seeds (issue #1967)
* Fix delete request from menu (issue #1968)
* Fix delete request from menu (isue #1968)
# Release 2.7.6
* Fixes a critical bug with imported private keys (issue #1966). Keys
@@ -985,7 +814,7 @@ issue #3374. Users should upgrade to 3.0.5.
* New 'Receive' tab in the GUI:
- create and manage payment requests, with QR Codes
- the former 'Receive' tab was renamed to 'Addresses'
- the former Point of Sale plugin is replaced by a resizable
- the former Point of Sale plugin is replaced by a resizeable
window that pops up if you click on the QR code
* The 'Send' tab in the Qt GUI supports transactions with multiple
@@ -1008,7 +837,7 @@ issue #3374. Users should upgrade to 3.0.5.
* The client accepts servers with a CA-signed SSL certificate.
* ECIES encrypt/decrypt methods, available in the GUI and using
* ECIES encrypt/decrypt methods, availabe in the GUI and using
the command line:
encrypt <pubkey> <message>
decrypt <pubkey> <message>
@@ -1081,7 +910,7 @@ bugfixes: connection problems, transactions staying unverified
# Release 1.8.1
* Notification option when receiving new transactions
* Notification option when receiving new tranactions
* Confirm dialogue before sending large amounts
* Alternative datafile location for non-windows systems
* Fix offline wallet creation

SECURITY.md

@@ -1,19 +0,0 @@
# Security Policy
## Reporting a Vulnerability
To report security issues send an email to electrumdev@gmail.com.
The following keys may be used to communicate sensitive information to developers:
| Name | Fingerprint |
|------|-------------|
| ThomasV | 6694 D8DE 7BE8 EE56 31BE D950 2BD5 824B 7F94 70E6 |
| SomberNight | 4AD6 4339 DFA0 5E20 B3F6 AD51 E7B7 48CD AF5E 5ED9 |
You can import a key by running the following command with that
individual's fingerprint: `gpg --recv-keys "<fingerprint>"`
Ensure that you put quotes around fingerprints containing spaces.
These public keys can also be found in the Electrum git repository,
in the top-level `pubkeys` folder.
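
As an illustration of the import step described above, using the fingerprints from the table (quoted because they contain spaces; which keyserver is contacted depends on the local gpg configuration):

```
# Import the developers' keys by fingerprint, as the policy above describes.
gpg --recv-keys "6694 D8DE 7BE8 EE56 31BE D950 2BD5 824B 7F94 70E6"
gpg --recv-keys "4AD6 4339 DFA0 5E20 B3F6 AD51 E7B7 48CD AF5E 5ED9"
```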


@@ -1,16 +0,0 @@
Source tarballs
===============
✗ _This script does not produce reproducible output (yet!)._
1. Prepare python dependencies used by Electrum.
```
contrib/make_packages
```
2. Create source tarball.
```
contrib/make_tgz
```


@@ -1,31 +0,0 @@
FROM ubuntu:16.04@sha256:a4fc0c40360ff2224db3a483e5d80e9164fe3fdce2a8439d2686270643974632
ENV LC_ALL=C.UTF-8 LANG=C.UTF-8
RUN apt-get update -q && \
apt-get install -qy \
git=1:2.7.4-0ubuntu1.9 \
wget=1.17.1-1ubuntu1.5 \
make=4.1-6 \
autotools-dev=20150820.1 \
autoconf=2.69-9 \
libtool=2.4.6-0.1 \
xz-utils=5.1.1alpha+20120614-2ubuntu2 \
libssl-dev=1.0.2g-1ubuntu4.18 \
libssl1.0.0=1.0.2g-1ubuntu4.18 \
openssl=1.0.2g-1ubuntu4.18 \
zlib1g-dev=1:1.2.8.dfsg-2ubuntu4.3 \
libffi-dev=3.2.1-4 \
libncurses5-dev=6.0+20160213-1ubuntu1 \
libsqlite3-dev=3.11.0-1ubuntu1.5 \
libusb-1.0-0-dev=2:1.0.20-1 \
libudev-dev=229-4ubuntu21.29 \
gettext=0.19.7-2ubuntu3.1 \
libzbar0=0.10+doc-10ubuntu1 \
libdbus-1-3=1.10.6-1ubuntu3.6 \
libxkbcommon-x11-0=0.5.0-1ubuntu2.1 \
libc6-dev=2.23-0ubuntu11.2 \
&& \
rm -rf /var/lib/apt/lists/* && \
apt-get autoremove -y && \
apt-get clean

contrib/build-linux/appimage/README.md

@@ -1,66 +0,0 @@
AppImage binary for Electrum
============================
✓ _This binary should be reproducible, meaning you should be able to generate
binaries that match the official releases._
This assumes an Ubuntu host, but it should not be too hard to adapt to another
similar system. The host architecture should be x86_64 (amd64).
The docker commands should be executed in the project's root folder.
We currently only build a single AppImage, for x86_64 architecture.
Help to adapt these scripts to build for (some flavor of) ARM would be welcome,
see [issue #5159](https://github.com/spesmilo/electrum/issues/5159).
1. Install Docker
```
$ curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo apt-key add -
$ sudo add-apt-repository "deb [arch=amd64] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable"
$ sudo apt-get update
$ sudo apt-get install -y docker-ce
```
2. Build image
```
$ sudo docker build -t electrum-appimage-builder-img contrib/build-linux/appimage
```
3. Build binary
```
$ sudo docker run -it \
--name electrum-appimage-builder-cont \
-v $PWD:/opt/electrum \
--rm \
--workdir /opt/electrum/contrib/build-linux/appimage \
electrum-appimage-builder-img \
./build.sh
```
4. The generated binary is in `./dist`.
## FAQ
### How can I see what is included in the AppImage?
Execute the binary as follows: `./electrum*.AppImage --appimage-extract`
### How to investigate diff between binaries if reproducibility fails?
```
cd dist/
./electrum-*-x86_64.AppImage1 --appimage-extract
mv squashfs-root/ squashfs-root1/
./electrum-*-x86_64.AppImage2 --appimage-extract
mv squashfs-root/ squashfs-root2/
$(cd squashfs-root1; find -type f -exec sha256sum '{}' \; > ./../sha256sum1)
$(cd squashfs-root2; find -type f -exec sha256sum '{}' \; > ./../sha256sum2)
diff sha256sum1 sha256sum2 > d
cat d
```
Useful binary comparison tools:
- vbindiff
- diffoscope

contrib/build-linux/appimage/apprun.sh

@@ -1,11 +0,0 @@
#!/bin/bash
set -e
APPDIR="$(dirname "$(readlink -e "$0")")"
export LD_LIBRARY_PATH="${APPDIR}/usr/lib/:${APPDIR}/usr/lib/x86_64-linux-gnu${LD_LIBRARY_PATH+:$LD_LIBRARY_PATH}"
export PATH="${APPDIR}/usr/bin:${PATH}"
export LDFLAGS="-L${APPDIR}/usr/lib/x86_64-linux-gnu -L${APPDIR}/usr/lib"
exec "${APPDIR}/usr/bin/python3.7" -s "${APPDIR}/usr/bin/electrum" "$@"

contrib/build-linux/appimage/build.sh

@@ -1,243 +0,0 @@
#!/bin/bash
set -e
PROJECT_ROOT="$(dirname "$(readlink -e "$0")")/../../.."
CONTRIB="$PROJECT_ROOT/contrib"
CONTRIB_APPIMAGE="$CONTRIB/build-linux/appimage"
DISTDIR="$PROJECT_ROOT/dist"
BUILDDIR="$CONTRIB_APPIMAGE/build/appimage"
APPDIR="$BUILDDIR/electrum.AppDir"
CACHEDIR="$CONTRIB_APPIMAGE/.cache/appimage"
export GCC_STRIP_BINARIES="1"
# pinned versions
PYTHON_VERSION=3.7.6
PKG2APPIMAGE_COMMIT="eb8f3acdd9f11ab19b78f5cb15daa772367daf15"
SQUASHFSKIT_COMMIT="ae0d656efa2d0df2fcac795b6823b44462f19386"
VERSION=`git describe --tags --dirty --always`
APPIMAGE="$DISTDIR/electrum-$VERSION-x86_64.AppImage"
. "$CONTRIB"/build_tools_util.sh
rm -rf "$BUILDDIR"
mkdir -p "$APPDIR" "$CACHEDIR" "$DISTDIR"
# potential leftover from setuptools that might make pip put garbage in binary
rm -rf "$PROJECT_ROOT/build"
info "downloading some dependencies."
download_if_not_exist "$CACHEDIR/functions.sh" "https://raw.githubusercontent.com/AppImage/pkg2appimage/$PKG2APPIMAGE_COMMIT/functions.sh"
verify_hash "$CACHEDIR/functions.sh" "78b7ee5a04ffb84ee1c93f0cb2900123773bc6709e5d1e43c37519f590f86918"
download_if_not_exist "$CACHEDIR/appimagetool" "https://github.com/AppImage/AppImageKit/releases/download/12/appimagetool-x86_64.AppImage"
verify_hash "$CACHEDIR/appimagetool" "d918b4df547b388ef253f3c9e7f6529ca81a885395c31f619d9aaf7030499a13"
download_if_not_exist "$CACHEDIR/Python-$PYTHON_VERSION.tar.xz" "https://www.python.org/ftp/python/$PYTHON_VERSION/Python-$PYTHON_VERSION.tar.xz"
verify_hash "$CACHEDIR/Python-$PYTHON_VERSION.tar.xz" "55a2cce72049f0794e9a11a84862e9039af9183603b78bc60d89539f82cf533f"
info "building python."
tar xf "$CACHEDIR/Python-$PYTHON_VERSION.tar.xz" -C "$BUILDDIR"
(
cd "$BUILDDIR/Python-$PYTHON_VERSION"
LC_ALL=C export BUILD_DATE=$(date -u -d "@$SOURCE_DATE_EPOCH" "+%b %d %Y")
LC_ALL=C export BUILD_TIME=$(date -u -d "@$SOURCE_DATE_EPOCH" "+%H:%M:%S")
# Patch taken from Ubuntu http://archive.ubuntu.com/ubuntu/pool/main/p/python3.7/python3.7_3.7.6-1.debian.tar.xz
patch -p1 < "$CONTRIB_APPIMAGE/patches/python-3.7-reproducible-buildinfo.diff"
./configure \
--cache-file="$CACHEDIR/python.config.cache" \
--prefix="$APPDIR/usr" \
--enable-ipv6 \
--enable-shared \
-q
make -j4 -s || fail "Could not build Python"
make -s install > /dev/null || fail "Could not install Python"
# When building in docker on macOS, python builds with .exe extension because the
# case insensitive file system of macOS leaks into docker. This causes the build
# to result in a different output on macOS compared to Linux. We simply patch
# sysconfigdata to remove the extension.
# Some more info: https://bugs.python.org/issue27631
sed -i -e 's/\.exe//g' "$APPDIR"/usr/lib/python3.7/_sysconfigdata*
)
info "Building squashfskit"
git clone "https://github.com/squashfskit/squashfskit.git" "$BUILDDIR/squashfskit"
(
cd "$BUILDDIR/squashfskit"
git checkout "$SQUASHFSKIT_COMMIT"
make -C squashfs-tools mksquashfs || fail "Could not build squashfskit"
)
MKSQUASHFS="$BUILDDIR/squashfskit/squashfs-tools/mksquashfs"
"$CONTRIB"/make_libsecp256k1.sh || fail "Could not build libsecp"
cp -f "$PROJECT_ROOT/electrum/libsecp256k1.so.0" "$APPDIR/usr/lib/libsecp256k1.so.0" || fail "Could not copy libsecp to its destination"
appdir_python() {
env \
PYTHONNOUSERSITE=1 \
LD_LIBRARY_PATH="$APPDIR/usr/lib:$APPDIR/usr/lib/x86_64-linux-gnu${LD_LIBRARY_PATH+:$LD_LIBRARY_PATH}" \
"$APPDIR/usr/bin/python3.7" "$@"
}
python='appdir_python'
info "installing pip."
"$python" -m ensurepip
info "preparing electrum-locale."
(
cd "$PROJECT_ROOT"
git submodule update --init
pushd "$CONTRIB"/deterministic-build/electrum-locale
if ! which msgfmt > /dev/null 2>&1; then
fail "Please install gettext"
fi
for i in ./locale/*; do
dir="$PROJECT_ROOT/electrum/$i/LC_MESSAGES"
mkdir -p $dir
msgfmt --output-file="$dir/electrum.mo" "$i/electrum.po" || true
done
popd
)
info "installing electrum and its dependencies."
mkdir -p "$CACHEDIR/pip_cache"
"$python" -m pip install --no-dependencies --no-warn-script-location --cache-dir "$CACHEDIR/pip_cache" -r "$CONTRIB/deterministic-build/requirements.txt"
"$python" -m pip install --no-dependencies --no-warn-script-location --cache-dir "$CACHEDIR/pip_cache" -r "$CONTRIB/deterministic-build/requirements-binaries.txt"
"$python" -m pip install --no-dependencies --no-warn-script-location --cache-dir "$CACHEDIR/pip_cache" -r "$CONTRIB/deterministic-build/requirements-hw.txt"
"$python" -m pip install --no-dependencies --no-warn-script-location --cache-dir "$CACHEDIR/pip_cache" "$PROJECT_ROOT"
# was only needed during build time, not runtime
"$python" -m pip uninstall -y Cython
info "copying zbar"
cp "/usr/lib/x86_64-linux-gnu/libzbar.so.0" "$APPDIR/usr/lib/libzbar.so.0"
info "desktop integration."
cp "$PROJECT_ROOT/electrum.desktop" "$APPDIR/electrum.desktop"
cp "$PROJECT_ROOT/electrum/gui/icons/electrum.png" "$APPDIR/electrum.png"
# add launcher
cp "$CONTRIB_APPIMAGE/apprun.sh" "$APPDIR/AppRun"
info "finalizing AppDir."
(
export PKG2AICOMMIT="$PKG2APPIMAGE_COMMIT"
. "$CACHEDIR/functions.sh"
cd "$APPDIR"
# copy system dependencies
copy_deps; copy_deps; copy_deps
move_lib
# apply global appimage blacklist to exclude stuff
# move usr/include out of the way to preserve usr/include/python3.7m.
mv usr/include usr/include.tmp
delete_blacklisted
mv usr/include.tmp usr/include
) || fail "Could not finalize AppDir"
info "Copying additional libraries"
(
# On some systems it can cause problems to use the system libusb (on AppImage excludelist)
cp -f /usr/lib/x86_64-linux-gnu/libusb-1.0.so "$APPDIR/usr/lib/libusb-1.0.so" || fail "Could not copy libusb"
# some distros lack libxkbcommon-x11
cp -f /usr/lib/x86_64-linux-gnu/libxkbcommon-x11.so.0 "$APPDIR"/usr/lib/x86_64-linux-gnu || fail "Could not copy libxkbcommon-x11"
)
info "stripping binaries from debug symbols."
# "-R .note.gnu.build-id" also strips the build id
# "-R .comment" also strips the GCC version information
strip_binaries()
{
chmod u+w -R "$APPDIR"
{
printf '%s\0' "$APPDIR/usr/bin/python3.7"
find "$APPDIR" -type f -regex '.*\.so\(\.[0-9.]+\)?$' -print0
} | xargs -0 --no-run-if-empty --verbose strip -R .note.gnu.build-id -R .comment
}
strip_binaries
remove_emptydirs()
{
find "$APPDIR" -type d -empty -print0 | xargs -0 --no-run-if-empty rmdir -vp --ignore-fail-on-non-empty
}
remove_emptydirs
info "removing some unneeded stuff to decrease binary size."
rm -rf "$APPDIR"/usr/{share,include}
PYDIR="$APPDIR"/usr/lib/python3.7
rm -rf "$PYDIR"/{test,ensurepip,lib2to3,idlelib,turtledemo}
rm -rf "$PYDIR"/{ctypes,sqlite3,tkinter,unittest}/test
rm -rf "$PYDIR"/distutils/{command,tests}
rm -rf "$PYDIR"/config-3.7m-x86_64-linux-gnu
rm -rf "$PYDIR"/site-packages/{opt,pip,setuptools,wheel}
rm -rf "$PYDIR"/site-packages/Cryptodome/SelfTest
rm -rf "$PYDIR"/site-packages/{psutil,qrcode,websocket}/tests
for component in connectivity declarative help location multimedia quickcontrols2 serialport webengine websockets xmlpatterns ; do
rm -rf "$PYDIR"/site-packages/PyQt5/Qt/translations/qt${component}_*
rm -rf "$PYDIR"/site-packages/PyQt5/Qt/resources/qt${component}_*
done
rm -rf "$PYDIR"/site-packages/PyQt5/Qt/{qml,libexec}
rm -rf "$PYDIR"/site-packages/PyQt5/{pyrcc.so,pylupdate.so,uic}
rm -rf "$PYDIR"/site-packages/PyQt5/Qt/plugins/{bearer,gamepads,geometryloaders,geoservices,playlistformats,position,renderplugins,sceneparsers,sensors,sqldrivers,texttospeech,webview}
for component in Bluetooth Concurrent Designer Help Location NetworkAuth Nfc Positioning PositioningQuick Qml Quick Sensors SerialPort Sql Test Web Xml ; do
rm -rf "$PYDIR"/site-packages/PyQt5/Qt/lib/libQt5${component}*
rm -rf "$PYDIR"/site-packages/PyQt5/Qt${component}*
done
rm -rf "$PYDIR"/site-packages/PyQt5/Qt.so
# these are deleted as they were not deterministic; and are not needed anyway
find "$APPDIR" -path '*/__pycache__*' -delete
# note that jsonschema-*.dist-info is needed by that package as it uses 'pkg_resources.get_distribution'
# also, see https://gitlab.com/python-devs/importlib_metadata/issues/71
for f in "$PYDIR"/site-packages/jsonschema-*.dist-info; do mv "$f" "$(echo "$f" | sed s/\.dist-info/\.dist-info2/)"; done
for f in "$PYDIR"/site-packages/importlib_metadata-*.dist-info; do mv "$f" "$(echo "$f" | sed s/\.dist-info/\.dist-info2/)"; done
rm -rf "$PYDIR"/site-packages/*.dist-info/
rm -rf "$PYDIR"/site-packages/*.egg-info/
for f in "$PYDIR"/site-packages/jsonschema-*.dist-info2; do mv "$f" "$(echo "$f" | sed s/\.dist-info2/\.dist-info/)"; done
for f in "$PYDIR"/site-packages/importlib_metadata-*.dist-info2; do mv "$f" "$(echo "$f" | sed s/\.dist-info2/\.dist-info/)"; done
find -exec touch -h -d '2000-11-11T11:11:11+00:00' {} +
info "creating the AppImage."
(
cd "$BUILDDIR"
cp "$CACHEDIR/appimagetool" "$CACHEDIR/appimagetool_copy"
# zero out "appimage" magic bytes, as on some systems they confuse the linker
sed -i 's|AI\x02|\x00\x00\x00|' "$CACHEDIR/appimagetool_copy"
chmod +x "$CACHEDIR/appimagetool_copy"
"$CACHEDIR/appimagetool_copy" --appimage-extract
# We build a small wrapper for mksquashfs that removes the -mkfs-fixed-time option
# that mksquashfs from squashfskit does not support. It is not needed for squashfskit.
cat > ./squashfs-root/usr/lib/appimagekit/mksquashfs << EOF
#!/bin/sh
args=\$(echo "\$@" | sed -e 's/-mkfs-fixed-time 0//')
"$MKSQUASHFS" \$args
EOF
env VERSION="$VERSION" ARCH=x86_64 SOURCE_DATE_EPOCH=1530212462 ./squashfs-root/AppRun --no-appstream --verbose "$APPDIR" "$APPIMAGE"
)
info "done."
ls -la "$DISTDIR"
sha256sum "$DISTDIR"/*

contrib/build-linux/appimage/patches/python-3.7-reproducible-buildinfo.diff

@@ -1,13 +0,0 @@
# DP: Build getbuildinfo.o with DATE/TIME values when defined
--- a/Makefile.pre.in
+++ b/Makefile.pre.in
@@ -766,6 +766,8 @@ Modules/getbuildinfo.o: $(PARSER_OBJS) \
-DGITVERSION="\"`LC_ALL=C $(GITVERSION)`\"" \
-DGITTAG="\"`LC_ALL=C $(GITTAG)`\"" \
-DGITBRANCH="\"`LC_ALL=C $(GITBRANCH)`\"" \
+ $(if $(BUILD_DATE),-DDATE='"$(BUILD_DATE)"') \
+ $(if $(BUILD_TIME),-DTIME='"$(BUILD_TIME)"') \
-o $@ $(srcdir)/Modules/getbuildinfo.c
Modules/getpath.o: $(srcdir)/Modules/getpath.c Makefile

contrib/build-osx/make_osx (mode changed: now executable)

@@ -13,7 +13,7 @@ src_dir=$(dirname "$0")
cd $src_dir/../..
export PYTHONHASHSEED=22
VERSION=`git describe --tags --dirty --always`
VERSION=`git describe --tags --dirty`
which brew > /dev/null 2>&1 || fail "Please install brew from https://brew.sh/ to continue"
@@ -30,7 +30,7 @@ fail "Unable to use Python $PYTHON_VERSION"
info "Installing pyinstaller"
python3 -m pip install -I --user pyinstaller==3.4 || fail "Could not install pyinstaller"
python3 -m pip install git+https://github.com/ecdsa/pyinstaller@fix_2952 -I --user || fail "Could not install pyinstaller"
info "Using these versions for building $PACKAGE:"
sw_vers
@@ -90,11 +90,5 @@ done
info "Building binary"
pyinstaller --noconfirm --ascii --clean --name $VERSION contrib/build-osx/osx.spec || fail "Could not build binary"
info "Adding bitcoin URI types to Info.plist"
plutil -insert 'CFBundleURLTypes' \
-xml '<array><dict> <key>CFBundleURLName</key> <string>bitcoin</string> <key>CFBundleURLSchemes</key> <array><string>bitcoin</string></array> </dict></array>' \
-- dist/$PACKAGE.app/Contents/Info.plist \
|| fail "Could not add keys to Info.plist. Make sure the program 'plutil' exists and is installed."
info "Creating .DMG"
hdiutil create -fs HFS+ -volname $PACKAGE -srcfolder dist/$PACKAGE.app dist/electrum-$VERSION.dmg || fail "Could not create .DMG"

contrib/build-osx/package.sh (mode changed: now executable)


@@ -1,43 +0,0 @@
FROM ubuntu:18.04@sha256:5f4bdc3467537cbbe563e80db2c3ec95d548a9145d64453b06939c4592d67b6d
ENV LC_ALL=C.UTF-8 LANG=C.UTF-8
RUN dpkg --add-architecture i386 && \
apt-get update -q && \
apt-get install -qy \
wget=1.19.4-1ubuntu2.2 \
gnupg2=2.2.4-1ubuntu1.2 \
dirmngr=2.2.4-1ubuntu1.2 \
python3-software-properties=0.96.24.32.1 \
software-properties-common=0.96.24.32.1
RUN apt-get update -q && \
apt-get install -qy \
git=1:2.17.1-1ubuntu0.5 \
p7zip-full=16.02+dfsg-6 \
make=4.1-9.1ubuntu1 \
mingw-w64=5.0.3-1 \
autotools-dev=20180224.1 \
autoconf=2.69-11 \
libtool=2.4.6-2 \
gettext=0.19.8.1-6
RUN wget -nc https://dl.winehq.org/wine-builds/Release.key && \
echo "c51bcb8cc4a12abfbd7c7660eaf90f49674d15e222c262f27e6c96429111b822 Release.key" | sha256sum -c - && \
apt-key add Release.key && \
rm Release.key && \
wget -nc https://dl.winehq.org/wine-builds/winehq.key && \
echo "78b185fabdb323971d13bd329fefc8038e08559aa51c4996de18db0639a51df6 winehq.key" | sha256sum -c - && \
apt-key add winehq.key && \
rm winehq.key && \
apt-add-repository https://dl.winehq.org/wine-builds/ubuntu/ && \
apt-get update -q && \
apt-get install -qy \
wine-stable-amd64:amd64=4.0.3~bionic \
wine-stable-i386:i386=4.0.3~bionic \
wine-stable:amd64=4.0.3~bionic \
winehq-stable:amd64=4.0.3~bionic
RUN rm -rf /var/lib/apt/lists/* && \
apt-get autoremove -y && \
apt-get clean

contrib/build-wine/README.md

@@ -1,100 +1,37 @@
Windows binaries
Windows Binary Builds
================
=====================
✓ _These binaries should be reproducible, meaning you should be able to generate
These scripts can be used for cross-compilation of Windows Electrum executables from Linux/Wine.
binaries that match the official releases._
This assumes an Ubuntu (x86_64) host, but it should not be too hard to adapt to another
For reproducible builds, see the `docker` folder.
similar system. The docker commands should be executed in the project's root
folder.
1. Install Docker
```
$ curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo apt-key add -
$ sudo add-apt-repository "deb [arch=amd64] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable"
$ sudo apt-get update
$ sudo apt-get install -y docker-ce
```
2. Build image
```
$ sudo docker build -t electrum-wine-builder-img contrib/build-wine
```
Note: see [this](https://stackoverflow.com/a/40516974/7499128) if having dns problems
3. Build Windows binaries
It's recommended to build from a fresh clone
(but you can skip this if reproducibility is not necessary).
```
$ FRESH_CLONE=contrib/build-wine/fresh_clone && \
sudo rm -rf $FRESH_CLONE && \
mkdir -p $FRESH_CLONE && \
cd $FRESH_CLONE && \
git clone https://github.com/spesmilo/electrum.git && \
cd electrum
```
And then build from this directory:
```
$ git checkout $REV
$ sudo docker run -it \
--name electrum-wine-builder-cont \
-v $PWD:/opt/wine64/drive_c/electrum \
--rm \
--workdir /opt/wine64/drive_c/electrum/contrib/build-wine \
electrum-wine-builder-img \
./build.sh
```
4. The generated binaries are in `./contrib/build-wine/dist`.
Usage:
Code Signing
============
Electrum Windows builds are signed with a Microsoft Authenticode™ code signing
certificate in addition to the GPG-based signatures.
The advantage of using Authenticode is that Electrum users won't receive a
Windows SmartScreen warning when starting it.
The release signing procedure involves a signer (the holder of the
certificate/key) and one or multiple trusted verifiers:
| Signer | Verifier |
1. Install the following dependencies:
|-----------------------------------------------------------|-----------------------------------|
| Build .exe files using `build.sh` | |
| Sign .exe with `./sign.sh` | |
| Upload signed files to download server | |
| | Build .exe files using `build.sh` |
| | Compare files using `unsign.sh` |
| | Sign .exe file using `gpg -b` |
| Signer and verifiers: |
- dirmngr
|-----------------------------------------------------------------------------------------------|
- gpg
| Upload signatures to 'electrum-signatures' repo, as `$version/$filename.$builder.asc` |
- 7Zip
- Wine (>= v2)
- (and, for building libsecp256k1)
- mingw-w64
- autotools-dev
- autoconf
- libtool
For example:
Verify Integrity of signed binary
```
=================================
$ sudo apt-get install wine-development dirmngr gnupg2 p7zip-full
$ sudo apt-get install mingw-w64 autotools-dev autoconf libtool
```
Every user can verify that the official binary was created from the source code in this
The binaries are also built by Travis CI, so if you are having problems,
repository. To do so, the Authenticode signature needs to be stripped since the signature
[that script](https://github.com/spesmilo/electrum/blob/master/.travis.yml) might help.
is not reproducible.
This procedure removes the differences between the signed and unsigned binary:
2. Make sure `/opt` is writable by the current user.
3. Run `build.sh`.
1. Remove the signature from the signed binary using osslsigncode or signtool.
4. The generated binaries are in `./dist`.
2. Set the COFF image checksum for the signed binary to 0x0. This is necessary
because pyinstaller doesn't generate a checksum.
3. Append null bytes to the _unsigned_ binary until the byte count is a multiple
of 8.
The script `unsign.sh` performs these steps.
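
As a rough sketch of those three steps (an illustration only, not the actual `unsign.sh`; it assumes `osslsigncode` is installed and that `signed.exe` is the official binary while `unsigned.exe` is the locally built one):

```
# 1. Strip the Authenticode signature from the official binary.
osslsigncode remove-signature -in signed.exe -out stripped.exe

# 2. Zero the 4-byte COFF checksum field (PE header offset + 88) in the stripped binary,
#    the same offset used in contrib/build-wine/build-electrum-git.sh.
python3 - stripped.exe <<'EOF'
import sys
path = sys.argv[1]
data = bytearray(open(path, 'rb').read())
pe = int.from_bytes(data[0x3c:0x40], 'little')  # offset of the PE header
data[pe + 88:pe + 92] = bytes(4)                # CheckSum := 0
open(path, 'wb').write(data)
EOF

# 3. Pad the locally built, unsigned binary with null bytes to a multiple of 8.
size=$(stat -c %s unsigned.exe)
pad=$(( (8 - size % 8) % 8 ))
[ "$pad" -gt 0 ] && head -c "$pad" /dev/zero >> unsigned.exe

# If the build is reproducible, the two files should now hash identically.
sha256sum stripped.exe unsigned.exe
```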

contrib/build-wine/build-electrum-git.sh (mode changed: now executable)

@@ -1,37 +1,40 @@
#!/bin/bash
NAME_ROOT=lbry-vault
NAME_ROOT=electrum
PYTHON_VERSION=3.6.6
# These settings probably don't need any change
export WINEPREFIX=/opt/wine64
export WINEDEBUG=-all
export PYTHONDONTWRITEBYTECODE=1
export PYTHONHASHSEED=22
PYHOME=c:/python3
PYHOME=c:/python$PYTHON_VERSION
PYTHON="wine $PYHOME/python.exe -OO -B"
# Let's begin!
cd `dirname $0`
set -e
here="$(dirname "$(readlink -e "$0")")"
mkdir -p tmp
cd tmp
. "$CONTRIB"/build_tools_util.sh
pushd $WINEPREFIX/drive_c/electrum
VERSION=`git describe --tags --dirty --always`
# Load electrum-icons and electrum-locale for this release
info "Last commit: $VERSION"
git submodule init
git submodule update
# Load electrum-locale for this release
VERSION=`git describe --tags --dirty || printf 'custom'`
git submodule update --init
echo "Last commit: $VERSION"
pushd ./contrib/deterministic-build/electrum-locale
if ! which msgfmt > /dev/null 2>&1; then
fail "Please install gettext"
echo "Please install gettext"
exit 1
fi
for i in ./locale/*; do
dir=$WINEPREFIX/drive_c/electrum/electrum/$i/LC_MESSAGES
dir=$i/LC_MESSAGES
mkdir -p $dir
msgfmt --output-file=$dir/electrum.mo $i/electrum.po || true
done
@@ -40,82 +43,38 @@ popd
find -exec touch -d '2000-11-11T11:11:11+00:00' {} +
popd
cp $WINEPREFIX/drive_c/electrum/LICENCE .
cp -r $WINEPREFIX/drive_c/electrum/contrib/deterministic-build/electrum-locale/locale $WINEPREFIX/drive_c/electrum/electrum/
cp $WINEPREFIX/drive_c/electrum/contrib/deterministic-build/electrum-icons/icons_rc.py $WINEPREFIX/drive_c/electrum/electrum/gui/qt/
# Install frozen dependencies
$PYTHON -m pip install --no-dependencies --no-warn-script-location -r "$CONTRIB"/deterministic-build/requirements.txt
$PYTHON -m pip install -r ../../deterministic-build/requirements.txt
$PYTHON -m pip install --no-dependencies --no-warn-script-location -r "$CONTRIB"/deterministic-build/requirements-hw.txt
$PYTHON -m pip install -r ../../deterministic-build/requirements-hw.txt
pushd $WINEPREFIX/drive_c/electrum
# see https://github.com/pypa/pip/issues/2195 -- pip makes a copy of the entire directory
$PYTHON setup.py install
info "Pip installing Electrum. This might take a long time if the project folder is large."
$PYTHON -m pip install --no-dependencies --no-warn-script-location .
popd
cd ..
# these are deleted as they were not deterministic; and are not needed anyway
rm "$WINEPREFIX"/drive_c/python3/Lib/site-packages/jsonschema-*.dist-info/RECORD
rm -rf dist/
# build standalone and portable versions
info "Running pyinstaller..."
wine "C:/python$PYTHON_VERSION/scripts/pyinstaller.exe" --noconfirm --ascii --clean --name $NAME_ROOT-$VERSION -w deterministic.spec
wine "$PYHOME/scripts/pyinstaller.exe" --noconfirm --ascii --clean --name $NAME_ROOT-$VERSION -w deterministic.spec
# set timestamps in dist, in order to make the installer reproducible
pushd dist
find -exec touch -d '2000-11-11T11:11:11+00:00' {} +
popd
info "building NSIS installer"
# build NSIS installer
# $VERSION could be passed to the electrum.nsi script, but this would require some rewriting in the script itself.
wine "$WINEPREFIX/drive_c/Program Files (x86)/NSIS/makensis.exe" /DPRODUCT_VERSION=$VERSION electrum.nsi
cd dist
mv lbry-vault-setup.exe $NAME_ROOT-$VERSION-setup.exe
mv electrum-setup.exe $NAME_ROOT-$VERSION-setup.exe
cd ..
info "Padding binaries to 8-byte boundaries, and fixing COFF image checksum in PE header"
echo "Done."
# note: 8-byte boundary padding is what osslsigncode uses:
md5sum dist/electrum*exe
# https://github.com/mtrojnar/osslsigncode/blob/6c8ec4427a0f27c145973450def818e35d4436f6/osslsigncode.c#L3047
(
cd dist
for binary_file in ./*.exe; do
info ">> fixing $binary_file..."
# code based on https://github.com/erocarrera/pefile/blob/bbf28920a71248ed5c656c81e119779c131d9bd4/pefile.py#L5877
python3 <<EOF
pe_file = "$binary_file"
with open(pe_file, "rb") as f:
binary = bytearray(f.read())
pe_offset = int.from_bytes(binary[0x3c:0x3c+4], byteorder="little")
checksum_offset = pe_offset + 88
checksum = 0
# Pad data to 8-byte boundary.
remainder = len(binary) % 8
binary += bytes(8 - remainder)
for i in range(len(binary) // 4):
if i == checksum_offset // 4: # Skip the checksum field
continue
dword = int.from_bytes(binary[i*4:i*4+4], byteorder="little")
checksum = (checksum & 0xffffffff) + dword + (checksum >> 32)
if checksum > 2 ** 32:
checksum = (checksum & 0xffffffff) + (checksum >> 32)
checksum = (checksum & 0xffff) + (checksum >> 16)
checksum = (checksum) + (checksum >> 16)
checksum = checksum & 0xffff
checksum += len(binary)
# Set the checksum
binary[checksum_offset : checksum_offset + 4] = int.to_bytes(checksum, byteorder="little", length=4)
with open(pe_file, "wb") as f:
f.write(binary)
EOF
done
)
sha256sum dist/lbry-vault*.exe

contrib/build-wine/build-secp256k1.sh (mode changed: now executable)

contrib/build-wine/build.sh (mode changed: now executable)

@@ -1,43 +1,28 @@
#!/bin/bash
# Lucky number
export PYTHONHASHSEED=22
set -e
here=$(dirname "$0")
here="$(dirname "$(readlink -e "$0")")"
test -n "$here" -a -d "$here" || exit
export CONTRIB="$here/.."
echo "Clearing $here/build and $here/dist..."
export PROJECT_ROOT="$CONTRIB/.."
export CACHEDIR="$here/.cache"
export PIP_CACHE_DIR="$CACHEDIR/pip_cache"
export BUILD_TYPE="wine"
export GCC_TRIPLET_HOST="i686-w64-mingw32"
export GCC_TRIPLET_BUILD="x86_64-pc-linux-gnu"
export GCC_STRIP_BINARIES="1"
. "$CONTRIB"/build_tools_util.sh
info "Clearing $here/build and $here/dist..."
rm "$here"/build/* -rf
rm "$here"/dist/* -rf
mkdir -p "$CACHEDIR" "$PIP_CACHE_DIR"
mkdir -p /tmp/electrum-build
mkdir -p /tmp/electrum-build/pip-cache
export PIP_CACHE_DIR="/tmp/electrum-build/pip-cache"
if [ -f "$PROJECT_ROOT/electrum/libsecp256k1-0.dll" ]; then
$here/build-secp256k1.sh || exit 1
info "libsecp256k1 already built, skipping"
else
"$CONTRIB"/make_libsecp256k1.sh || fail "Could not build libsecp"
fi
$here/prepare-wine.sh || fail "prepare-wine failed"
$here/prepare-wine.sh || exit 1
info "Resetting modification time in C:\Python..."
echo "Resetting modification time in C:\Python..."
# (Because of some bugs in pyinstaller)
pushd /opt/wine64/drive_c/python*
find -exec touch -d '2000-11-11T11:11:11+00:00' {} +
popd
ls -l /opt/wine64/drive_c/python*
$here/build-electrum-git.sh || fail "build-electrum-git failed"
$here/build-electrum-git.sh && \
echo "Done."
info "Done."

contrib/build-wine/deterministic.spec

@@ -10,7 +10,8 @@ for i, x in enumerate(sys.argv):
else:
raise Exception('no name')
PYHOME = 'c:/python3'
PYTHON_VERSION = '3.6.6'
PYHOME = 'c:/python' + PYTHON_VERSION
home = 'C:\\electrum\\'
@@ -22,20 +23,14 @@ hiddenimports += collect_submodules('btchip')
hiddenimports += collect_submodules('keepkeylib')
hiddenimports += collect_submodules('websocket')
hiddenimports += collect_submodules('ckcc')
hiddenimports += ['PyQt5.QtPrintSupport'] # needed by Revealer
# safetlib imports PyQt5.Qt. We use a local updated copy of pinmatrix.py until they
# Add libusb binary
# release a new version that includes https://github.com/archos-safe-t/python-safet/commit/b1eab3dba4c04fdfc1fcf17b66662c28c5f2380e
binaries = [(PYHOME+"/libusb-1.0.dll", ".")]
hiddenimports.remove('safetlib.qt.pinmatrix')
binaries = []
# Workaround for "Retro Look":
binaries += [b for b in collect_dynamic_libs('PyQt5') if 'qwindowsvista' in b[0]]
binaries += [('C:/tmp/libsecp256k1-0.dll', '.')]
binaries += [('C:/tmp/libsecp256k1.dll', '.')]
binaries += [('C:/tmp/libusb-1.0.dll', '.')]
datas = [
(home+'electrum/*.json', 'electrum'),
@@ -43,15 +38,12 @@ datas = [
(home+'electrum/locale', 'electrum/locale'),
(home+'electrum/plugins', 'electrum/plugins'),
('C:\\Program Files (x86)\\ZBar\\bin\\', '.'),
(home+'electrum/gui/icons', 'electrum/gui/icons'),
]
datas += collect_data_files('trezorlib')
datas += collect_data_files('safetlib')
datas += collect_data_files('btchip')
datas += collect_data_files('keepkeylib')
datas += collect_data_files('ckcc')
datas += collect_data_files('jsonrpcserver')
datas += collect_data_files('jsonrpcclient')
# We don't put these files in to actually include them in the script but to make the Analysis method scan them for imports
a = Analysis([home+'run_electrum',
@@ -65,6 +57,7 @@ a = Analysis([home+'run_electrum',
home+'electrum/commands.py',
home+'electrum/plugins/cosigner_pool/qt.py',
home+'electrum/plugins/email_requests/qt.py',
home+'electrum/plugins/trezor/client.py',
home+'electrum/plugins/trezor/qt.py',
home+'electrum/plugins/safe_t/client.py',
home+'electrum/plugins/safe_t/qt.py',
@@ -86,24 +79,6 @@ for d in a.datas:
a.datas.remove(d)
break
# Strip out parts of Qt that we never use. Reduces binary size by tens of MBs. see #4815
qt_bins2remove=('qt5web', 'qt53d', 'qt5game', 'qt5designer', 'qt5quick',
'qt5location', 'qt5test', 'qt5xml', r'pyqt5\qt\qml\qtquick')
print("Removing Qt binaries:", *qt_bins2remove)
for x in a.binaries.copy():
for r in qt_bins2remove:
if x[0].lower().startswith(r):
a.binaries.remove(x)
print('----> Removed x =', x)
qt_data2remove=(r'pyqt5\qt\translations\qtwebengine_locales', )
print("Removing Qt datas:", *qt_data2remove)
for x in a.datas.copy():
for r in qt_data2remove:
if x[0].lower().startswith(r):
a.datas.remove(x)
print('----> Removed x =', x)
# hotfix for #3171 (pre-Win10 binaries)
a.binaries = [x for x in a.binaries if not x[1].lower().startswith(r'c:\windows')]
@@ -118,11 +93,11 @@ exe_standalone = EXE(
a.scripts,
a.binaries,
a.datas,
name=os.path.join('build\\pyi.win32\\electrum-lbry', cmdline_name + ".exe"),
name=os.path.join('build\\pyi.win32\\electrum', cmdline_name + ".exe"),
debug=False,
strip=None,
upx=False,
icon=home+'electrum/gui/icons/electrum.ico',
icon=home+'icons/electrum.ico',
console=False)
# console=True makes an annoying black box pop up, but it does make Electrum output command line commands, with this turned off no output will be given but commands can still be used # console=True makes an annoying black box pop up, but it does make Electrum output command line commands, with this turned off no output will be given but commands can still be used
@ -131,11 +106,11 @@ exe_portable = EXE(
a.scripts, a.scripts,
a.binaries, a.binaries,
a.datas + [ ('is_portable', 'README.md', 'DATA' ) ], a.datas + [ ('is_portable', 'README.md', 'DATA' ) ],
name=os.path.join('build\\pyi.win32\\electrum-lbry', cmdline_name + "-portable.exe"), name=os.path.join('build\\pyi.win32\\electrum', cmdline_name + "-portable.exe"),
debug=False, debug=False,
strip=None, strip=None,
upx=False, upx=False,
icon=home+'electrum/gui/icons/electrum.ico', icon=home+'icons/electrum.ico',
console=False) console=False)
##### #####
@ -145,11 +120,11 @@ exe_dependent = EXE(
pyz, pyz,
a.scripts, a.scripts,
exclude_binaries=True, exclude_binaries=True,
name=os.path.join('build\\pyi.win32\\electrum-lbry', cmdline_name), name=os.path.join('build\\pyi.win32\\electrum', cmdline_name),
debug=False, debug=False,
strip=None, strip=None,
upx=False, upx=False,
icon=home+'electrum/gui/icons/electrum.ico', icon=home+'icons/electrum.ico',
console=False) console=False)
coll = COLLECT( coll = COLLECT(
@ -160,6 +135,6 @@ coll = COLLECT(
strip=None, strip=None,
upx=True, upx=True,
debug=False, debug=False,
icon=home+'electrum/gui/icons/electrum.ico', icon=home+'icons/electrum.ico',
console=False, console=False,
name=os.path.join('dist', 'electrum')) name=os.path.join('dist', 'electrum'))

View file

@ -8,8 +8,7 @@ RUN dpkg --add-architecture i386 && \
wget=1.19.4-1ubuntu2.1 \ wget=1.19.4-1ubuntu2.1 \
gnupg2=2.2.4-1ubuntu1.1 \ gnupg2=2.2.4-1ubuntu1.1 \
dirmngr=2.2.4-1ubuntu1.1 \ dirmngr=2.2.4-1ubuntu1.1 \
python3-software-properties=0.96.24.32.1 \ software-properties-common=0.96.24.32.4 \
software-properties-common=0.96.24.32.1 \
&& \ && \
wget -nc https://dl.winehq.org/wine-builds/Release.key && \ wget -nc https://dl.winehq.org/wine-builds/Release.key && \
apt-key add Release.key && \ apt-key add Release.key && \
@ -20,7 +19,7 @@ RUN dpkg --add-architecture i386 && \
wine-stable-i386:i386=3.0.1~bionic \ wine-stable-i386:i386=3.0.1~bionic \
wine-stable:amd64=3.0.1~bionic \ wine-stable:amd64=3.0.1~bionic \
winehq-stable:amd64=3.0.1~bionic \ winehq-stable:amd64=3.0.1~bionic \
git \ git=1:2.17.1-1ubuntu0.1 \
p7zip-full=16.02+dfsg-6 \ p7zip-full=16.02+dfsg-6 \
make=4.1-9.1ubuntu1 \ make=4.1-9.1ubuntu1 \
mingw-w64=5.0.3-1 \ mingw-w64=5.0.3-1 \

View file

@ -27,19 +27,6 @@ folder.
3. Build Windows binaries 3. Build Windows binaries
It's recommended to build from a fresh clone
(but you can skip this if reproducibility is not necessary).
```
$ FRESH_CLONE=contrib/build-wine/fresh_clone && \
rm -rf $FRESH_CLONE && \
mkdir -p $FRESH_CLONE && \
cd $FRESH_CLONE && \
git clone https://github.com/spesmilo/electrum.git && \
cd electrum
```
And then build from this directory:
``` ```
$ git checkout $REV $ git checkout $REV
$ sudo docker run \ $ sudo docker run \

View file

@ -2,12 +2,12 @@
;Include Modern UI ;Include Modern UI
!include "TextFunc.nsh" ;Needed for the $GetSize function. I know, doesn't sound logical, it isn't. !include "TextFunc.nsh" ;Needed for the $GetSize function. I know, doesn't sound logical, it isn't.
!include "MUI2.nsh" !include "MUI2.nsh"
;-------------------------------- ;--------------------------------
;Variables ;Variables
!define PRODUCT_NAME "LBRY Vault" !define PRODUCT_NAME "Electrum"
!define PRODUCT_WEB_SITE "https://github.com/tzarebczan/electrum" !define PRODUCT_WEB_SITE "https://github.com/spesmilo/electrum"
!define PRODUCT_PUBLISHER "Electrum Technologies GmbH" !define PRODUCT_PUBLISHER "Electrum Technologies GmbH"
!define PRODUCT_UNINST_KEY "Software\Microsoft\Windows\CurrentVersion\Uninstall\${PRODUCT_NAME}" !define PRODUCT_UNINST_KEY "Software\Microsoft\Windows\CurrentVersion\Uninstall\${PRODUCT_NAME}"
@ -16,7 +16,7 @@
;Name and file ;Name and file
Name "${PRODUCT_NAME}" Name "${PRODUCT_NAME}"
OutFile "dist/lbry-vault-setup.exe" OutFile "dist/electrum-setup.exe"
;Default installation folder ;Default installation folder
InstallDir "$PROGRAMFILES\${PRODUCT_NAME}" InstallDir "$PROGRAMFILES\${PRODUCT_NAME}"
@ -29,41 +29,41 @@
;Specifies whether or not the installer will perform a CRC on itself before allowing an install ;Specifies whether or not the installer will perform a CRC on itself before allowing an install
CRCCheck on CRCCheck on
;Sets whether or not the details of the install are shown. Can be 'hide' (the default) to hide the details by default, allowing the user to view them, or 'show' to show them by default, or 'nevershow', to prevent the user from ever seeing them. ;Sets whether or not the details of the install are shown. Can be 'hide' (the default) to hide the details by default, allowing the user to view them, or 'show' to show them by default, or 'nevershow', to prevent the user from ever seeing them.
ShowInstDetails show ShowInstDetails show
;Sets whether or not the details of the uninstall are shown. Can be 'hide' (the default) to hide the details by default, allowing the user to view them, or 'show' to show them by default, or 'nevershow', to prevent the user from ever seeing them. ;Sets whether or not the details of the uninstall are shown. Can be 'hide' (the default) to hide the details by default, allowing the user to view them, or 'show' to show them by default, or 'nevershow', to prevent the user from ever seeing them.
ShowUninstDetails show ShowUninstDetails show
;Sets the colors to use for the install info screen (the default is 00FF00 000000. Use the form RRGGBB (in hexadecimal, as in HTML, only minus the leading '#', since # can be used for comments). Note that if "/windows" is specified as the only parameter, the default windows colors will be used. ;Sets the colors to use for the install info screen (the default is 00FF00 000000. Use the form RRGGBB (in hexadecimal, as in HTML, only minus the leading '#', since # can be used for comments). Note that if "/windows" is specified as the only parameter, the default windows colors will be used.
InstallColors /windows InstallColors /windows
;This command sets the compression algorithm used to compress files/data in the installer. (http://nsis.sourceforge.net/Reference/SetCompressor) ;This command sets the compression algorithm used to compress files/data in the installer. (http://nsis.sourceforge.net/Reference/SetCompressor)
SetCompressor /SOLID lzma SetCompressor /SOLID lzma
;Sets the dictionary size in megabytes (MB) used by the LZMA compressor (default is 8 MB). ;Sets the dictionary size in megabytes (MB) used by the LZMA compressor (default is 8 MB).
SetCompressorDictSize 64 SetCompressorDictSize 64
;Sets the text that is shown (by default it is 'Nullsoft Install System vX.XX') in the bottom of the install window. Setting this to an empty string ("") uses the default; to set the string to blank, use " " (a space). ;Sets the text that is shown (by default it is 'Nullsoft Install System vX.XX') in the bottom of the install window. Setting this to an empty string ("") uses the default; to set the string to blank, use " " (a space).
BrandingText "${PRODUCT_NAME} Installer v${PRODUCT_VERSION}" BrandingText "${PRODUCT_NAME} Installer v${PRODUCT_VERSION}"
;Sets what the titlebars of the installer will display. By default, it is 'Name Setup', where Name is specified with the Name command. You can, however, override it with 'MyApp Installer' or whatever. If you specify an empty string (""), the default will be used (you can however specify " " to achieve a blank string) ;Sets what the titlebars of the installer will display. By default, it is 'Name Setup', where Name is specified with the Name command. You can, however, override it with 'MyApp Installer' or whatever. If you specify an empty string (""), the default will be used (you can however specify " " to achieve a blank string)
Caption "${PRODUCT_NAME}" Caption "${PRODUCT_NAME}"
;Adds the Product Version on top of the Version Tab in the Properties of the file. ;Adds the Product Version on top of the Version Tab in the Properties of the file.
VIProductVersion 1.0.0.0 VIProductVersion 1.0.0.0
;VIAddVersionKey - Adds a field in the Version Tab of the File Properties. This can either be a field provided by the system or a user defined field. ;VIAddVersionKey - Adds a field in the Version Tab of the File Properties. This can either be a field provided by the system or a user defined field.
VIAddVersionKey ProductName "${PRODUCT_NAME} Installer" VIAddVersionKey ProductName "${PRODUCT_NAME} Installer"
VIAddVersionKey Comments "The installer for ${PRODUCT_NAME}" VIAddVersionKey Comments "The installer for ${PRODUCT_NAME}"
VIAddVersionKey CompanyName "${PRODUCT_NAME}" VIAddVersionKey CompanyName "${PRODUCT_NAME}"
VIAddVersionKey LegalCopyright "2013-2018 ${PRODUCT_PUBLISHER}" VIAddVersionKey LegalCopyright "2013-2016 ${PRODUCT_PUBLISHER}"
VIAddVersionKey FileDescription "${PRODUCT_NAME} Installer" VIAddVersionKey FileDescription "${PRODUCT_NAME} Installer"
VIAddVersionKey FileVersion ${PRODUCT_VERSION} VIAddVersionKey FileVersion ${PRODUCT_VERSION}
VIAddVersionKey ProductVersion ${PRODUCT_VERSION} VIAddVersionKey ProductVersion ${PRODUCT_VERSION}
VIAddVersionKey InternalName "${PRODUCT_NAME} Installer" VIAddVersionKey InternalName "${PRODUCT_NAME} Installer"
VIAddVersionKey LegalTrademarks "${PRODUCT_NAME} is a trademark of ${PRODUCT_PUBLISHER}" VIAddVersionKey LegalTrademarks "${PRODUCT_NAME} is a trademark of ${PRODUCT_PUBLISHER}"
VIAddVersionKey OriginalFilename "${PRODUCT_NAME}.exe" VIAddVersionKey OriginalFilename "${PRODUCT_NAME}.exe"
;-------------------------------- ;--------------------------------
@ -71,9 +71,9 @@
!define MUI_ABORTWARNING !define MUI_ABORTWARNING
!define MUI_ABORTWARNING_TEXT "Are you sure you wish to abort the installation of ${PRODUCT_NAME}?" !define MUI_ABORTWARNING_TEXT "Are you sure you wish to abort the installation of ${PRODUCT_NAME}?"
!define MUI_ICON "c:\electrum\electrum\gui\icons\electrum.ico" !define MUI_ICON "c:\electrum\icons\electrum.ico"
;-------------------------------- ;--------------------------------
;Pages ;Pages
@ -108,10 +108,10 @@ Section
RMDir /r "$INSTDIR\*.*" RMDir /r "$INSTDIR\*.*"
Delete "$DESKTOP\${PRODUCT_NAME}.lnk" Delete "$DESKTOP\${PRODUCT_NAME}.lnk"
Delete "$SMPROGRAMS\${PRODUCT_NAME}\*.*" Delete "$SMPROGRAMS\${PRODUCT_NAME}\*.*"
;Files to pack into the installer ;Files to pack into the installer
File /r "dist\electrum\*.*" File /r "dist\electrum\*.*"
File "c:\electrum\electrum\gui\icons\electrum.ico" File "c:\electrum\icons\electrum.ico"
;Store installation folder ;Store installation folder
WriteRegStr HKCU "Software\${PRODUCT_NAME}" "" $INSTDIR WriteRegStr HKCU "Software\${PRODUCT_NAME}" "" $INSTDIR
@ -122,23 +122,21 @@ Section
;Create desktop shortcut ;Create desktop shortcut
DetailPrint "Creating desktop shortcut..." DetailPrint "Creating desktop shortcut..."
CreateShortCut "$DESKTOP\${PRODUCT_NAME}.lnk" "$INSTDIR\lbry-vault-${PRODUCT_VERSION}.exe" "" CreateShortCut "$DESKTOP\${PRODUCT_NAME}.lnk" "$INSTDIR\electrum-${PRODUCT_VERSION}.exe" ""
;Create start-menu items ;Create start-menu items
DetailPrint "Creating start-menu items..." DetailPrint "Creating start-menu items..."
CreateDirectory "$SMPROGRAMS\${PRODUCT_NAME}" CreateDirectory "$SMPROGRAMS\${PRODUCT_NAME}"
CreateShortCut "$SMPROGRAMS\${PRODUCT_NAME}\Uninstall.lnk" "$INSTDIR\Uninstall.exe" "" "$INSTDIR\Uninstall.exe" 0 CreateShortCut "$SMPROGRAMS\${PRODUCT_NAME}\Uninstall.lnk" "$INSTDIR\Uninstall.exe" "" "$INSTDIR\Uninstall.exe" 0
CreateShortCut "$SMPROGRAMS\${PRODUCT_NAME}\${PRODUCT_NAME}.lnk" "$INSTDIR\lbry-vault-${PRODUCT_VERSION}.exe" "" "$INSTDIR\lbry-vault-${PRODUCT_VERSION}.exe" 0 CreateShortCut "$SMPROGRAMS\${PRODUCT_NAME}\${PRODUCT_NAME}.lnk" "$INSTDIR\electrum-${PRODUCT_VERSION}.exe" "" "$INSTDIR\electrum-${PRODUCT_VERSION}.exe" 0
CreateShortCut "$SMPROGRAMS\${PRODUCT_NAME}\${PRODUCT_NAME} Testnet.lnk" "$INSTDIR\lbry-vault-${PRODUCT_VERSION}.exe" "--testnet" "$INSTDIR\lbry-vault-${PRODUCT_VERSION}.exe" 0 CreateShortCut "$SMPROGRAMS\${PRODUCT_NAME}\${PRODUCT_NAME} Testnet.lnk" "$INSTDIR\electrum-${PRODUCT_VERSION}.exe" "--testnet" "$INSTDIR\electrum-${PRODUCT_VERSION}.exe" 0
;Links bitcoin: URI's to Electrum ;Links bitcoin: URI's to Electrum
WriteRegStr HKCU "Software\Classes\bitcoin" "" "URL:bitcoin Protocol" WriteRegStr HKCU "Software\Classes\bitcoin" "" "URL:bitcoin Protocol"
WriteRegStr HKCU "Software\Classes\bitcoin" "URL Protocol" "" WriteRegStr HKCU "Software\Classes\bitcoin" "URL Protocol" ""
WriteRegStr HKCU "Software\Classes\bitcoin" "DefaultIcon" "$\"$INSTDIR\electrum.ico, 0$\"" WriteRegStr HKCU "Software\Classes\bitcoin" "DefaultIcon" "$\"$INSTDIR\electrum.ico, 0$\""
WriteRegStr HKCU "Software\Classes\bitcoin\shell\open\command" "" "$\"$INSTDIR\lbry-vault-${PRODUCT_VERSION}.exe$\" $\"%1$\"" WriteRegStr HKCU "Software\Classes\bitcoin\shell\open\command" "" "$\"$INSTDIR\electrum-${PRODUCT_VERSION}.exe$\" $\"%1$\""
;Adds an uninstaller possibility to Windows Uninstall or change a program section ;Adds an uninstaller possibility to Windows Uninstall or change a program section
WriteRegStr HKCU "${PRODUCT_UNINST_KEY}" "DisplayName" "$(^Name)" WriteRegStr HKCU "${PRODUCT_UNINST_KEY}" "DisplayName" "$(^Name)"
@ -168,8 +166,8 @@ Section "Uninstall"
Delete "$DESKTOP\${PRODUCT_NAME}.lnk" Delete "$DESKTOP\${PRODUCT_NAME}.lnk"
Delete "$SMPROGRAMS\${PRODUCT_NAME}\*.*" Delete "$SMPROGRAMS\${PRODUCT_NAME}\*.*"
RMDir "$SMPROGRAMS\${PRODUCT_NAME}" RMDir "$SMPROGRAMS\${PRODUCT_NAME}"
DeleteRegKey HKCU "Software\Classes\lbc" DeleteRegKey HKCU "Software\Classes\bitcoin"
DeleteRegKey HKCU "Software\${PRODUCT_NAME}" DeleteRegKey HKCU "Software\${PRODUCT_NAME}"
DeleteRegKey HKCU "${PRODUCT_UNINST_KEY}" DeleteRegKey HKCU "${PRODUCT_UNINST_KEY}"
SectionEnd SectionEnd

View file

@ -1,108 +0,0 @@
-----BEGIN PGP PUBLIC KEY BLOCK-----
Comment: User-ID: Steve Dower (Python Release Signing) <steve.dower@microsoft.com>
Comment: Created: 2015-04-06 02:32
Comment: Type: 4096-bit RSA
Comment: Usage: Signing, Encryption, Certifying User-IDs
Comment: Fingerprint: 7ED10B6531D7C8E1BC296021FC624643487034E5
mQINBFUh1AUBEACdUPt6PwJVO23zGZqgtgBeA9JsO22dk3CMzrwPJdUmMd6mcRWa
vl4BoAba66fuC17GvOgGXimKI+iaw5Vt9QI3uSjUjFSfc24J8T7NB/yAr/0zEcex
raHD2dxT/JpE/iY0yWHxRlitvwGSw1Qlq3NnY8tDI1DJEJD+gBuCktvVvu1FfQTw
6bd+aEq0c4sWJHAOnKLuLH0pNFOznnynAFGPGBBsm/YwYc5BP2JVvka775LUjA+W
1h2Sgg3FAUPIm64pc4Pq6mUo6Tulw72xsWMpCL1/5atXNPXT6rJUOB8euTcNMr4l
1O6GKSsiLeLAuvq4bmhOKtLzjWzXnY1gDVoOfdgpD6o4ZHk4xiVsdVE8hCa/ylz8
1ZwRW2gGo2jP8t3hKciR2i+Qs+6lPNZpeFIxa6Uo9ER1IBgCHHapIR/UdcOFyoS0
MNn7Ui7DLQNM4gI/G17eG9tfvjW2dl4SgFSYWMq/OtXnPDUBGqFUWsn8adOL2PFL
B7kM5ZRTPc5SnY9hoSGa5E20rJZIXcpy1aygRz/xUjoKwNzAySSEyyIorUxZ8KaH
EEBQSsqwe04MXIENqnDozH0/cvP4JXEDSl8EkzMSCWSoavQSIYD5pQppyFQpGHqa
5CuOA25Ja+sgp2xqahtr3fEqZUknPQSoYlnJbaHnzsGSlRAVWMsklsZibQARAQAB
tEBTdGV2ZSBEb3dlciAoUHl0aG9uIFJlbGVhc2UgU2lnbmluZykgPHN0ZXZlLmRv
d2VyQG1pY3Jvc29mdC5jb20+iQEcBBABCAAGBQJYsBphAAoJEEhSKohZ29goZggI
ALKlgyoecD5v3ulh1eoctRqtCOxkAoENEfPt3l5x6N8Wq89yHzf10T1rVioEXOHh
Di1m37DDoQmRJD0sOYQymq10xDGRYAJjyOf3X0pvRkZ+F7T0U4dSV3DasLIHcN26
kRwv1yCYsf0QvhgT6EJZKyUNHtV9qrb9u3A1Zp6epC/EyT8zMZj+21GzTUrnbnug
3Ak9p7+APCZS4Ahh9ZHFuD38MZ7+OwrUd6ot+6cbb1nnQLSAGQOHSp6EP6ktrnsK
zts0L+tzHurxtJgUkR01imJuSFfYpLoZa/L7qXNyEpEUTC/SWzRWD9y2QkM7DLzX
caReVAyJr9rix1lDQbEFIquJAhwEEAEIAAYFAlW2TwMACgkQKeBHm5nIo5fahg/+
IQSSE/yH8Cf82PYI7IGqDVNwRw2o7dq8iscB+fhFHfFFhXANwUUFpzPeDMrMrdmq
Bke7Vg1D3bIFocXYOiNwf2J7f4mBO6OL0VAvDX02Vyh/C2ZSc15uZyU6CWFQMCG8
JOSmgQFs3kMHkL4qtut1Y5reoYesmteIe06UVyRw8yT1R1BkxP2whZ97qwsvUUE9
cVD08wCvH486efw7EswIzYGa1KcZXji0MvjXfksVtkEQQbxMMI7SVXo0345ZReww
buioGL5gvvAPObgU43skORanFHFxiHEKmqgHBHXK/LKqaFUFMKcb4iFTNs2XKrhE
XsEi5EMI1AFsJzjcXRqT50Wi2cZhXeRc70uF6gzqrdWvowa2oOPiO6zGDiTqZCW1
AArk/QBzGtPjVh+nKEdHwnvpK9913UAkAN682h8QkoVPYXOvIKDYZRBr5EfpUyQt
y2r9MYewz0YN4zlGP1PFS9FxncdSZiZJqQVif0CkOp1tdSxLynHcujQgATZNtgcu
X9JwUwPp60MurgOcIZiW3nZw/z/5vzBBadSa9/TIFSJAFNBlqeKdIGQuik0UH+Cz
RRtSFb38F7jMPwr0QUSktuntQ0HWuvNqj4N8DFm45/n5rN190eRotrVDXZmjGein
qWPITuICslGIKAp+Q6y3t7JA71MIbeu/ZY6ZcftOka6JAhwEEAEIAAYFAlZRWicA
CgkQxiNM8COVzQq5bRAAktnXceO3GCivMt9yR1Qr0Ov4A4Q+CJSIL45efLFmS30k
cbkHHtaq+0FZNh2ZaMartC16MUja4a2OUejg53VBhaSVkQrVk/6M/HA6/o6CvIhb
FW/5C+nRWBd5gfvwsWvjrtC3cKZco4wg+yYclkDbSH+2EPDZOKIHpBy46YTz9WQ1
8SJ51WVkNUNiZqRBA6Ny5GFoyd6EpWZYEPelmzNemv3zOrQdVzLV24/mLejcLL2t
KmI6ngX4XViXUCRUU3MH8/V+V2YTQGcTM/6HGaHpN0LTqknf6zEto9q9FiRTaiU2
kzExhBq8Qf+cVqwm+1kMt0FGOgpT47VBWMeUWq62gQ3h5NfAs4DfriLgNURlTC1d
JYAEquFhB/8oBQD1h/d9CjQyk88iib2pJInRBDsK2FcfQBap9iaeBFYoBWTzMQJx
g+RuWK1wIm2n0oqa5urBYZtRHE5RIdDP8ZLogrBOFkfXGJxlRBQD1Gab77qohdp0
SnErGw4Ne3gJH/SNhK+zzHkHERIrRZCR95zdYkKfZ2jyOPzSuABVRigEQVQPCDn0
hbv3cblTCeJYwG2mfRdmfyqSMALKIgXe9yvJ2kl8QgaVOsJjNfQzIKeoHFPIm5Uw
3YB6jgDFc5uzEaH7WSz74A7KhGYjC7huw2TugosHbWxphJKddwxfK1WujYaAeJyJ
AhwEEAEIAAYFAlf2sPIACgkQfb+tds3soNuXEQ//XkWYHmJsKyeDZC8MFU+/vsVq
dhnFs6UXZkvf7MoNFkuMDL+zgVoMpFHftTdyBqNAoEnndakk212jK8YWF8g4kQXI
a9uMRqJLM4mqCl9yco/twJ9z9EMA+JLSXYK0ZbTkLdutSDZEDKgpHbmekx2C1OsW
lRLs9PahF5PAZQs0N+m+LJBnw6bEHOSTv4OE5uVUf9nvdes3OARvkGSEGURNmUaF
chxWtZ/SF1q9Jfj0K/xgs9Gt855oueveRXLIGpjiEVoKH/drsgyKFMJVrpZDDgS4
GVXG8bq3GTFiMAs7BPPd9bjI+jgvqttgItZcYsW/IQK1BIoG6Fere4cPvu+IshCc
km9T8nOK98tZuov8hLbND9mW2d7LChJI1r/HbzbKIl0k6OigdFMrJlun2zmtDxT9
Tp3uxOYSaW2YggcpNUjI28tv6AwoA8okVY93LWjO5kdZGkbliRnf/eJy7NJYn0LO
ogsvMUJClRAGnZTHLEr32Whq0MImlXa43kr6oPJT5dwXXyw5ELstEQztczCd1PYB
kbQHUpD5j3PwgNVOinCnbd4pc/qVtYSqpg2g6TJi1XiJ1638jhn2k+i8wop/dyet
iN8lGR76twYGex9AavEAUpVR9r6qfpp4KBibEhdvL6o2O03RQu17GcRzXSAYzmUi
5U5jZ3dBz5MYUjgUZM+JAhwEEAEKAAYFAllTh9gACgkQXLNh5VL7DRAk7Q//X8eU
hwEvl/d9Sv2kBNCZFjAW3QmZp2L/sxhScJZXrOFzKUdmjap9Xlul1qr6/Wif7YLK
bOdNUI7KziEBn+9SEd90XauoVkzU2F0Jn9ILGQfUHAIpocRTKuCwBrncaBozHQwD
O3Dk33AhZ6lqTv/AVLRKHQXwigGTBJxK4cCEZ+VwK9tKk6BrQB48Rm7pg9HF5ey5
JGPRWgUnn1v0IJN5ysZ5m9ChYbqF8VwvMw0txmgKgvdDKpXbF/S59Bp4TH/7Dr2D
kAeNTcuzTFBaFE+siMgksZIYKZ1VkVoiN2qQA7ZaA5LQbUom0WdrKZGefFfPt9ES
A4wyL3OfxRsmWmd/5Fxrwm1VbzgPoMd1Dc5ExlyqnecdGzDui2bmltNqRJd9ytRq
6YUGYzXp4qQkWO61CoC3mkm2M8Ex7DGbUtXhdg0zoa08w9lXuOtHVhY7XlLWjO1U
p8cp4DVxsN/wOXtyH1pcleGo4aEsgyU/DH57prFLGz7Egp2JhRDHnZmlonWp74G1
VLfqkOqZlqTU4mPA827C8qPCx6cMsRvFS7OEiDBswkFWBKjkUCw4rLC1tBMBCxJW
tZlc+Y0LNyOryJ3h6EJmRIHO57oLen345e1WOi4ROOC/wQMErFk7B3P41Lqmrwb8
HGuKn3ca+Aw70hVrZ+7Q3RRFTLlOS/vv107Fqu6JAjkEEwEIACMFAlUh1AUCGwMH
CwkIBwMCAQYVCAIJCgsEFgIDAQIeAQIXgAAKCRD8YkZDSHA05RfdD/97wPXnoe7e
ipP7UXQ942z1buV6pTGv0Lea2aHn20o2BBjHp97YXroF/e/8W6h+Y+Fq8hWoXdYJ
dC9DVgzJhvbXAIG8VrF6/IDGQ62r4ff/AIyQY+kiCOCCVhjwuqOTjVYw2pYRUcI3
UwXVPeptDSXcIZkHCLtEUnS5YMTdkPuZrAmucCCnfcJtevXbHD2yJYP4vwfXMbal
sNBDKJi6uYAFc4yv+/DyS13rfXJvu2pYGvtRd+fs7mBETvUTubhI440pIss6TX6M
lxWexX6Ty8vI5HCQT281H4zqdbe5GdzGmIx1EiYx1sJbgSBNqCh5sRJY5/BXzVJ3
dfM/Mv5QYY4ulO/qUNFdC8f1cZm0euOo3maB4jY+Sjaff7t0WIz0GufO4dHARwJg
3s0LO9Wf5+z/fbWOMcfvvcfaHNbhaKWk16kslc/g7NYvMfOuleM06YGyGPz//a9c
baX53OiMupNvLlhyPO5NfGppvRn5xAElcAw1RLhHJcgvTtIs/zVVfHPaK41u8A9c
XKnmIUC39K4BGvOpPzEvCdQ2ZbAqzQLmZ1UICr15w1Nfs6uoERJbnuq+JgOPOcOk
ezAWELi5LdZTElnpJpZPTDQ03+3GvxD4R9sR+l5RT8Ul7kF+3PPPzekfQzF+Nisr
BhPFb2lPt3Hw32FgTTIuXCMRTKEBb/6z77kCDQRVIdQFARAAtmnsZ9A8ovJIJ9Rl
WeIylEhHRyQifqzgc/r50uDZVPBjewOA462LjH3+F6zFGEkU+q2aqSe0A0SJPF/W
hj6MNYXLoibxi5D4mGkoIao9ExnXt4LXAc6ogQpY6vFQBJU5Nr8XCefQbm0loa/o
y5uK8JHLWCZ2jAossnVpzDwNeN27+B8h5+OifnWhQCTun1xz5EJiyc0yoBmf46zf
mU4CMUBsPvrXcLmw4J3wp35qmrHg1tNyPhd7VBlikMrgtrWX9IaPZ40dnrGG/WjO
FYB3CKxGb0pTCj7GC4ubxo2upeWZqHLmdIVc7Nzsfp8EcwJbTj+jZ2Zfq6F8y+je
sbgh8CaxYn4hEs23aPYRq5H4/buVmZhUw3/AAL9ZmyX6AtAQ0HktVtQe7ykP7DLs
EpeLG+vPJFY363QeDsLHwOoxnZSfGziVlB4N/KqIkixNWcFTG8GSE1zKcdJVNoW+
3MB3+FtMZWUJhH0FyKg5qLaJCtC7Yo5gsddU+QCqTn6gcZBnMX5j4LaAmW4hh1RX
ffwwsbfviK5uhXQCeUnbUaokieetDx4s6Kay6t9ahTRr0r/Z3VWzvr+xATxNWZzi
xTdezCGOB2ycZ0vq4bKXBuN8CAyOy5X1hf7Rc1BiAVQCILHJDtz0Ak/Hax6DAa2A
Hnx9YlugHQf000KroLEY+GaxqYEAEQEAAYkCHwQYAQgACQUCVSHUBQIbDAAKCRD8
YkZDSHA05RtyEACdOEmGolL1xG6I+lDVdot6oBZqC9e021aLWqCUpWJFDp0m0aTm
CfmOI1gTaFjScxhq1W0GPUoJKUZhk3tlVfdSCtUckI+xuWKEfqJYtvUtTXpK4jDe
aZBovJ3KNpJRIynbr1566zCSQJhHiCGWmE/M5KN3gPsORbCBQXEkONSVsslf1Wm6
6hU6uqSWUaceD+4fl5LClbck1DPWchAP7+uLKPEOtORyH6KRTgKl73zYo7xU1K4Q
MN/1aMjobPkqNvvkXnUNwO7QMz18Nx+WqPc4ksJgW1O1aPQ2qL/ARY5jatZ6BBd7
iytfz7d6JOh0FOIlmhBqbWd7fEGrLsSA+EjBGBwW5BnIMmxP1xhjhwrcI18y8kAK
5UzdW2hbbAlc2rlsuxEc+xOYh8kGcc+mZ1j/aMn4gALsTbSO/0T+YJhfODNnL1dC
j7oPbJGmmG6pb/o7P4azBUVC9lHOuV3XlAPjSmJylnNsV7+PxwPlXlvKgh4S4C4Z
PUc/iPetsxXR2djccOoNxVU4CqJBqYKgul/pUphXkh7QfEKyH+42UETbVhstdBVU
azJ6SeUnv9ClVDGsCEhfEZfNOnOoDzJGxDfESoAw7ih91vIhTyHHsK83p2HLDMLP
ptLzx/0AFBfo6MWGGpd2RSnMWNbvh59wiThlDeI+Das3ln5nsAo67dMYdA==
=fjOq
-----END PGP PUBLIC KEY BLOCK-----

205
contrib/build-wine/prepare-wine.sh Normal file → Executable file
View file

@ -1,137 +1,150 @@
#!/bin/bash #!/bin/bash
# Please update these carefully, some versions won't work under Wine # Please update these carefully, some versions won't work under Wine
NSIS_FILENAME=nsis-3.05-setup.exe NSIS_FILENAME=nsis-3.03-setup.exe
NSIS_URL=https://prdownloads.sourceforge.net/nsis/$NSIS_FILENAME?download NSIS_URL=https://prdownloads.sourceforge.net/nsis/$NSIS_FILENAME?download
NSIS_SHA256=1a3cc9401667547b9b9327a177b13485f7c59c2303d4b6183e7bc9e6c8d6bfdb NSIS_SHA256=bd3b15ab62ec6b0c7a00f46022d441af03277be893326f6fea8e212dc2d77743
ZBAR_FILENAME=zbarw-20121031-setup.exe ZBAR_FILENAME=zbarw-20121031-setup.exe
ZBAR_URL=https://sourceforge.net/projects/zbarw/files/$ZBAR_FILENAME/download ZBAR_URL=https://sourceforge.net/projects/zbarw/files/$ZBAR_FILENAME/download
ZBAR_SHA256=177e32b272fa76528a3af486b74e9cb356707be1c5ace4ed3fcee9723e2c2c02 ZBAR_SHA256=177e32b272fa76528a3af486b74e9cb356707be1c5ace4ed3fcee9723e2c2c02
LIBUSB_REPO="https://github.com/libusb/libusb.git" LIBUSB_FILENAME=libusb-1.0.22.7z
LIBUSB_COMMIT=e782eeb2514266f6738e242cdcb18e3ae1ed06fa LIBUSB_URL=https://prdownloads.sourceforge.net/project/libusb/libusb-1.0/libusb-1.0.22/$LIBUSB_FILENAME?download
# ^ tag v1.0.23 LIBUSB_SHA256=671f1a420757b4480e7fadc8313d6fb3cbb75ca00934c417c1efa6e77fb8779b
PYINSTALLER_REPO="https://github.com/SomberNight/pyinstaller.git" PYTHON_VERSION=3.6.6
PYINSTALLER_COMMIT=e934539374e30d1500fcdbe8e4eb0860413935b2
# ^ tag 3.6, plus a custom commit that fixes cross-compilation with MinGW
PYTHON_VERSION=3.6.8
## These settings probably don't need change ## These settings probably don't need change
export WINEPREFIX=/opt/wine64 export WINEPREFIX=/opt/wine64
export WINEDEBUG=-all #export WINEARCH='win32'
PYTHON_FOLDER="python3" PYHOME=c:/python$PYTHON_VERSION
PYHOME="c:/$PYTHON_FOLDER"
PYTHON="wine $PYHOME/python.exe -OO -B" PYTHON="wine $PYHOME/python.exe -OO -B"
# based on https://superuser.com/questions/497940/script-to-verify-a-signature-with-gpg
verify_signature() {
local file=$1 keyring=$2 out=
if out=$(gpg --no-default-keyring --keyring "$keyring" --status-fd 1 --verify "$file" 2>/dev/null) &&
echo "$out" | grep -qs "^\[GNUPG:\] VALIDSIG "; then
return 0
else
echo "$out" >&2
exit 1
fi
}
verify_hash() {
local file=$1 expected_hash=$2
actual_hash=$(sha256sum $file | awk '{print $1}')
if [ "$actual_hash" == "$expected_hash" ]; then
return 0
else
echo "$file $actual_hash (unexpected hash)" >&2
rm "$file"
exit 1
fi
}
download_if_not_exist() {
local file_name=$1 url=$2
if [ ! -e $file_name ] ; then
wget -O $PWD/$file_name "$url"
fi
}
# https://github.com/travis-ci/travis-build/blob/master/lib/travis/build/templates/header.sh
retry() {
local result=0
local count=1
while [ $count -le 3 ]; do
[ $result -ne 0 ] && {
echo -e "\nThe command \"$@\" failed. Retrying, $count of 3.\n" >&2
}
! { "$@"; result=$?; }
[ $result -eq 0 ] && break
count=$(($count + 1))
sleep 1
done
[ $count -gt 3 ] && {
echo -e "\nThe command \"$@\" failed 3 times.\n" >&2
}
return $result
}
# Let's begin! # Let's begin!
here=$(dirname $(readlink -e $0))
set -e set -e
here="$(dirname "$(readlink -e "$0")")"
. "$CONTRIB"/build_tools_util.sh
info "Booting wine."
wine 'wineboot' wine 'wineboot'
# HACK to work around https://bugs.winehq.org/show_bug.cgi?id=42474#c22
# needed for python 3.6+
rm -f /opt/wine-stable/lib/wine/fakedlls/api-ms-win-core-path-l1-1-0.dll
rm -f /opt/wine-stable/lib/wine/api-ms-win-core-path-l1-1-0.dll.so
cd "$CACHEDIR" cd /tmp/electrum-build
mkdir -p $WINEPREFIX/drive_c/tmp
info "Installing Python." # Install Python
# note: you might need "sudo apt-get install dirmngr" for the following # note: you might need "sudo apt-get install dirmngr" for the following
# keys from https://www.python.org/downloads/#pubkeys # keys from https://www.python.org/downloads/#pubkeys
KEYLIST_PYTHON_DEV="531F072D39700991925FED0C0EDDC5F26A45C816 26DEA9D4613391EF3E25C9FF0A5B101836580288 CBC547978A3964D14B9AB36A6AF053F07D9DC8D2 C01E1CAD5EA2C4F0B8E3571504C367C218ADD4FF 12EF3DC38047DA382D18A5B999CDEA9DA4135B38 8417157EDBE73D9EAC1E539B126EB563A74B06BF DBBF2EEBF925FAADCF1F3FFFD9866941EA5BBD71 2BA0DB82515BBB9EFFAC71C5C9BE28DEE6DF025C 0D96DF4D4110E5C43FBFB17F2D347EA6AA65421D C9B104B3DD3AA72D7CCB1066FB9921286F5E1540 97FC712E4C024BBEA48A61ED3A5CA953F73C700D 7ED10B6531D7C8E1BC296021FC624643487034E5"
KEYRING_PYTHON_DEV="keyring-electrum-build-python-dev.gpg" KEYRING_PYTHON_DEV="keyring-electrum-build-python-dev.gpg"
gpg --no-default-keyring --keyring $KEYRING_PYTHON_DEV --import "$here"/gpg_keys/7ED10B6531D7C8E1BC296021FC624643487034E5.asc for server in $(shuf -e ha.pool.sks-keyservers.net \
PYTHON_DOWNLOADS="$CACHEDIR/python$PYTHON_VERSION" hkp://p80.pool.sks-keyservers.net:80 \
mkdir -p "$PYTHON_DOWNLOADS" keyserver.ubuntu.com \
hkp://keyserver.ubuntu.com:80) ; do
retry gpg --no-default-keyring --keyring $KEYRING_PYTHON_DEV --keyserver "$server" --recv-keys $KEYLIST_PYTHON_DEV \
&& break || : ;
done
for msifile in core dev exe lib pip tools; do for msifile in core dev exe lib pip tools; do
echo "Installing $msifile..." echo "Installing $msifile..."
download_if_not_exist "$PYTHON_DOWNLOADS/${msifile}.msi" "https://www.python.org/ftp/python/$PYTHON_VERSION/win32/${msifile}.msi" wget -N -c "https://www.python.org/ftp/python/$PYTHON_VERSION/win32/${msifile}.msi"
download_if_not_exist "$PYTHON_DOWNLOADS/${msifile}.msi.asc" "https://www.python.org/ftp/python/$PYTHON_VERSION/win32/${msifile}.msi.asc" wget -N -c "https://www.python.org/ftp/python/$PYTHON_VERSION/win32/${msifile}.msi.asc"
verify_signature "$PYTHON_DOWNLOADS/${msifile}.msi.asc" $KEYRING_PYTHON_DEV verify_signature "${msifile}.msi.asc" $KEYRING_PYTHON_DEV
wine msiexec /i "$PYTHON_DOWNLOADS/${msifile}.msi" /qb TARGETDIR=$PYHOME wine msiexec /i "${msifile}.msi" /qb TARGETDIR=C:/python$PYTHON_VERSION
done done
info "Installing build dependencies." # upgrade pip
$PYTHON -m pip install --no-dependencies --no-warn-script-location -r "$CONTRIB"/deterministic-build/requirements-wine-build.txt $PYTHON -m pip install pip --upgrade
info "Installing dependencies specific to binaries." # Install pywin32-ctypes (needed by pyinstaller)
$PYTHON -m pip install --no-dependencies --no-warn-script-location -r "$CONTRIB"/deterministic-build/requirements-binaries.txt $PYTHON -m pip install pywin32-ctypes==0.1.2
info "Installing ZBar." # install PySocks
download_if_not_exist "$CACHEDIR/$ZBAR_FILENAME" "$ZBAR_URL" $PYTHON -m pip install win_inet_pton==1.0.1
verify_hash "$CACHEDIR/$ZBAR_FILENAME" "$ZBAR_SHA256"
wine "$CACHEDIR/$ZBAR_FILENAME" /S
info "Installing NSIS." $PYTHON -m pip install -r $here/../deterministic-build/requirements-binaries.txt
download_if_not_exist "$CACHEDIR/$NSIS_FILENAME" "$NSIS_URL"
verify_hash "$CACHEDIR/$NSIS_FILENAME" "$NSIS_SHA256"
wine "$CACHEDIR/$NSIS_FILENAME" /S
# Install PyInstaller
$PYTHON -m pip install https://github.com/ecdsa/pyinstaller/archive/fix_2952.zip
info "Compiling libusb..." # Install ZBar
( download_if_not_exist $ZBAR_FILENAME "$ZBAR_URL"
cd "$CACHEDIR" verify_hash $ZBAR_FILENAME "$ZBAR_SHA256"
if [ -f "libusb/libusb/.libs/libusb-1.0.dll" ]; then wine "$PWD/$ZBAR_FILENAME" /S
info "libusb-1.0.dll already built, skipping"
exit 0
fi
rm -rf libusb
mkdir libusb
cd libusb
# Shallow clone
git init
git remote add origin $LIBUSB_REPO
git fetch --depth 1 origin $LIBUSB_COMMIT
git checkout -b pinned FETCH_HEAD
echo "libusb_1_0_la_LDFLAGS += -Wc,-static" >> libusb/Makefile.am
./bootstrap.sh || fail "Could not bootstrap libusb"
host="i686-w64-mingw32"
LDFLAGS="-Wl,--no-insert-timestamp" ./configure \
--host=$host \
--build=x86_64-pc-linux-gnu || fail "Could not run ./configure for libusb"
make -j4 || fail "Could not build libusb"
${host}-strip libusb/.libs/libusb-1.0.dll
) || fail "libusb build failed"
cp "$CACHEDIR/libusb/libusb/.libs/libusb-1.0.dll" $WINEPREFIX/drive_c/tmp/ || fail "Could not copy libusb to its destination"
# Upgrade setuptools (so Electrum can be installed later)
$PYTHON -m pip install setuptools --upgrade
# copy libsecp dll (already built) # Install NSIS installer
cp "$PROJECT_ROOT/electrum/libsecp256k1-0.dll" $WINEPREFIX/drive_c/tmp/ || fail "Could not copy libsecp to its destination" download_if_not_exist $NSIS_FILENAME "$NSIS_URL"
verify_hash $NSIS_FILENAME "$NSIS_SHA256"
wine "$PWD/$NSIS_FILENAME" /S
download_if_not_exist $LIBUSB_FILENAME "$LIBUSB_URL"
verify_hash $LIBUSB_FILENAME "$LIBUSB_SHA256"
7z x -olibusb $LIBUSB_FILENAME -aoa
info "Building PyInstaller." cp libusb/MS32/dll/libusb-1.0.dll $WINEPREFIX/drive_c/python$PYTHON_VERSION/
# we build our own PyInstaller boot loader as the default one has high
# anti-virus false positives
(
cd "$WINEPREFIX/drive_c/electrum"
ELECTRUM_COMMIT_HASH=$(git rev-parse HEAD)
cd "$CACHEDIR"
rm -rf pyinstaller
mkdir pyinstaller
cd pyinstaller
# Shallow clone
git init
git remote add origin $PYINSTALLER_REPO
git fetch --depth 1 origin $PYINSTALLER_COMMIT
git checkout -b pinned FETCH_HEAD
rm -fv PyInstaller/bootloader/Windows-*/run*.exe || true
# add reproducible randomness. this ensures we build a different bootloader for each commit.
# if we built the same one for all releases, that might also get anti-virus false positives
echo "const char *electrum_tag = \"tagged by Electrum@$ELECTRUM_COMMIT_HASH\";" >> ./bootloader/src/pyi_main.c
pushd bootloader
# cross-compile to Windows using host python
python3 ./waf all CC=i686-w64-mingw32-gcc CFLAGS="-static -Wno-dangling-else -Wno-error=unused-value"
popd
# sanity check bootloader is there:
[[ -e PyInstaller/bootloader/Windows-32bit/runw.exe ]] || fail "Could not find runw.exe in target dir!"
) || fail "PyInstaller build failed"
info "Installing PyInstaller."
$PYTHON -m pip install --no-dependencies --no-warn-script-location ./pyinstaller
info "Wine is configured." # add dlls needed for pyinstaller:
cp $WINEPREFIX/drive_c/python$PYTHON_VERSION/Lib/site-packages/PyQt5/Qt/bin/* $WINEPREFIX/drive_c/python$PYTHON_VERSION/
mkdir -p $WINEPREFIX/drive_c/tmp
cp secp256k1/libsecp256k1.dll $WINEPREFIX/drive_c/tmp/
echo "Wine is configured."

0
contrib/build-wine/sign.sh Normal file → Executable file
View file

22
contrib/build-wine/unsign.sh Normal file → Executable file
View file

@ -24,8 +24,28 @@ for mine in $(ls dist/*.exe); do
echo "Downloading https://download.electrum.org/$version/$f" echo "Downloading https://download.electrum.org/$version/$f"
wget -q https://download.electrum.org/$version/$f -O signed/$f wget -q https://download.electrum.org/$version/$f -O signed/$f
out="signed/stripped/$f" out="signed/stripped/$f"
# Remove PE signature from signed binary size=$( wc -c < $mine )
# Step 1: Remove PE signature from signed binary
osslsigncode remove-signature -in signed/$f -out $out > /dev/null 2>&1 osslsigncode remove-signature -in signed/$f -out $out > /dev/null 2>&1
# Step 2: Remove checksum and padding from signed binary
python3 <<EOF
pe_file = "$out"
size= $size
with open(pe_file, "rb") as f:
binary = bytearray(f.read())
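# offset 0x3c in the DOS header holds e_lfanew, i.e. where the PE header starts;
# the optional header's CheckSum field then sits 88 bytes into the PE header
# (4-byte "PE\0\0" signature + 20-byte COFF header + 64 bytes into the optional header)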
pe_offset = int.from_bytes(binary[0x3c:0x3c+4], byteorder="little")
checksum_offset = pe_offset + 88
for b in range(4):
binary[checksum_offset + b] = 0
l = len(binary)
n = l - size
if n > 0:
if binary[-n:] != bytearray(n):
print('expecting failure for', str(pe_file))
binary = binary[:size]
with open(pe_file, "wb") as f:
f.write(binary)
EOF
chmod +x $out chmod +x $out
if cmp -s $out $mine; then if cmp -s $out $mine; then
echo "Success: $f" echo "Success: $f"

View file

@ -1,133 +0,0 @@
#!/usr/bin/env bash
# Set a fixed umask as this leaks into docker containers
umask 0022
RED='\033[0;31m'
BLUE='\033[0;34m'
YELLOW='\033[0;33m'
NC='\033[0m' # No Color
function info {
printf "\r💬 ${BLUE}INFO:${NC} ${1}\n"
}
function fail {
printf "\r🗯 ${RED}ERROR:${NC} ${1}\n"
exit 1
}
function warn {
printf "\r⚠ ${YELLOW}WARNING:${NC} ${1}\n"
}
# based on https://superuser.com/questions/497940/script-to-verify-a-signature-with-gpg
function verify_signature() {
local file=$1 keyring=$2 out=
if out=$(gpg --no-default-keyring --keyring "$keyring" --status-fd 1 --verify "$file" 2>/dev/null) &&
echo "$out" | grep -qs "^\[GNUPG:\] VALIDSIG "; then
return 0
else
echo "$out" >&2
exit 1
fi
}
function verify_hash() {
local file=$1 expected_hash=$2
actual_hash=$(sha256sum $file | awk '{print $1}')
if [ "$actual_hash" == "$expected_hash" ]; then
return 0
else
echo "$file $actual_hash (unexpected hash)" >&2
rm "$file"
exit 1
fi
}
function download_if_not_exist() {
local file_name=$1 url=$2
if [ ! -e $file_name ] ; then
wget -O $file_name "$url"
fi
}
# https://github.com/travis-ci/travis-build/blob/master/lib/travis/build/templates/header.sh
function retry() {
local result=0
local count=1
while [ $count -le 3 ]; do
[ $result -ne 0 ] && {
echo -e "\nThe command \"$@\" failed. Retrying, $count of 3.\n" >&2
}
! { "$@"; result=$?; }
[ $result -eq 0 ] && break
count=$(($count + 1))
sleep 1
done
[ $count -gt 3 ] && {
echo -e "\nThe command \"$@\" failed 3 times.\n" >&2
}
return $result
}
function gcc_with_triplet()
{
TRIPLET="$1"
CMD="$2"
shift 2
if [ -n "$TRIPLET" ] ; then
"$TRIPLET-$CMD" "$@"
else
"$CMD" "$@"
fi
}
function gcc_host()
{
gcc_with_triplet "$GCC_TRIPLET_HOST" "$@"
}
function gcc_build()
{
gcc_with_triplet "$GCC_TRIPLET_BUILD" "$@"
}
function host_strip()
{
if [ "$GCC_STRIP_BINARIES" -ne "0" ] ; then
case "$BUILD_TYPE" in
linux|wine)
gcc_host strip "$@"
;;
darwin)
# TODO: Strip on macOS?
;;
esac
fi
}
# on MacOS, there is no realpath by default
if ! [ -x "$(command -v realpath)" ]; then
function realpath() {
[[ $1 = /* ]] && echo "$1" || echo "$PWD/${1#./}"
}
fi
export SOURCE_DATE_EPOCH=1530212462
export PYTHONHASHSEED=22
# Set the build type, overridden by wine build
export BUILD_TYPE="${BUILD_TYPE:-$(uname | tr '[:upper:]' '[:lower:]')}"
# No additional autoconf flags by default
export AUTOCONF_FLAGS=""
# Add host / build flags if the triplets are set
if [ -n "$GCC_TRIPLET_HOST" ] ; then
export AUTOCONF_FLAGS="$AUTOCONF_FLAGS --host=$GCC_TRIPLET_HOST"
fi
if [ -n "$GCC_TRIPLET_BUILD" ] ; then
export AUTOCONF_FLAGS="$AUTOCONF_FLAGS --build=$GCC_TRIPLET_BUILD"
fi
export GCC_STRIP_BINARIES="${GCC_STRIP_BINARIES:-0}"
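The two fixed exports above (`SOURCE_DATE_EPOCH`, `PYTHONHASHSEED`) are the reproducibility knobs: tools that follow the reproducible-builds convention substitute `SOURCE_DATE_EPOCH` for the current time, and a fixed `PYTHONHASHSEED` disables per-process string-hash randomisation so anything derived from hash ordering stays stable between runs. A tiny illustrative sketch of how a build step might honour the former (hypothetical, not part of these scripts):

```
# Illustrative only: a build step that honours SOURCE_DATE_EPOCH, so any
# timestamp it embeds is identical across machines and re-runs.
import os
import time

build_time = int(os.environ.get("SOURCE_DATE_EPOCH", time.time()))
print("embedding timestamp:", time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime(build_time)))
```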

7
contrib/deterministic-build/check_submodules.sh Normal file → Executable file
View file

@ -18,6 +18,13 @@ function get_git_mtime {
fail=0 fail=0
for f in icons/* "icons.qrc"; do
if (( $(get_git_mtime "$f") > $(get_git_mtime "contrib/deterministic-build/electrum-icons/") )); then
echo "Modification time of $f (" $(get_git_mtime --readable "$f") ") is newer than"\
"last update of electrum-icons"
fail=1
fi
done
if [ $(date +%s -d "2 weeks ago") -gt $(get_git_mtime "contrib/deterministic-build/electrum-locale/") ]; then if [ $(date +%s -d "2 weeks ago") -gt $(get_git_mtime "contrib/deterministic-build/electrum-locale/") ]; then
echo "Last update from electrum-locale is older than 2 weeks."\ echo "Last update from electrum-locale is older than 2 weeks."\

@ -0,0 +1 @@
Subproject commit 0b8cbcca428ceb791527bcbb2ef2b36b4ab29c73

@ -1 +1 @@
Subproject commit aafd932d37f35a1f276909b6ec27d2f7a60e606a Subproject commit 27e36687f4b0fbd126628bdde80758b63ade7347

View file

@ -1,10 +1,7 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
import sys import sys
try: import requests
import requests
except ImportError as e:
sys.exit(f"Error: {str(e)}. Try 'sudo python3 -m pip install <module-name>'")
def check_restriction(p, r): def check_restriction(p, r):

View file

@ -1,27 +1,56 @@
pip==19.3.1 \ pip==10.0.1 \
--hash=sha256:21207d76c1031e517668898a6b46a9fb1501c7a4710ef5dfd6a40ad9e6757ea7 \ --hash=sha256:717cdffb2833be8409433a93746744b59505f42146e8d37de6c62b430e25d6d7 \
--hash=sha256:6917c65fc3769ecdc61405d3dfd97afdedd75808d200b2838d7d961cebc0c2c7 --hash=sha256:f2bd08e0cd1b06e10218feaf6fef299f473ba706582eb3bd9d52203fdbd7ee68
PyQt5==5.11.3 \ pycryptodomex==3.6.4 \
--hash=sha256:517e4339135c4874b799af0d484bc2e8c27b54850113a68eec40a0b56534f450 \ --hash=sha256:0461e88a7199f9e88f9f90c2c1e109e9e1f7bbb94dc6192e5df52829d31510c1 \
--hash=sha256:ac1eb5a114b6e7788e8be378be41c5e54b17d5158994504e85e43b5fca006a39 \ --hash=sha256:08d0aba5a72e8af5da118ac4b6a5d75befceca7dd92a031b040ed5ff4417cec2 \
--hash=sha256:d2309296a5a79d0a1c0e6c387c30f0398b65523a6dcc8a19cc172e46b949e00d \ --hash=sha256:0e22d47935d5fa95f556d5f5857576bc6750233964de06a840d58459010c3889 \
--hash=sha256:e85936bae1581bcb908847d2038e5b34237a5e6acc03130099a78930770e7ead --hash=sha256:10ef21d1728ec0b8afc4f8e1d8d9ea66f317154ea18731a4a05bd996cdc33fdf \
PyQt5-sip==4.19.13 \ --hash=sha256:1962b81eef81bf5c42d625816904a22a0bd23d15ca5d49891a54e3c0d0189d84 \
--hash=sha256:125f77c087572c9272219cda030a63c2f996b8507592b2a54d7ef9b75f9f054d \ --hash=sha256:24aae88efe3cbcb4a9cf840b2c352e7de1d6c2c5b3df37ff99b5c7e271e8f3a8 \
--hash=sha256:14c37b06e3fb7c2234cb208fa461ec4e62b4ba6d8b32ca3753c0b2cfd61b00e3 \ --hash=sha256:43ad6d1d7ca545d53360bf412ee70fcb9ede876b4376fc6db06fc7328f70588c \
--hash=sha256:1cb2cf52979f9085fc0eab7e0b2438eb4430d4aea8edec89762527e17317175b \ --hash=sha256:4daabe7c0404e673b9029aa43761c779b9b4df2cbe11ccd94daded6a0acd8808 \
--hash=sha256:4babef08bccbf223ec34464e1ed0a23caeaeea390ca9a3529227d9a57f0d6ee4 \ --hash=sha256:4e15af025e02b04b0d0728e8248e4384d3dc7a3a89a020f5bd4d04ef2c5d9d4c \
--hash=sha256:53cb9c1208511cda0b9ed11cffee992a5a2f5d96eb88722569b2ce65ecf6b960 \ --hash=sha256:5b4d3c4a069a05972e0ed7111071bbcb4727ac652b5d7e8f786e8ea2fe63306b \
--hash=sha256:549449d9461d6c665cbe8af4a3808805c5e6e037cd2ce4fd93308d44a049bfac \ --hash=sha256:67ad8b2ad15a99ae70e287454a112f67d2abaf160ee9c97f9daebf2296066447 \
--hash=sha256:5f5b3089b200ff33de3f636b398e7199b57a6b5c1bb724bdb884580a072a14b5 \ --hash=sha256:6d7e6fb69d9fd2c57e177f8a9cdf6489a725da77568e3d0a226c7dd18504396a \
--hash=sha256:a4d9bf6e1fa2dd6e73f1873f1a47cee11a6ba0cf9ba8cf7002b28c76823600d0 \ --hash=sha256:7907d7a5adde7cd07d19f129a4afa892b68b0b52a07eaf989e48e2677040b4bf \
--hash=sha256:a4ee6026216f1fbe25c8847f9e0fbce907df5b908f84816e21af16ec7666e6fe \ --hash=sha256:88210edafd564c8ff4a68716aaf0627e3bc43e9c192a33d6f5616743f72c2d9b \
--hash=sha256:a91a308a5e0cc99de1e97afd8f09f46dd7ca20cfaa5890ef254113eebaa1adff \ --hash=sha256:8a6b14a90bdcbcdc268acae87126c33bf4250d3842803a93a548d7c10135893a \
--hash=sha256:b0342540da479d2713edc68fb21f307473f68da896ad5c04215dae97630e0069 \ --hash=sha256:94a10446ad61965516aecd610a2dd28d79ab1dfd8723903e1bd19ffa985c208e \
--hash=sha256:f997e21b4e26a3397cb7b255b8d1db5b9772c8e0c94b6d870a5a0ab5c27eacaa --hash=sha256:99bda900a0bf6f9e6c69bdeb6114f7f6730b9d36a47bc1fe144263ce85bfc403 \
setuptools==42.0.2 \ --hash=sha256:9dae2e738622bd35ba82fe0b06f773be137a14e6b28defb2e36efc2d809cd28a \
--hash=sha256:c5b372090d7c8709ce79a6a66872a91e518f7d65af97fca78135e1cb10d4b940 \ --hash=sha256:a04cd6021ff2756c38135a95f81b980485507bccbff4d2b8f62e537552270471 \
--hash=sha256:c8abd0f3574bc23afd2f6fd2c415ba7d9e097c8a99b845473b0d957ba1e2dac6 --hash=sha256:a3b61625b60dd5e72556520a77464e2ac568c20b8ad12ea1f4443bf5051dc624 \
wheel==0.33.6 \ --hash=sha256:a9a91fd9e7967a5bad88d542c9fce09323e15d16cb6fa9b8978390e46e68cbdf \
--hash=sha256:10c9da68765315ed98850f8e048347c3eb06dd81822dc2ab1d4fde9dc9702646 \ --hash=sha256:afc44f1b595bd736ec3762dd9a2d0ef276a6ac560c85f643acfc4c0bf0c73384 \
--hash=sha256:f4da1763d3becf2e2cd92a14a7c920f0f00eca30fdde9ea992c836685b9faf28 --hash=sha256:b5f3c8912b36e6abb843a51eecb414a1161f80c0ca0b65066c23aa449b5f98db \
--hash=sha256:cc07c8b7686dd7093f33067a02b92f4fed860d75ad2bcc4e60624f70fdb94576 \
--hash=sha256:da646eddbe026306fd1cb2c392a9aee4ebea13f2a9add9af303bb3151786a5d8 \
--hash=sha256:df93eaccd5c09e6380fab8f15c06a89944415e4bb9af64a94f467ce4c782ff8e \
--hash=sha256:e667303019770834354c75022ab0324d5ae5bf7cd7015939678033a58f87ee70 \
--hash=sha256:f921219040ce994c9118b7218b7f7b4e9394e507c97cfc869ce5358437fc26cd
PyQt5==5.10.1 \
--hash=sha256:1e652910bd1ffd23a3a48c510ecad23a57a853ed26b782cd54b16658e6f271ac \
--hash=sha256:4db7113f464c733a99fcb66c4c093a47cf7204ad3f8b3bda502efcc0839ac14b \
--hash=sha256:9c17ab3974c1fc7bbb04cc1c9dae780522c0ebc158613f3025fccae82227b5f7 \
--hash=sha256:f6035baa009acf45e5f460cf88f73580ad5dc0e72330029acd99e477f20a5d61
setuptools==40.0.0 \
--hash=sha256:012adb8e25fbfd64c652e99e7bab58799a3aaf05d39ab38561f69190a909015f \
--hash=sha256:d68abee4eed409fbe8c302ac4d8429a1ffef912cd047a903b5701c024048dd49
SIP==4.19.8 \
--hash=sha256:09f9a4e6c28afd0bafedb26ffba43375b97fe7207bd1a0d3513f79b7d168b331 \
--hash=sha256:105edaaa1c8aa486662226360bd3999b4b89dd56de3e314d82b83ed0587d8783 \
--hash=sha256:1bb10aac55bd5ab0e2ee74b3047aa2016cfa7932077c73f602a6f6541af8cd51 \
--hash=sha256:265ddf69235dd70571b7d4da20849303b436192e875ce7226be7144ca702a45c \
--hash=sha256:52074f7cb5488e8b75b52f34ec2230bc75d22986c7fe5cd3f2d266c23f3349a7 \
--hash=sha256:5ff887a33839de8fc77d7f69aed0259b67a384dc91a1dc7588e328b0b980bde2 \
--hash=sha256:74da4ddd20c5b35c19cda753ce1e8e1f71616931391caeac2de7a1715945c679 \
--hash=sha256:7d69e9cf4f8253a3c0dfc5ba6bb9ac8087b8239851f22998e98cb35cfe497b68 \
--hash=sha256:97bb93ee0ef01ba90f57be2b606e08002660affd5bc380776dd8b0fcaa9e093a \
--hash=sha256:cf98150a99e43fda7ae22abe655b6f202e491d6291486548daa56cb15a2fcf85 \
--hash=sha256:d9023422127b94d11c1a84bfa94933e959c484f2c79553c1ef23c69fe00d25f8 \
--hash=sha256:e72955e12f4fccf27aa421be383453d697b8a44bde2cc26b08d876fd492d0174
wheel==0.31.1 \
--hash=sha256:0a2e54558a0628f2145d2fc822137e322412115173e8a2ddbe1c9024338ae83c \
--hash=sha256:80044e51ec5bbf6c894ba0bc48d26a8c20a9ba629f4ca19ea26ecfcf87685f5f

View file

@ -1,56 +1,49 @@
btchip-python==0.1.28 \ btchip-python==0.1.27 \
--hash=sha256:da09d0d7a6180d428833795ea9a233c3b317ddfcccea8cc6f0eba59435e5dd83 --hash=sha256:e58a941abbb2d8901bf4858baa18012537c60812c7f895f9a039113ecce3032b
certifi==2019.11.28 \ certifi==2018.4.16 \
--hash=sha256:017c25db2a153ce562900032d5bc68e9f191e44e9a0f762f373977de9df1fbb3 \ --hash=sha256:13e698f54293db9f89122b0581843a782ad0934a4fe0172d2a980ba77fc61bb7 \
--hash=sha256:25b64c7da4cd7479594d035c08c2d809eb4aab3a26e5a990ea98cc450c320f1f --hash=sha256:9fa520c1bacfb634fa7af20a76bcbd3d5fb390481724c597da32c719a7dca4b0
chardet==3.0.4 \ chardet==3.0.4 \
--hash=sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae \ --hash=sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae \
--hash=sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691 --hash=sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691
ckcc-protocol==0.8.0 \ click==6.7 \
--hash=sha256:bad1d1448423472df95ba67621fdd0ad919e625fbe0a4d3ba93648f34ea286e0 \ --hash=sha256:29f99fc6125fbc931b758dc053b3114e55c77a6e4c6c3a2674a2dc986016381d \
--hash=sha256:f0851c98b91825d19567d0d3bac1b28044d40a3d5f194c8b04c5338f114d7ad5 --hash=sha256:f15516df478d5a56180fbf80e68f206010e6d160fc39fa508b65e035fd75130b
click==7.0 \ Cython==0.28.4 \
--hash=sha256:2335065e6395b9e67ca716de5f7526736bfa6ceead690adf616d925bdc622b13 \ --hash=sha256:01487236575df8f17b46982071438dce4f7eaf8acc8fb99fca3510d343cd7a28 \
--hash=sha256:5b94b49521f6456670fdb30cd82a4eca9412788a93fa6dd6df72c94d5a8ff2d7 --hash=sha256:0671d17c7a27634d6819246e535241b951141ed0e3f6f2a6d618fd32344dae3e \
construct==2.9.45 \ --hash=sha256:0e6190d6971c46729f712dd7307a9c0a8c027bfa5b4d8f2edef106b01759926c \
--hash=sha256:2271a0efd0798679dea825ff47e22a4c550456a5db0ba8baa82f7eae0af0118c --hash=sha256:202587c754901d0678bd6ff89c707f099987928239049a528470c06c6c922cf8 \
Cython==0.29.10 \ --hash=sha256:345197ba9278cf6a914cb7421dc665a0531a219b0072abf6b0cebfdf68e75725 \
--hash=sha256:0afa0b121b89de619e71587e25702e2b7068d7da2164c47e6eee80c17823a62f \ --hash=sha256:3a296b8d6b02f0e01ab04bedea658f43eef5ad2f8e586a820226ead1a677d9b1 \
--hash=sha256:1c608ba76f7a20cc9f0c021b7fe5cb04bc1a70327ae93a9298b1bc3e0edddebe \ --hash=sha256:484572a2b22823a967be106137a93f7d634db116b3f7accb37dbd760eda2fa9f \
--hash=sha256:26229570d6787ff3caa932fe9d802960f51a89239b990d275ae845405ce43857 \ --hash=sha256:4c67c9c803e50ceff32cc5e4769c50fc8ae8df9c4e5cc592ce8310b5a1076d23 \
--hash=sha256:2a9deafa437b6154cac2f25bb88e0bfd075a897c8dc847669d6f478d7e3ee6b1 \ --hash=sha256:539038087c321911745fc2e77049209b1231300d481cb4d682b2f95c724814b3 \
--hash=sha256:2f28396fbce6d9d68a40edbf49a6729cf9d92a4d39ff0f501947a89188e9099f \ --hash=sha256:58113e0683c3688594c112103d7e9f2d0092fd2d8297a220240bea22e184dfdd \
--hash=sha256:3983dd7b67297db299b403b29b328d9e03e14c4c590ea90aa1ad1d7b35fb178b \ --hash=sha256:65cb25ca4284804293a2404d1be3b5a98818be21a72791649bacbcfa4e431d41 \
--hash=sha256:4100a3f8e8bbe47d499cdac00e56d5fe750f739701ea52dc049b6c56f5421d97 \ --hash=sha256:699e765da2580e34b08473fc0acef3a2d7bcb7f13eb29401cd25236bcf000080 \
--hash=sha256:51abfaa7b6c66f3f18028876713c8804e73d4c2b6ceddbcbcfa8ec62429377f0 \ --hash=sha256:6b54c3470810cea49a8be90814d05c5325ceb9c5bf429fd86c36fc1b32dfc157 \
--hash=sha256:61c24f4554efdb8fb1ac6c8e75dab301bcdf2b7b739ed0c2b267493bb43163c5 \ --hash=sha256:71ac1629e4eae2ed329be8caf45efea10bfe1af3d8767e12e64b83e4ea5a3250 \
--hash=sha256:700ccf921b2fdc9b23910e95b5caae4b35767685e0812343fa7172409f1b5830 \ --hash=sha256:722c179d3df8677f3daf45b1a2764678ed4f0aaddbaa7211a8a08ebfd907c0db \
--hash=sha256:7b41eb2e792822a790cb2a171df49d1a9e0baaa8e81f58077b7380a273b93d5f \ --hash=sha256:76ac2b08d3d956d77b574bb43cbf1d37bd58b9d50c04ba281303e695854ebc46 \
--hash=sha256:803987d3b16d55faa997bfc12e8b97f1091f145930dee229b020487aed8a1f44 \ --hash=sha256:7eff1157be9e26bf7494288c89979ca69d593a009e2c7420a739e2cf1e0635f5 \
--hash=sha256:99af5cfcd208c81998dcf44b3ca466dee7e17453cfb50e98b87947c3a86f8753 \ --hash=sha256:99546c8696d27d0efa639c77b2f8af6e61dc3a5073caae4f27ffd991ca926f42 \
--hash=sha256:9faea1cca34501c7e139bc7ef8e504d532b77865c58592493e2c154a003b450f \ --hash=sha256:a0c263b31d335f29c11f4a9e98fbcd908d0731d4ea99bfd27c1c47caaeb4ca2e \
--hash=sha256:a7ba4c9a174db841cfee9a0b92563862a0301d7ca543334666c7266b541f141a \ --hash=sha256:a29c66292605bff962adc26530c030607aa699206b12dfb84f131b0454e15df4 \
--hash=sha256:b26071c2313d1880599c69fd831a07b32a8c961ba69d7ccbe5db1cd8d319a4ca \ --hash=sha256:a4d3724c5a1ddd86d7d830d8e02c40151839b833791dd4b6fe9e144380fa7d37 \
--hash=sha256:b49dc8e1116abde13a3e6a9eb8da6ab292c5a3325155fb872e39011b110b37e6 \ --hash=sha256:aed9f33b19d542eea56c38ef3862ca56147f7903648156cd57eabb0fe47c35d6 \
--hash=sha256:bd40def0fd013569887008baa6da9ca428e3d7247adeeaeada153006227bb2e7 \ --hash=sha256:b57e733dd8871d2cc7358c2e0fe33027453afffbcd0ea6a537f54877cad5131c \
--hash=sha256:bfd0db770e8bd4e044e20298dcae6dfc42561f85d17ee546dcd978c8b23066ae \ --hash=sha256:d5bf4db62236e82955c40bafbaa18d54b20b5ceefa06fb57c7facc443929f4bd \
--hash=sha256:c2fad1efae5889925c8fd7867fdd61f59480e4e0b510f9db096c912e884704f1 \ --hash=sha256:d9272dd71ab78e87fa34a0a59bbd6acc9a9c0005c834a6fc8457ff9619dc6795 \
--hash=sha256:c81aea93d526ccf6bc0b842c91216ee9867cd8792f6725a00f19c8b5837e1715 \ --hash=sha256:e9d5671bcbb90a41b0832fcb3872fcbaca3d68ff11ea09724dd6cbdf31d947fb \
--hash=sha256:da786e039b4ad2bce3d53d4799438cf1f5e01a0108f1b8d78ac08e6627281b1a \ --hash=sha256:ee54646afb2b73b293c94cf079682d18d404ebd6c01122dc3980f111aec2d8ae \
--hash=sha256:deab85a069397540987082d251e9c89e0e5b2e3e044014344ff81f60e211fc4b \ --hash=sha256:f16a87197939977824609005b73f9ebb291b9653a14e5f27afc1c5d6f981ba39
--hash=sha256:e3f1e6224c3407beb1849bdc5ae3150929e593e4cffff6ca41c6ec2b10942c80 \ ecdsa==0.13 \
--hash=sha256:e74eb224e53aae3943d66e2d29fe42322d5753fd4c0641329bccb7efb3a46552 \ --hash=sha256:40d002cf360d0e035cf2cb985e1308d41aaa087cbfc135b2dc2d844296ea546c \
--hash=sha256:ee697c7ea65cb14915a64f36874da8ffc2123df43cf8bc952172e04a26656cd6 \ --hash=sha256:64cf1ee26d1cde3c73c6d7d107f835fed7c6a2904aef9eac223d57ad800c43fa
--hash=sha256:f37792b16d11606c28e428460bd6a3d14b8917b109e77cdbe4ca78b0b9a52c87 \
--hash=sha256:fd2906b54cbf879c09d875ad4e4687c58d87f5ed03496063fec1c9065569fd5d
ecdsa==0.14.1 \
--hash=sha256:64c613005f13efec6541bb0a33290d0d03c27abab5f15fbab20fb0ee162bdd8e \
--hash=sha256:e108a5fe92c67639abae3260e43561af914e7fd0d27bae6d2ec1312ae7934dfe
hidapi==0.7.99.post21 \ hidapi==0.7.99.post21 \
--hash=sha256:1ac170f4d601c340f2cd52fd06e85c5e77bad7ceac811a7bb54b529f7dc28c24 \ --hash=sha256:1ac170f4d601c340f2cd52fd06e85c5e77bad7ceac811a7bb54b529f7dc28c24 \
--hash=sha256:6424ad75da0021ce8c1bcd78056a04adada303eff3c561f8d132b85d0a914cb3 \
--hash=sha256:8d3be666f464347022e2b47caf9132287885d9eacc7895314fc8fefcb4e42946 \ --hash=sha256:8d3be666f464347022e2b47caf9132287885d9eacc7895314fc8fefcb4e42946 \
--hash=sha256:92878bad7324dee619b7832fbfc60b5360d378aa7c5addbfef0a410d8fd342c7 \
--hash=sha256:b4b1f6aff0192e9be153fe07c1b7576cb7a1ff52e78e3f76d867be95301a8e87 \ --hash=sha256:b4b1f6aff0192e9be153fe07c1b7576cb7a1ff52e78e3f76d867be95301a8e87 \
--hash=sha256:bf03f06f586ce7d8aeb697a94b7dba12dc9271aae92d7a8d4486360ff711a660 \ --hash=sha256:bf03f06f586ce7d8aeb697a94b7dba12dc9271aae92d7a8d4486360ff711a660 \
--hash=sha256:c76de162937326fcd57aa399f94939ce726242323e65c15c67e183da1f6c26f7 \ --hash=sha256:c76de162937326fcd57aa399f94939ce726242323e65c15c67e183da1f6c26f7 \
@ -58,41 +51,36 @@ hidapi==0.7.99.post21 \
--hash=sha256:d4b5787a04613503357606bb10e59c3e2c1114fa00ee328b838dd257f41cbd7b \ --hash=sha256:d4b5787a04613503357606bb10e59c3e2c1114fa00ee328b838dd257f41cbd7b \
--hash=sha256:e0be1aa6566979266a8fc845ab0e18613f4918cf2c977fe67050f5dc7e2a9a97 \ --hash=sha256:e0be1aa6566979266a8fc845ab0e18613f4918cf2c977fe67050f5dc7e2a9a97 \
--hash=sha256:edfb16b16a298717cf05b8c8a9ad1828b6ff3de5e93048ceccd74e6ae4ff0922 --hash=sha256:edfb16b16a298717cf05b8c8a9ad1828b6ff3de5e93048ceccd74e6ae4ff0922
idna==2.8 \ idna==2.7 \
--hash=sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407 \ --hash=sha256:156a6814fb5ac1fc6850fb002e0852d56c0c8d2531923a51032d1b70760e186e \
--hash=sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c --hash=sha256:684a38a6f903c1d71d6d5fac066b58d7768af4de2b832e426ec79c30daa94a16
keepkey==6.3.1 \ keepkey==4.0.2 \
--hash=sha256:88e2b5291c85c8e8567732f675697b88241082884aa1aba32257f35ee722fc09 \ --hash=sha256:cddee60ae405841cdff789cbc54168ceaeb2282633420f2be155554c25c69138
--hash=sha256:cef1e862e195ece3e42640a0f57d15a63086fd1dedc8b5ddfcbc9c2657f0bb1e \ libusb1==1.6.4 \
--hash=sha256:f369d640c65fec7fd8e72546304cdc768c04224a6b9b00a19dc2cd06fa9d2a6b --hash=sha256:8c930d9c1d037d9c83924c82608aa6a1adcaa01ca0e4a23ee0e8e18d7eee670d
libusb1==1.7.1 \ mnemonic==0.18 \
--hash=sha256:adf64a4f3f5c94643a1286f8153bcf4bc787c348b38934aacd7fe17fbeebc571 --hash=sha256:02a7306a792370f4a0c106c2cf1ce5a0c84b9dbd7e71c6792fdb9ad88a727f1d
mnemonic==0.19 \ pbkdf2==1.3 \
--hash=sha256:4e37eb02b2cbd56a0079cabe58a6da93e60e3e4d6e757a586d9f23d96abea931 \ --hash=sha256:ac6397369f128212c43064a2b4878038dab78dab41875364554aaf2a684e6979
--hash=sha256:a8d78c5100acfa7df9bab6b9db7390831b0e54490934b718ff9efd68f0d731a6 pip==10.0.1 \
pip==19.3.1 \ --hash=sha256:717cdffb2833be8409433a93746744b59505f42146e8d37de6c62b430e25d6d7 \
--hash=sha256:21207d76c1031e517668898a6b46a9fb1501c7a4710ef5dfd6a40ad9e6757ea7 \ --hash=sha256:f2bd08e0cd1b06e10218feaf6fef299f473ba706582eb3bd9d52203fdbd7ee68
--hash=sha256:6917c65fc3769ecdc61405d3dfd97afdedd75808d200b2838d7d961cebc0c2c7 protobuf==3.6.0 \
protobuf==3.11.1 \ --hash=sha256:12985d9f40c104da2f44ec089449214876809b40fdc5d9e43b93b512b9e74056 \
--hash=sha256:0265379852b9e1f76af6d3d3fe4b3c383a595cc937594bda8565cf69a96baabd \ --hash=sha256:12c97fe27af12fc5d66b23f905ab09dd4fb0c68d5a74a419d914580e6d2e71e3 \
--hash=sha256:200b77e51f17fbc1d3049045f5835f60405dec3a00fe876b9b986592e46d908c \ --hash=sha256:327fb9d8a8247bc780b9ea7ed03c0643bc0d22c139b761c9ec1efc7cc3f0923e \
--hash=sha256:29bd1ed46b2536ad8959401a2f02d2d7b5a309f8e97518e4f92ca6c5ba74dbed \ --hash=sha256:3895319db04c0b3baed74fb66be7ba9f4cd8e88a432b8e71032cdf08b2dfee23 \
--hash=sha256:3175d45698edb9a07c1a78a1a4850e674ce8988f20596580158b1d0921d0f057 \ --hash=sha256:695072063e256d32335d48b9484451f7c7948edc3dbd419469d6a778602682fc \
--hash=sha256:34a7270940f86da7a28be466ac541c89b6dbf144a6348b9cf7ac6f56b71006ce \ --hash=sha256:7d786f3ef5b33a04e6538089674f244a3b0f588155016559d950989010af97d0 \
--hash=sha256:38cbc830a4a5ba9956763b0f37090bfd14dd74e72762be6225de2ceac55f4d03 \ --hash=sha256:8bf82bb7a466a54be7272dcb492f71d55a2453a58d862fb74c3f2083f2768543 \
--hash=sha256:665194f5ad386511ac8d8a0bd57b9ab37b8dd2cd71969458777318e774b9cd46 \ --hash=sha256:9bbc1ae1c33c1bd3a2fc05a3aec328544d2b039ff0ce6f000063628a32fad777 \
--hash=sha256:839bad7d115c77cdff29b488fae6a3ab503ce9a4192bd4c42302a6ea8e5d0f33 \ --hash=sha256:9e992c68103ab5635728d29fcf132c669cb4e2db24d012685210276185009d17 \
--hash=sha256:934a9869a7f3b0d84eca460e386fba1f7ba2a0c1a120a2648bc41fadf50efd1c \ --hash=sha256:9f1087abb67b34e55108bc610936b34363a7aac692023bcbb17e065c253a1f80 \
--hash=sha256:aecdf12ef6dc7fd91713a6da93a86c2f2a8fe54840a3b1670853a2b7402e77c9 \ --hash=sha256:9fefcb92a3784b446abf3641d9a14dad815bee88e0edd10b9a9e0e144d01a991 \
--hash=sha256:c4e90bc27c0691c76e09b5dc506133451e52caee1472b8b3c741b7c912ce43ef \ --hash=sha256:a37836aa47d1b81c2db1a6b7a5e79926062b5d76bd962115a0e615551be2b48d \
--hash=sha256:c65d135ea2d85d40309e268106dab02d3bea723db2db21c23ecad4163ced210b \ --hash=sha256:cca22955443c55cf86f963a4ad7057bca95e4dcde84d6a493066d380cfab3bb0 \
--hash=sha256:c98dea04a1ff41a70aff2489610f280004831798cb36a068013eed04c698903d \ --hash=sha256:d7ac50bc06d31deb07ace6de85556c1d7330e5c0958f3b2af85037d6d1182abf \
--hash=sha256:d9049aa194378a426f0b2c784e2054565bf6f754d20fcafdee7102a6250556e8 \ --hash=sha256:dfe6899304b898538f4dc94fa0b281b56b70e40f58afa4c6f807805261cbe2e8
--hash=sha256:e028fee51c96de4e81924484c77111dfdea14010ecfc906ea5b252209b0c4de6 \
--hash=sha256:e84ad26fb50091b1ea676403c0dd2bd47663099454aa6d88000b1dafecab0941 \
--hash=sha256:e88a924b591b06d0191620e9c8aa75297b3111066bb09d49a24bae1054a10c13
pyaes==1.6.1 \
--hash=sha256:02c1b1405c38d3c370b085fb952dd8bea3fadcee6411ad99f312cc129c536d8f
pyblake2==1.1.2 \ pyblake2==1.1.2 \
--hash=sha256:3757f7ad709b0e1b2a6b3919fa79fe3261f166fc375cd521f2be480f8319dde9 \ --hash=sha256:3757f7ad709b0e1b2a6b3919fa79fe3261f166fc375cd521f2be480f8319dde9 \
--hash=sha256:407e02c7f8f36fcec1b7aa114ddca0c1060c598142ea6f6759d03710b946a7e3 \ --hash=sha256:407e02c7f8f36fcec1b7aa114ddca0c1060c598142ea6f6759d03710b946a7e3 \
@ -103,28 +91,32 @@ pyblake2==1.1.2 \
--hash=sha256:baa2190bfe549e36163aa44664d4ee3a9080b236fc5d42f50dc6fd36bbdc749e \ --hash=sha256:baa2190bfe549e36163aa44664d4ee3a9080b236fc5d42f50dc6fd36bbdc749e \
--hash=sha256:c53417ee0bbe77db852d5fd1036749f03696ebc2265de359fe17418d800196c4 \ --hash=sha256:c53417ee0bbe77db852d5fd1036749f03696ebc2265de359fe17418d800196c4 \
--hash=sha256:fbc9fcde75713930bc2a91b149e97be2401f7c9c56d735b46a109210f58d7358 --hash=sha256:fbc9fcde75713930bc2a91b149e97be2401f7c9c56d735b46a109210f58d7358
requests==2.22.0 \ requests==2.19.1 \
--hash=sha256:11e007a8a2aa0323f5a921e9e6a2d7e4e67d9877e85773fba9ba6419025cbeb4 \ --hash=sha256:63b52e3c866428a224f97cab011de738c36aec0185aa91cfacd418b5d58911d1 \
--hash=sha256:9cf5292fcd0f598c671cfc1e0d7d1a7f13bb8085e9a590f48c010551dc6c4b31 --hash=sha256:ec22d826a36ed72a7358ff3fe56cbd4ba69dd7a6718ffd450ff0e9df7a47ce6a
safet==0.1.4 \ safet==0.1.3 \
--hash=sha256:522c257910f9472e9c77c487425ed286f6721c314653e232bc41c6cedece1bb1 \ --hash=sha256:ba80fe9f6ba317ab9514a8726cd3792e68eb46dd419f380d48ae4a0ccae646dc \
--hash=sha256:b152874acdc89ff0c8b2d680bfbf020b3e53527c2ad3404489dd61a548aa56a1 --hash=sha256:e5d8e6a87c8bdf1cefd07004181b93fd7631557fdab09d143ba8d1b29291d6dc
setuptools==42.0.2 \ setuptools==40.0.0 \
--hash=sha256:c5b372090d7c8709ce79a6a66872a91e518f7d65af97fca78135e1cb10d4b940 \ --hash=sha256:012adb8e25fbfd64c652e99e7bab58799a3aaf05d39ab38561f69190a909015f \
--hash=sha256:c8abd0f3574bc23afd2f6fd2c415ba7d9e097c8a99b845473b0d957ba1e2dac6 --hash=sha256:d68abee4eed409fbe8c302ac4d8429a1ffef912cd047a903b5701c024048dd49
six==1.13.0 \ six==1.11.0 \
--hash=sha256:1f1b7d42e254082a9db6279deae68afb421ceba6158efa6131de7b3003ee93fd \ --hash=sha256:70e8a77beed4562e7f14fe23a786b54f6296e34344c23bc42f07b15018ff98e9 \
--hash=sha256:30f610279e8b2578cab6db20741130331735c781b56053c59c4076da27f06b66 --hash=sha256:832dc0e10feb1aa2c68dcc57dbb658f1c7e65b9b61af69048abc87a2db00a0eb
trezor==0.11.5 \ trezor==0.10.2 \
--hash=sha256:711137bb83e7e0aef4009745e0da1b7d258146f246b43e3f7f5b849405088ef1 \ --hash=sha256:4dba4d5c53d3ca22884d79fb4aa68905fb8353a5da5f96c734645d8cf537138d \
--hash=sha256:cd8aafd70a281daa644c4a3fb021ffac20b7a88e86226ecc8bb3e78e1734a184 --hash=sha256:d2b32f25982ab403758d870df1d0de86d0751c106ef1cd1289f452880ce68b84
typing-extensions==3.7.4.1 \ urllib3==1.23 \
--hash=sha256:091ecc894d5e908ac75209f10d5b4f118fbdb2eb1ede6a63544054bb1edb41f2 \ --hash=sha256:a68ac5e15e76e7e5dd2b8f94007233e01effe3e50e8daddf69acfd81cb686baf \
--hash=sha256:910f4656f54de5993ad9304959ce9bb903f90aadc7c67a0bef07e678014e892d \ --hash=sha256:b5725a0bd4ba422ab0e66e89e030c806576753ea3ee08554382c14e685d117b5
--hash=sha256:cf8b63fedea4d89bab840ecbb93e75578af28f76f66c35889bd7065f5af88575 websocket-client==0.48.0 \
urllib3==1.25.7 \ --hash=sha256:18f1170e6a1b5463986739d9fd45c4308b0d025c1b2f9b88788d8f69e8a5eb4a \
--hash=sha256:a8a318824cc77d1fd4b2bec2ded92646630d7fe8619497b142c84a9e6f5a7293 \ --hash=sha256:db70953ae4a064698b27ae56dcad84d0ee68b7b43cb40940f537738f38f510c1
--hash=sha256:f3c5fd51747d450d4dcf6f923c81f78f811aab8205fda64b0aba34a4e48b0745 wheel==0.31.1 \
wheel==0.33.6 \ --hash=sha256:0a2e54558a0628f2145d2fc822137e322412115173e8a2ddbe1c9024338ae83c \
--hash=sha256:10c9da68765315ed98850f8e048347c3eb06dd81822dc2ab1d4fde9dc9702646 \ --hash=sha256:80044e51ec5bbf6c894ba0bc48d26a8c20a9ba629f4ca19ea26ecfcf87685f5f
--hash=sha256:f4da1763d3becf2e2cd92a14a7c920f0f00eca30fdde9ea992c836685b9faf28 pyaes==1.6.1 \
--hash=sha256:02c1b1405c38d3c370b085fb952dd8bea3fadcee6411ad99f312cc129c536d8f
ckcc-protocol==0.7.2 \
--hash=sha256:498db4ccdda018cd9f40210f5bd02ddcc98e7df583170b2eab4035c86c3cc03b \
--hash=sha256:31ee5178cfba8895eb2a6b8d06dc7830b51461a0ff767a670a64707c63e6b264


@ -1,19 +0,0 @@
altgraph==0.16.1 \
--hash=sha256:d6814989f242b2b43025cba7161fc1b8fb487a62cd49c49245d6fd01c18ac997 \
--hash=sha256:ddf5320017147ba7b810198e0b6619bd7b5563aa034da388cea8546b877f9b0c
future==0.18.2 \
--hash=sha256:b1bead90b70cf6ec3f0710ae53a525360fa360d306a86583adc6bf83a4db537d
pefile==2019.4.18 \
--hash=sha256:a5d6e8305c6b210849b47a6174ddf9c452b2888340b8177874b862ba6c207645
pip==19.3.1 \
--hash=sha256:21207d76c1031e517668898a6b46a9fb1501c7a4710ef5dfd6a40ad9e6757ea7 \
--hash=sha256:6917c65fc3769ecdc61405d3dfd97afdedd75808d200b2838d7d961cebc0c2c7
pywin32-ctypes==0.2.0 \
--hash=sha256:24ffc3b341d457d48e8922352130cf2644024a4ff09762a2261fd34c36ee5942 \
--hash=sha256:9dc2d991b3479cc2df15930958b674a48a227d5361d413827a4cfd0b5876fc98
setuptools==42.0.2 \
--hash=sha256:c5b372090d7c8709ce79a6a66872a91e518f7d65af97fca78135e1cb10d4b940 \
--hash=sha256:c8abd0f3574bc23afd2f6fd2c415ba7d9e097c8a99b845473b0d957ba1e2dac6
wheel==0.33.6 \
--hash=sha256:10c9da68765315ed98850f8e048347c3eb06dd81822dc2ab1d4fde9dc9702646 \
--hash=sha256:f4da1763d3becf2e2cd92a14a7c920f0f00eca30fdde9ea992c836685b9faf28


@ -1,183 +1,69 @@
aiohttp==3.6.2 \ certifi==2018.4.16 \
--hash=sha256:1e984191d1ec186881ffaed4581092ba04f7c61582a177b187d3a2f07ed9719e \ --hash=sha256:13e698f54293db9f89122b0581843a782ad0934a4fe0172d2a980ba77fc61bb7 \
--hash=sha256:259ab809ff0727d0e834ac5e8a283dc5e3e0ecc30c4d80b3cd17a4139ce1f326 \ --hash=sha256:9fa520c1bacfb634fa7af20a76bcbd3d5fb390481724c597da32c719a7dca4b0
--hash=sha256:2f4d1a4fdce595c947162333353d4a44952a724fba9ca3205a3df99a33d1307a \
--hash=sha256:32e5f3b7e511aa850829fbe5aa32eb455e5534eaa4b1ce93231d00e2f76e5654 \
--hash=sha256:344c780466b73095a72c616fac5ea9c4665add7fc129f285fbdbca3cccf4612a \
--hash=sha256:460bd4237d2dbecc3b5ed57e122992f60188afe46e7319116da5eb8a9dfedba4 \
--hash=sha256:4c6efd824d44ae697814a2a85604d8e992b875462c6655da161ff18fd4f29f17 \
--hash=sha256:50aaad128e6ac62e7bf7bd1f0c0a24bc968a0c0590a726d5a955af193544bcec \
--hash=sha256:6206a135d072f88da3e71cc501c59d5abffa9d0bb43269a6dcd28d66bfafdbdd \
--hash=sha256:65f31b622af739a802ca6fd1a3076fd0ae523f8485c52924a89561ba10c49b48 \
--hash=sha256:ae55bac364c405caa23a4f2d6cfecc6a0daada500274ffca4a9230e7129eac59 \
--hash=sha256:b778ce0c909a2653741cb4b1ac7015b5c130ab9c897611df43ae6a58523cb965
aiohttp-socks==0.2.2 \
--hash=sha256:e473ee222b001fe33798957b9ce3352b32c187cf41684f8e2259427925914993 \
--hash=sha256:eebd8939a7c3c1e3e7e1b2552c60039b4c65ef6b8b2351efcbdd98290538e310
aiorpcX==0.18.4 \
--hash=sha256:bec9c0feb328d62ba80b79931b07f7372c98f2891ad51300be0b7163d5ccfb4a \
--hash=sha256:d424a55bcf52ebf1b3610a7809c0748fac91ce926854ad33ce952463bc6017e8
apply-defaults==0.1.4 \
--hash=sha256:1ce26326a61d8773d38a9726a345c6525a91a6120d7333af79ad792dacb6246c
async-timeout==3.0.1 \
--hash=sha256:0c3c816a028d47f659d6ff5c745cb2acf1f966da1fe5c19c77a70282b25f4c5f \
--hash=sha256:4291ca197d287d274d0b6cb5d6f8f8f82d434ed288f962539ff18cc9012f9ea3
attrs==19.3.0 \
--hash=sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c \
--hash=sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72
bitstring==3.1.6 \
--hash=sha256:7b60b0c300d0d3d0a24ec84abfda4b0eaed3dc56dc90f6cbfe497166c9ad8443 \
--hash=sha256:c97a8e2a136e99b523b27da420736ae5cb68f83519d633794a6a11192f69f8bf \
--hash=sha256:e392819965e7e0246e3cf6a51d5a54e731890ae03ebbfa3cd0e4f74909072096
certifi==2019.11.28 \
--hash=sha256:017c25db2a153ce562900032d5bc68e9f191e44e9a0f762f373977de9df1fbb3 \
--hash=sha256:25b64c7da4cd7479594d035c08c2d809eb4aab3a26e5a990ea98cc450c320f1f
chardet==3.0.4 \ chardet==3.0.4 \
--hash=sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae \ --hash=sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae \
--hash=sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691 --hash=sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691
click==6.7 \ dnspython==1.15.0 \
--hash=sha256:29f99fc6125fbc931b758dc053b3114e55c77a6e4c6c3a2674a2dc986016381d \ --hash=sha256:40f563e1f7a7b80dc5a4e76ad75c23da53d62f1e15e6e517293b04e1f84ead7c \
--hash=sha256:f15516df478d5a56180fbf80e68f206010e6d160fc39fa508b65e035fd75130b --hash=sha256:861e6e58faa730f9845aaaa9c6c832851fbf89382ac52915a51f89c71accdd31
dnspython==1.16.0 \ ecdsa==0.13 \
--hash=sha256:36c5e8e38d4369a08b6780b7f27d790a292b2b08eea01607865bf0936c558e01 \ --hash=sha256:40d002cf360d0e035cf2cb985e1308d41aaa087cbfc135b2dc2d844296ea546c \
--hash=sha256:f69c21288a962f4da86e56c4905b49d11aba7938d3d740e80d9e366ee4f1632d --hash=sha256:64cf1ee26d1cde3c73c6d7d107f835fed7c6a2904aef9eac223d57ad800c43fa
ecdsa==0.14.1 \ idna==2.7 \
--hash=sha256:64c613005f13efec6541bb0a33290d0d03c27abab5f15fbab20fb0ee162bdd8e \ --hash=sha256:156a6814fb5ac1fc6850fb002e0852d56c0c8d2531923a51032d1b70760e186e \
--hash=sha256:e108a5fe92c67639abae3260e43561af914e7fd0d27bae6d2ec1312ae7934dfe --hash=sha256:684a38a6f903c1d71d6d5fac066b58d7768af4de2b832e426ec79c30daa94a16
idna==2.8 \ jsonrpclib-pelix==0.3.1 \
--hash=sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407 \ --hash=sha256:5417b1508d5a50ec64f6e5b88907f111155d52607b218ff3ba9a777afb2e49e3 \
--hash=sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c --hash=sha256:bd89a6093bc4d47dc8a096197aacb827359944a4533be5193f3845f57b9f91b4
idna_ssl==1.1.0 \ pip==10.0.1 \
--hash=sha256:a933e3bb13da54383f9e8f35dc4f9cb9eb9b3b78c6b36f311254d6d0d92c6c7c --hash=sha256:717cdffb2833be8409433a93746744b59505f42146e8d37de6c62b430e25d6d7 \
importlib-metadata==1.1.0 \ --hash=sha256:f2bd08e0cd1b06e10218feaf6fef299f473ba706582eb3bd9d52203fdbd7ee68
--hash=sha256:b044f07694ef14a6683b097ba56bd081dbc7cdc7c7fe46011e499dfecc082f21 \ protobuf==3.6.0 \
--hash=sha256:e6ac600a142cf2db707b1998382cc7fc3b02befb7273876e01b8ad10b9652742 --hash=sha256:12985d9f40c104da2f44ec089449214876809b40fdc5d9e43b93b512b9e74056 \
jsonrpcclient==3.3.4 \ --hash=sha256:12c97fe27af12fc5d66b23f905ab09dd4fb0c68d5a74a419d914580e6d2e71e3 \
--hash=sha256:c50860409b73af9f94b648439caae3b4af80d5ac937f2a8ac7783de3d1050ba9 --hash=sha256:327fb9d8a8247bc780b9ea7ed03c0643bc0d22c139b761c9ec1efc7cc3f0923e \
jsonrpcserver==4.0.5 \ --hash=sha256:3895319db04c0b3baed74fb66be7ba9f4cd8e88a432b8e71032cdf08b2dfee23 \
--hash=sha256:240c517f49b0fdd3bfa428c9a7cc581126a0c43eca60d29762da124017d9d9f4 --hash=sha256:695072063e256d32335d48b9484451f7c7948edc3dbd419469d6a778602682fc \
jsonschema==3.2.0 \ --hash=sha256:7d786f3ef5b33a04e6538089674f244a3b0f588155016559d950989010af97d0 \
--hash=sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163 \ --hash=sha256:8bf82bb7a466a54be7272dcb492f71d55a2453a58d862fb74c3f2083f2768543 \
--hash=sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a --hash=sha256:9bbc1ae1c33c1bd3a2fc05a3aec328544d2b039ff0ce6f000063628a32fad777 \
more-itertools==8.0.0 \ --hash=sha256:9e992c68103ab5635728d29fcf132c669cb4e2db24d012685210276185009d17 \
--hash=sha256:53ff73f186307d9c8ef17a9600309154a6ae27f25579e80af4db8f047ba14bc2 \ --hash=sha256:9f1087abb67b34e55108bc610936b34363a7aac692023bcbb17e065c253a1f80 \
--hash=sha256:a0ea684c39bc4315ba7aae406596ef191fd84f873d2d2751f84d64e81a7a2d45 --hash=sha256:9fefcb92a3784b446abf3641d9a14dad815bee88e0edd10b9a9e0e144d01a991 \
multidict==4.6.1 \ --hash=sha256:a37836aa47d1b81c2db1a6b7a5e79926062b5d76bd962115a0e615551be2b48d \
--hash=sha256:07f9a6bf75ad675d53956b2c6a2d4ef2fa63132f33ecc99e9c24cf93beb0d10b \ --hash=sha256:cca22955443c55cf86f963a4ad7057bca95e4dcde84d6a493066d380cfab3bb0 \
--hash=sha256:0ffe4d4d28cbe9801952bfb52a8095dd9ffecebd93f84bdf973c76300de783c5 \ --hash=sha256:d7ac50bc06d31deb07ace6de85556c1d7330e5c0958f3b2af85037d6d1182abf \
--hash=sha256:1b605272c558e4c659dbaf0fb32a53bfede44121bcf77b356e6e906867b958b7 \ --hash=sha256:dfe6899304b898538f4dc94fa0b281b56b70e40f58afa4c6f807805261cbe2e8
--hash=sha256:205a011e636d885af6dd0029e41e3514a46e05bb2a43251a619a6e8348b96fc0 \
--hash=sha256:250632316295f2311e1ed43e6b26a63b0216b866b45c11441886ac1543ca96e1 \
--hash=sha256:2bc9c2579312c68a3552ee816311c8da76412e6f6a9cf33b15152e385a572d2a \
--hash=sha256:318aadf1cfb6741c555c7dd83d94f746dc95989f4f106b25b8a83dfb547f2756 \
--hash=sha256:42cdd649741a14b0602bf15985cad0dd4696a380081a3319cd1ead46fd0f0fab \
--hash=sha256:5159c4975931a1a78bf6602bbebaa366747fce0a56cb2111f44789d2c45e379f \
--hash=sha256:87e26d8b89127c25659e962c61a4c655ec7445d19150daea0759516884ecb8b4 \
--hash=sha256:891b7e142885e17a894d9d22b0349b92bb2da4769b4e675665d0331c08719be5 \
--hash=sha256:8d919034420378132d074bf89df148d0193e9780c9fe7c0e495e895b8af4d8a2 \
--hash=sha256:9c890978e2b37dd0dc1bd952da9a5d9f245d4807bee33e3517e4119c48d66f8c \
--hash=sha256:a37433ce8cdb35fc9e6e47e1606fa1bfd6d70440879038dca7d8dd023197eaa9 \
--hash=sha256:c626029841ada34c030b94a00c573a0c7575fe66489cde148785b6535397d675 \
--hash=sha256:cfec9d001a83dc73580143f3c77e898cf7ad78b27bb5e64dbe9652668fcafec7 \
--hash=sha256:efaf1b18ea6c1f577b1371c0159edbe4749558bfe983e13aa24d0a0c01e1ad7b
pip==19.3.1 \
--hash=sha256:21207d76c1031e517668898a6b46a9fb1501c7a4710ef5dfd6a40ad9e6757ea7 \
--hash=sha256:6917c65fc3769ecdc61405d3dfd97afdedd75808d200b2838d7d961cebc0c2c7
protobuf==3.11.1 \
--hash=sha256:0265379852b9e1f76af6d3d3fe4b3c383a595cc937594bda8565cf69a96baabd \
--hash=sha256:200b77e51f17fbc1d3049045f5835f60405dec3a00fe876b9b986592e46d908c \
--hash=sha256:29bd1ed46b2536ad8959401a2f02d2d7b5a309f8e97518e4f92ca6c5ba74dbed \
--hash=sha256:3175d45698edb9a07c1a78a1a4850e674ce8988f20596580158b1d0921d0f057 \
--hash=sha256:34a7270940f86da7a28be466ac541c89b6dbf144a6348b9cf7ac6f56b71006ce \
--hash=sha256:38cbc830a4a5ba9956763b0f37090bfd14dd74e72762be6225de2ceac55f4d03 \
--hash=sha256:665194f5ad386511ac8d8a0bd57b9ab37b8dd2cd71969458777318e774b9cd46 \
--hash=sha256:839bad7d115c77cdff29b488fae6a3ab503ce9a4192bd4c42302a6ea8e5d0f33 \
--hash=sha256:934a9869a7f3b0d84eca460e386fba1f7ba2a0c1a120a2648bc41fadf50efd1c \
--hash=sha256:aecdf12ef6dc7fd91713a6da93a86c2f2a8fe54840a3b1670853a2b7402e77c9 \
--hash=sha256:c4e90bc27c0691c76e09b5dc506133451e52caee1472b8b3c741b7c912ce43ef \
--hash=sha256:c65d135ea2d85d40309e268106dab02d3bea723db2db21c23ecad4163ced210b \
--hash=sha256:c98dea04a1ff41a70aff2489610f280004831798cb36a068013eed04c698903d \
--hash=sha256:d9049aa194378a426f0b2c784e2054565bf6f754d20fcafdee7102a6250556e8 \
--hash=sha256:e028fee51c96de4e81924484c77111dfdea14010ecfc906ea5b252209b0c4de6 \
--hash=sha256:e84ad26fb50091b1ea676403c0dd2bd47663099454aa6d88000b1dafecab0941 \
--hash=sha256:e88a924b591b06d0191620e9c8aa75297b3111066bb09d49a24bae1054a10c13
pyaes==1.6.1 \ pyaes==1.6.1 \
--hash=sha256:02c1b1405c38d3c370b085fb952dd8bea3fadcee6411ad99f312cc129c536d8f --hash=sha256:02c1b1405c38d3c370b085fb952dd8bea3fadcee6411ad99f312cc129c536d8f
pycryptodomex==3.9.4 \ PySocks==1.6.8 \
--hash=sha256:0943b65fb41b7403a9def6214061fdd9ab9afd0bbc581e553c72eebe60bded36 \ --hash=sha256:3fe52c55890a248676fd69dc9e3c4e811718b777834bcaab7a8125cf9deac672
--hash=sha256:0a1dbb5c4d975a4ea568fb7686550aa225d94023191fb0cca8747dc5b5d77857 \ QDarkStyle==2.5.4 \
--hash=sha256:0f43f1608518347fdcb9c8f443fa5cabedd33f94188b13e4196a3a7ba90d169c \ --hash=sha256:3eb60922b8c4d9cedecb6897ca4c9f8a259d81bdefe5791976ccdf12432de1f0 \
--hash=sha256:11ce5fec5990e34e3981ed14897ba601c83957b577d77d395f1f8f878a179f98 \ --hash=sha256:51331fc6490b38c376e6ba8d8c814320c8d2d1c2663055bc396321a7c28fa8be
--hash=sha256:17a09e38fdc91e4857cf5a7ce82f3c0b229c3977490f2146513e366923fc256b \ qrcode==6.0 \
--hash=sha256:22d970cee5c096b9123415e183ae03702b2cd4d3ba3f0ced25c4e1aba3967167 \ --hash=sha256:037b0db4c93f44586e37f84c3da3f763874fcac85b2974a69a98e399ac78e1bf \
--hash=sha256:2a1793efcbae3a2264c5e0e492a2629eb10d895d6e5f17dbbd00eb8b489c6bda \ --hash=sha256:de4ffc15065e6ff20a551ad32b6b41264f3c75275675406ddfa8e3530d154be3
--hash=sha256:30a8a148a0fe482cec1aaf942bbd0ade56ec197c14fe058b2a94318c57e1f991 \ requests==2.19.1 \
--hash=sha256:32fbbaf964c5184d3f3e349085b0536dd28184b02e2b014fc900f58bbc126339 \ --hash=sha256:63b52e3c866428a224f97cab011de738c36aec0185aa91cfacd418b5d58911d1 \
--hash=sha256:347d67faee36d449dc9632da411cc318df52959079062627f1243001b10dc227 \ --hash=sha256:ec22d826a36ed72a7358ff3fe56cbd4ba69dd7a6718ffd450ff0e9df7a47ce6a
--hash=sha256:45f4b4e5461a041518baabc52340c249b60833aa84cea6377dc8016a2b33c666 \ setuptools==40.0.0 \
--hash=sha256:4717daec0035034b002d31c42e55431c970e3e38a78211f43990e1b7eaf19e28 \ --hash=sha256:012adb8e25fbfd64c652e99e7bab58799a3aaf05d39ab38561f69190a909015f \
--hash=sha256:51a1ac9e7dda81da444fed8be558a60ec88dfc73b2aa4b0efa310e87acb75838 \ --hash=sha256:d68abee4eed409fbe8c302ac4d8429a1ffef912cd047a903b5701c024048dd49
--hash=sha256:53e9dcc8f14783f6300b70da325a50ac1b0a3dbaee323bd9dc3f71d409c197a1 \ six==1.11.0 \
--hash=sha256:5519a2ed776e193688b7ddb61ab709303f6eb7d1237081e298283c72acc44271 \ --hash=sha256:70e8a77beed4562e7f14fe23a786b54f6296e34344c23bc42f07b15018ff98e9 \
--hash=sha256:583450e8e80a0885c453211ed2bd69ceea634d8c904f23ff8687f677fe810e95 \ --hash=sha256:832dc0e10feb1aa2c68dcc57dbb658f1c7e65b9b61af69048abc87a2db00a0eb
--hash=sha256:60f862bd2a07133585a4fc2ce2b1a8ec24746b07ac44307d22ef2b767cb03435 \ typing==3.6.4 \
--hash=sha256:612091f1d3c84e723bec7cb855cf77576e646045744794c9a3f75ba80737762f \ --hash=sha256:3a887b021a77b292e151afb75323dea88a7bc1b3dfa92176cff8e44c8b68bddf \
--hash=sha256:629a87b87c8203b8789ccefc7f2f2faecd2daaeb56bdd0b4e44cd89565f2db07 \ --hash=sha256:b2c689d54e1144bbcfd191b0832980a21c2dbcf7b5ff7a66248a60c90e951eb8 \
--hash=sha256:6e56ec4c8938fb388b6f250ddd5e21c15e8f25a76e0ad0e2abae9afee09e67b4 \ --hash=sha256:d400a9344254803a2368533e4533a4200d21eb7b6b729c173bc38201a74db3f2
--hash=sha256:8e8092651844a11ec7fa534395f3dfe99256ce4edca06f128efc9d770d6e1dc1 \ urllib3==1.23 \
--hash=sha256:8f5f260629876603e08f3ce95c8ccd9b6b83bf9a921c41409046796267f7adc5 \ --hash=sha256:a68ac5e15e76e7e5dd2b8f94007233e01effe3e50e8daddf69acfd81cb686baf \
--hash=sha256:9a6b74f38613f54c56bd759b411a352258f47489bbefd1d57c930a291498b35b \ --hash=sha256:b5725a0bd4ba422ab0e66e89e030c806576753ea3ee08554382c14e685d117b5
--hash=sha256:a5a13ebb52c4cd065fb673d8c94f39f30823428a4de19e1f3f828b63a8882d1e \ wheel==0.31.1 \
--hash=sha256:a77ca778a476829876a3a70ae880073379160e4a465d057e3c4e1c79acdf1b8a \ --hash=sha256:0a2e54558a0628f2145d2fc822137e322412115173e8a2ddbe1c9024338ae83c \
--hash=sha256:a9f7be3d19f79429c2118fd61bc2ec4fa095e93b56fb3a5f3009822402c4380f \ --hash=sha256:80044e51ec5bbf6c894ba0bc48d26a8c20a9ba629f4ca19ea26ecfcf87685f5f
--hash=sha256:dc15a467c4f9e4b43748ba2f97aea66f67812bfd581818284c47cadc81d4caec \ colorama==0.3.9 \
--hash=sha256:e13cdeea23059f7577c230fd580d2c8178e67ebe10e360041abe86c33c316f1c \ --hash=sha256:463f8483208e921368c9f306094eb6f725c6ca42b0f97e313cb5d5512459feda \
--hash=sha256:e45b85c8521bca6bdfaf57e4987743ade53e9f03529dd3adbc9524094c6d55c4 \ --hash=sha256:48eb22f4f8461b1df5734a074b57042430fb06e1d61bd1e11b078c0fe6d7a1f1
--hash=sha256:e87f17867b260f57c88487f943eb4d46c90532652bb37046e764842c3b66cbb1 \
--hash=sha256:ee40a5b156f6c1192bc3082e9d73d0479904433cdda83110546cd67f5a15a5be \
--hash=sha256:ef63ffde3b267043579af8830fc97fc3b9b8a526a24e3ba23af9989d4e9e689a
pyrsistent==0.15.6 \
--hash=sha256:f3b280d030afb652f79d67c5586157c5c1355c9a58dfc7940566e28d28f3df1b
QDarkStyle==2.6.8 \
--hash=sha256:037a54bf0aa5153f8055b65b8b36ac0d0f7648f2fd906c011a4da22eb0f582a2 \
--hash=sha256:fd1abae37d3a0a004089178da7c0b26ec5eb29f965b3e573853b8f280b614dea
qrcode==6.1 \
--hash=sha256:3996ee560fc39532910603704c82980ff6d4d5d629f9c3f25f34174ce8606cf5 \
--hash=sha256:505253854f607f2abf4d16092c61d4e9d511a3b4392e60bff957a68592b04369
setuptools==42.0.2 \
--hash=sha256:c5b372090d7c8709ce79a6a66872a91e518f7d65af97fca78135e1cb10d4b940 \
--hash=sha256:c8abd0f3574bc23afd2f6fd2c415ba7d9e097c8a99b845473b0d957ba1e2dac6
six==1.13.0 \
--hash=sha256:1f1b7d42e254082a9db6279deae68afb421ceba6158efa6131de7b3003ee93fd \
--hash=sha256:30f610279e8b2578cab6db20741130331735c781b56053c59c4076da27f06b66
typing-extensions==3.7.4.1 \
--hash=sha256:091ecc894d5e908ac75209f10d5b4f118fbdb2eb1ede6a63544054bb1edb41f2 \
--hash=sha256:910f4656f54de5993ad9304959ce9bb903f90aadc7c67a0bef07e678014e892d \
--hash=sha256:cf8b63fedea4d89bab840ecbb93e75578af28f76f66c35889bd7065f5af88575
wheel==0.33.6 \
--hash=sha256:10c9da68765315ed98850f8e048347c3eb06dd81822dc2ab1d4fde9dc9702646 \
--hash=sha256:f4da1763d3becf2e2cd92a14a7c920f0f00eca30fdde9ea992c836685b9faf28
yarl==1.4.1 \
--hash=sha256:031e8f56cf085d3b3df6b6bce756369ea7052b82d35ea07b6045f209c819e0e5 \
--hash=sha256:074958fe4578ef3a3d0bdaf96bbc25e4c4db82b7ff523594776fcf3d3f16c531 \
--hash=sha256:2db667ee21f620b446a54a793e467714fc5a446fcc82d93a47e8bde01d69afab \
--hash=sha256:326f2dbaaa17b858ae86f261ae73a266fd820a561fc5142cee9d0fc58448fbd7 \
--hash=sha256:32a3885f542f74d0f4f87057050c6b45529ebd79d0639f56582e741521575bfe \
--hash=sha256:56126ef061b913c3eefecace3404ca88917265d0550b8e32bbbeab29e5c830bf \
--hash=sha256:589ac1e82add13fbdedc04eb0a83400db728e5f1af2bd273392088ca90de7062 \
--hash=sha256:6076bce2ecc6ebf6c92919d77762f80f4c9c6ecc9c1fbaa16567ec59ad7d6f1d \
--hash=sha256:63be649c535d18ab6230efbc06a07f7779cd4336a687672defe70c025349a47b \
--hash=sha256:6642cbc92eaffa586180f669adc772f5c34977e9e849e93f33dc142351e98c9c \
--hash=sha256:6fa05a25f2280e78a514041d4609d39962e7d51525f2439db9ad7a2ae7aac163 \
--hash=sha256:7ed006a220422c33ff0889288be24db56ff0a3008ffe9eaead58a690715ad09b \
--hash=sha256:80c9c213803b50899460cc355f47e66778c3c868f448b7b7de5b1f1858c82c2a \
--hash=sha256:8bae18e2129850e76969b57869dacc72a66cccdbeebce1a28d7f3d439c21a7a3 \
--hash=sha256:ab112fba996a8f48f427e26969f2066d50080df0c24007a8cc6d7ae865e19013 \
--hash=sha256:b1c178ef813940c9a5cbad42ab7b8b76ac08b594b0a6bad91063c968e0466efc \
--hash=sha256:d6eff151c3b23a56a5e4f496805619bc3bdf4f749f63a7a95ad50e8267c17475
zipp==0.6.0 \
--hash=sha256:3718b1cbcd963c7d4c5511a8240812904164b7f381b647143a89d3b98f9bcd8e \
--hash=sha256:f06903e9f1f43b12d371004b4ac7b06ab39a44adc747266928ae6debfa7b3335
colorama==0.4.1 \
--hash=sha256:05eed71e2e327246ad6b38c540c4a3117230b19679b875190486ddd2d721422d \
--hash=sha256:f8ac84de7840f5b9c4e3347b3c1eaa50f7e49c2b07596221daec5edaabbd7c48

4
contrib/freeze_packages.sh Normal file → Executable file

@ -1,8 +1,6 @@
#!/bin/bash #!/bin/bash
# Run this after a new release to update dependencies # Run this after a new release to update dependencies
set -e
venv_dir=~/.electrum-venv venv_dir=~/.electrum-venv
contrib=$(dirname "$0") contrib=$(dirname "$0")
@ -10,7 +8,7 @@ which virtualenv > /dev/null 2>&1 || { echo "Please install virtualenv" && exit
python3 -m hashin -h > /dev/null 2>&1 || { python3 -m pip install hashin; } python3 -m hashin -h > /dev/null 2>&1 || { python3 -m pip install hashin; }
other_python=$(which python3) other_python=$(which python3)
for i in '' '-hw' '-binaries' '-wine-build'; do for i in '' '-hw' '-binaries'; do
rm -rf "$venv_dir" rm -rf "$venv_dir"
virtualenv -p $(which python3) $venv_dir virtualenv -p $(which python3) $venv_dir
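For context, the pinning in this script is done by hashin; below is a minimal, hedged sketch of a single invocation (the package name, version, and target file are illustrative, not taken from the script):

```
# Hypothetical example: pin one package, together with its sha256 hashes,
# into a deterministic requirements file. Not part of the repository scripts.
python3 -m hashin -r contrib/deterministic-build/requirements.txt qrcode==6.1
```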

41
contrib/make_apk Normal file → Executable file

@ -1,26 +1,7 @@
#!/bin/bash #!/bin/bash
set -e
CONTRIB="$(dirname "$(readlink -e "$0")")"
ROOT_FOLDER="$CONTRIB"/..
PACKAGES="$ROOT_FOLDER"/packages/
LOCALE="$ROOT_FOLDER"/electrum/locale/
if [ ! -d "$LOCALE" ]; then
echo "Run pull_locale first!"
exit 1
fi
if [ ! -d "$PACKAGES" ]; then
echo "Run make_packages first!"
exit 1
fi
pushd ./electrum/gui/kivy/ pushd ./electrum/gui/kivy/
make theming
if [[ -n "$1" && "$1" == "release" ]] ; then if [[ -n "$1" && "$1" == "release" ]] ; then
echo -n Keystore Password: echo -n Keystore Password:
read -s password read -s password
@ -28,31 +9,9 @@ if [[ -n "$1" && "$1" == "release" ]] ; then
export P4A_RELEASE_KEYSTORE_PASSWD=$password export P4A_RELEASE_KEYSTORE_PASSWD=$password
export P4A_RELEASE_KEYALIAS_PASSWD=$password export P4A_RELEASE_KEYALIAS_PASSWD=$password
export P4A_RELEASE_KEYALIAS=electrum export P4A_RELEASE_KEYALIAS=electrum
# build two apks
export APP_ANDROID_ARCH=armeabi-v7a
make release
export APP_ANDROID_ARCH=arm64-v8a
make release make release
else else
export P4A_DEBUG_KEYSTORE="$CONTRIB"/android_debug.keystore
export P4A_DEBUG_KEYSTORE_PASSWD=unsafepassword
export P4A_DEBUG_KEYALIAS_PASSWD=unsafepassword
export P4A_DEBUG_KEYALIAS=electrum
# create keystore if needed
if [ ! -f "$P4A_DEBUG_KEYSTORE" ]; then
keytool -genkey -v -keystore "$CONTRIB"/android_debug.keystore \
-alias "$P4A_DEBUG_KEYALIAS" -keyalg RSA -keysize 2048 -validity 10000 \
-dname "CN=mqttserver.ibm.com, OU=ID, O=IBM, L=Hursley, S=Hants, C=GB" \
-storepass "$P4A_DEBUG_KEYSTORE_PASSWD" \
-keypass "$P4A_DEBUG_KEYALIAS_PASSWD"
fi
# build two apks (only one on Travis CI)
export APP_ANDROID_ARCH=armeabi-v7a
make apk make apk
if [ ! $CI ]; then
export APP_ANDROID_ARCH=arm64-v8a
make apk
fi
fi fi
popd popd
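As a hedged aside (not part of make_apk itself), one way to confirm which certificate a produced APK ended up signed with is the Android SDK's apksigner tool; the APK path below is a guess at the build output location, not something taken from the script:

```
# Hypothetical check; requires the Android SDK build-tools on PATH.
apksigner verify --print-certs bin/Electrum-*-debug.apk
```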

10
contrib/make_download Normal file → Executable file

@ -2,15 +2,8 @@
import re import re
import os import os
import sys import sys
import importlib
# load version.py; needlessly complicated alternative to "imp.load_source": from electrum.version import ELECTRUM_VERSION, APK_VERSION
version_spec = importlib.util.spec_from_file_location('version', 'electrum/version.py')
version_module = importlib.util.module_from_spec(version_spec)
version_spec.loader.exec_module(version_module)
ELECTRUM_VERSION = version_module.ELECTRUM_VERSION
APK_VERSION = version_module.APK_VERSION
print("version", ELECTRUM_VERSION) print("version", ELECTRUM_VERSION)
dirname = sys.argv[1] dirname = sys.argv[1]
@ -31,7 +24,6 @@ string = string.replace("##VERSION_APK##", APK_VERSION)
files = { files = {
'tgz': "Electrum-%s.tar.gz" % version, 'tgz': "Electrum-%s.tar.gz" % version,
'appimage': "electrum-%s-x86_64.AppImage" % version,
'zip': "Electrum-%s.zip" % version, 'zip': "Electrum-%s.zip" % version,
'mac': "electrum-%s.dmg" % version_mac, 'mac': "electrum-%s.dmg" % version_mac,
'win': "electrum-%s.exe" % version_win, 'win': "electrum-%s.exe" % version_win,


@ -1,49 +0,0 @@
#!/bin/bash
LIBSECP_VERSION="b408c6a8b287003d1ade5709e6f7bc3c7f1d5be7"
set -e
. $(dirname "$0")/build_tools_util.sh || (echo "Could not source build_tools_util.sh" && exit 1)
here=$(dirname $(realpath "$0" 2> /dev/null || grealpath "$0"))
CONTRIB="$here"
PROJECT_ROOT="$CONTRIB/.."
pkgname="secp256k1"
info "Building $pkgname..."
(
cd $CONTRIB
if [ ! -d secp256k1 ]; then
git clone https://github.com/bitcoin-core/secp256k1.git
fi
cd secp256k1
git reset --hard
git clean -f -x -q
git checkout $LIBSECP_VERSION
if ! [ -x configure ] ; then
echo "libsecp256k1_la_LDFLAGS = -no-undefined" >> Makefile.am
echo "LDFLAGS = -no-undefined" >> Makefile.am
./autogen.sh || fail "Could not run autogen for $pkgname. Please make sure you have automake and libtool installed, and try again."
fi
if ! [ -r config.status ] ; then
./configure \
$AUTOCONF_FLAGS \
--prefix="$here/$pkgname/dist" \
--enable-module-recovery \
--enable-experimental \
--enable-module-ecdh \
--disable-jni \
--disable-tests \
--disable-static \
--enable-shared || fail "Could not configure $pkgname. Please make sure you have a C compiler installed and try again."
fi
make -j4 || fail "Could not build $pkgname"
make install || fail "Could not install $pkgname"
. "$here/$pkgname/dist/lib/libsecp256k1.la"
host_strip "$here/$pkgname/dist/lib/$dlname"
cp -fpv "$here/$pkgname/dist/lib/$dlname" "$PROJECT_ROOT/electrum" || fail "Could not copy the $pkgname binary to its destination"
info "$dlname has been placed in the inner 'electrum' folder."
)

0
contrib/make_locale Normal file → Executable file

11
contrib/make_packages Normal file → Executable file

@ -1,10 +1,13 @@
#!/bin/bash #!/bin/bash
CONTRIB="$(dirname "$0")" contrib=$(dirname "$0")
test -n "$CONTRIB" -a -d "$CONTRIB" || exit test -n "$contrib" -a -d "$contrib" || exit
rm "$CONTRIB"/../packages/ -r whereis pip3
if [ $? -ne 0 ] ; then echo "Install pip3" ; exit ; fi
rm "$contrib"/../packages/ -r
#Install pure python modules in electrum directory #Install pure python modules in electrum directory
python3 -m pip install -r "$CONTRIB"/deterministic-build/requirements.txt -t "$CONTRIB"/../packages pip3 install -r $contrib/deterministic-build/requirements.txt -t $contrib/../packages

44
contrib/make_tgz Normal file → Executable file

@ -1,43 +1 @@
#!/bin/bash python3 setup.py sdist --format=zip,gztar
set -e
CONTRIB="$(dirname "$(readlink -e "$0")")"
ROOT_FOLDER="$CONTRIB"/..
PACKAGES="$ROOT_FOLDER"/packages/
LOCALE="$ROOT_FOLDER"/electrum/locale/
if [ ! -d "$PACKAGES" ]; then
echo "Run make_packages first!"
exit 1
fi
git submodule update --init
(
rm -rf "$LOCALE"
cd "$CONTRIB/deterministic-build/electrum-locale/"
if ! which msgfmt > /dev/null 2>&1; then
echo "Please install gettext"
exit 1
fi
for i in ./locale/*; do
dir="$ROOT_FOLDER"/electrum/$i/LC_MESSAGES
mkdir -p $dir
msgfmt --output-file=$dir/electrum.mo $i/electrum.po || true
cp $i/electrum.po "$ROOT_FOLDER"/electrum/$i/electrum.po
done
)
(
cd "$ROOT_FOLDER"
echo "'git clean -fd' would delete the following files: >>>"
git clean -fd --dry-run
echo "<<<"
# we could build the kivy atlas potentially?
#(cd electrum/gui/kivy/; make theming) || echo "building kivy atlas failed! skipping."
python3 setup.py --quiet sdist --format=zip,gztar
)

@ -1 +0,0 @@
Subproject commit 59dfc03272751cd29ee311456fa34c40f7ebb7c0


@ -1,72 +0,0 @@
Building Mac OS binaries
========================
✗ _This script does not produce reproducible output (yet!).
Please help us remedy this._
This guide explains how to build Electrum binaries for macOS systems.
## 1. Building the binary
This needs to be done on a system running macOS or OS X. We use El Capitan (10.11.6), because building on High Sierra (or later) makes the binaries [incompatible with older versions](https://github.com/pyinstaller/pyinstaller/issues/1191).
Another factor for the minimum supported macOS version is the
[bundled Qt version](https://github.com/spesmilo/electrum/issues/3685).
Before starting, make sure that the Xcode command line tools are installed (e.g. you have `git`).
#### 1.1a Get Xcode
Building the QR scanner (CalinsQRReader) requires full Xcode (not just command line tools).
The last Xcode version compatible with El Capitan is Xcode 8.2.1.
Get it from [here](https://developer.apple.com/download/more/).
Unfortunately, you need an "Apple ID" account.
After downloading, uncompress it.
Make sure it is the "selected" xcode (e.g.):
sudo xcode-select -s $HOME/Downloads/Xcode.app/Contents/Developer/
#### 1.1b Build QR scanner separately on newer Mac
Alternatively, you can try building just the QR scanner on newer macOS.
On the newer Mac, run:
pushd contrib/osx/CalinsQRReader; xcodebuild; popd
cp -r contrib/osx/CalinsQRReader/build prebuilt_qr
Then move the resulting `prebuilt_qr` directory to the El Capitan machine, at `contrib/osx/CalinsQRReader/prebuilt_qr`.
#### 1.2 Build Electrum
cd electrum
./contrib/osx/make_osx
This creates both a folder named Electrum.app and the .dmg file.
## 2. Building the image deterministically (WIP)
The usual way to distribute macOS applications is to use image files containing the
application. Although these images can be created on a Mac with the built-in `hdiutil`,
they are not deterministic.
Instead, we use the toolchain that Bitcoin uses: genisoimage and libdmg-hfsplus.
These tools do not work on macOS, so you need a separate Linux machine (or VM).
Copy the Electrum.app directory over and install the dependencies, e.g.:
apt install libcap-dev cmake make gcc faketime
Then you can just invoke `package.sh` with the path to the app:
cd electrum
./contrib/osx/package.sh ~/Electrum.app/
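Since the point of this toolchain is determinism, a hedged way to check the result is to compare digests with another builder (the glob below is illustrative); package.sh itself prints the same digest at the end of a run:

```
# Two independent builds of the same tag should produce identical digests.
sha256sum electrum-*.dmg
```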


@ -1,23 +0,0 @@
#!/usr/bin/env bash
. $(dirname "$0")/../build_tools_util.sh
function DoCodeSignMaybe { # ARGS: infoName fileOrDirName codesignIdentity
infoName="$1"
file="$2"
identity="$3"
deep=""
if [ -z "$identity" ]; then
# we are ok with them not passing anything; master script calls us unconditionally even if no identity is specified
return
fi
if [ -d "$file" ]; then
deep="--deep"
fi
if [ -z "$infoName" ] || [ -z "$file" ] || [ -z "$identity" ] || [ ! -e "$file" ]; then
fail "Argument error to internal function DoCodeSignMaybe()"
fi
info "Code signing ${infoName}..."
codesign -f -v $deep -s "$identity" "$file" || fail "Could not code sign ${infoName}"
}


@ -1,86 +0,0 @@
--- cdrkit-1.1.11.old/genisoimage/tree.c 2008-10-21 19:57:47.000000000 -0400
+++ cdrkit-1.1.11/genisoimage/tree.c 2013-12-06 00:23:18.489622668 -0500
@@ -1139,8 +1139,9 @@
scan_directory_tree(struct directory *this_dir, char *path,
struct directory_entry *de)
{
- DIR *current_dir;
+ int current_file;
char whole_path[PATH_MAX];
+ struct dirent **d_list;
struct dirent *d_entry;
struct directory *parent;
int dflag;
@@ -1164,7 +1165,8 @@
this_dir->dir_flags |= DIR_WAS_SCANNED;
errno = 0; /* Paranoia */
- current_dir = opendir(path);
+ //current_dir = opendir(path);
+ current_file = scandir(path, &d_list, NULL, alphasort);
d_entry = NULL;
/*
@@ -1173,12 +1175,12 @@
*/
old_path = path;
- if (current_dir) {
+ if (current_file >= 0) {
errno = 0;
- d_entry = readdir(current_dir);
+ d_entry = d_list[0];
}
- if (!current_dir || !d_entry) {
+ if (current_file < 0 || !d_entry) {
int ret = 1;
#ifdef USE_LIBSCHILY
@@ -1191,8 +1193,8 @@
de->isorec.flags[0] &= ~ISO_DIRECTORY;
ret = 0;
}
- if (current_dir)
- closedir(current_dir);
+ if(d_list)
+ free(d_list);
return (ret);
}
#ifdef ABORT_DEEP_ISO_ONLY
@@ -1208,7 +1210,7 @@
errmsgno(EX_BAD, "use Rock Ridge extensions via -R or -r,\n");
errmsgno(EX_BAD, "or allow deep ISO9660 directory nesting via -D.\n");
}
- closedir(current_dir);
+ free(d_list);
return (1);
}
#endif
@@ -1250,13 +1252,13 @@
* The first time through, skip this, since we already asked
* for the first entry when we opened the directory.
*/
- if (dflag)
- d_entry = readdir(current_dir);
+ if (dflag && current_file >= 0)
+ d_entry = d_list[current_file];
dflag++;
- if (!d_entry)
+ if (current_file < 0)
break;
-
+ current_file--;
/* OK, got a valid entry */
/* If we do not want all files, then pitch the backups. */
@@ -1348,7 +1350,7 @@
insert_file_entry(this_dir, whole_path, d_entry->d_name);
#endif /* APPLE_HYB */
}
- closedir(current_dir);
+ free(d_list);
#ifdef APPLE_HYB
/*


@ -1,152 +0,0 @@
#!/usr/bin/env bash
# Parameterize
PYTHON_VERSION=3.7.6
BUILDDIR=/tmp/electrum-build
PACKAGE=Electrum
GIT_REPO=https://github.com/spesmilo/electrum
LIBSECP_VERSION="b408c6a8b287003d1ade5709e6f7bc3c7f1d5be7"
export GCC_STRIP_BINARIES="1"
. $(dirname "$0")/base.sh
CONTRIB_OSX="$(dirname "$(realpath "$0")")"
CONTRIB="$CONTRIB_OSX/.."
ROOT_FOLDER="$CONTRIB/.."
src_dir=$(dirname "$0")
cd $src_dir/../..
VERSION=`git describe --tags --dirty --always`
which brew > /dev/null 2>&1 || fail "Please install brew from https://brew.sh/ to continue"
which xcodebuild > /dev/null 2>&1 || fail "Please install Xcode and xcode command line tools to continue"
# Code Signing: See https://developer.apple.com/library/archive/documentation/Security/Conceptual/CodeSigningGuide/Procedures/Procedures.html
APP_SIGN=""
if [ -n "$1" ]; then
# Test the identity is valid for signing by doing this hack. There is no other way to do this.
cp -f /bin/ls ./CODESIGN_TEST
codesign -s "$1" --dryrun -f ./CODESIGN_TEST > /dev/null 2>&1
res=$?
rm -f ./CODESIGN_TEST
if ((res)); then
fail "Code signing identity \"$1\" appears to be invalid."
fi
unset res
APP_SIGN="$1"
info "Code signing enabled using identity \"$APP_SIGN\""
else
warn "Code signing DISABLED. Specify a valid macOS Developer identity installed on the system as the first argument to this script to enable signing."
fi
info "Installing Python $PYTHON_VERSION"
export PATH="~/.pyenv/bin:~/.pyenv/shims:~/Library/Python/3.7/bin:$PATH"
if [ -d "~/.pyenv" ]; then
pyenv update
else
curl -L https://raw.githubusercontent.com/pyenv/pyenv-installer/master/bin/pyenv-installer | bash > /dev/null 2>&1
fi
PYTHON_CONFIGURE_OPTS="--enable-framework" pyenv install -s $PYTHON_VERSION && \
pyenv global $PYTHON_VERSION || \
fail "Unable to use Python $PYTHON_VERSION"
info "install dependencies specific to binaries"
# note that this also installs pinned versions of both pip and setuptools
python3 -m pip install --no-dependencies -Ir ./contrib/deterministic-build/requirements-binaries.txt --user \
|| fail "Could not install pyinstaller"
info "Installing pyinstaller"
python3 -m pip install -I --user pyinstaller==3.6 || fail "Could not install pyinstaller"
info "Using these versions for building $PACKAGE:"
sw_vers
python3 --version
echo -n "Pyinstaller "
pyinstaller --version
rm -rf ./dist
git submodule update --init
rm -rf $BUILDDIR > /dev/null 2>&1
mkdir $BUILDDIR
info "generating locale"
(
if ! which msgfmt > /dev/null 2>&1; then
brew install gettext
brew link --force gettext
fi
cd "$CONTRIB"/deterministic-build/electrum-locale
for i in ./locale/*; do
dir="$ROOT_FOLDER"/electrum/$i/LC_MESSAGES
mkdir -p $dir
msgfmt --output-file=$dir/electrum.mo $i/electrum.po || true
done
) || fail "failed generating locale"
info "Downloading libusb..."
curl https://homebrew.bintray.com/bottles/libusb-1.0.22.el_capitan.bottle.tar.gz | \
tar xz --directory $BUILDDIR
cp $BUILDDIR/libusb/1.0.22/lib/libusb-1.0.dylib contrib/osx
echo "82c368dfd4da017ceb32b12ca885576f325503428a4966cc09302cbd62702493 contrib/osx/libusb-1.0.dylib" | \
shasum -a 256 -c || fail "libusb checksum mismatched"
info "Preparing for building libsecp256k1"
brew install autoconf automake libtool
"$CONTRIB"/make_libsecp256k1.sh || fail "Could not build libsecp"
cp "$ROOT_FOLDER"/electrum/libsecp256k1.0.dylib contrib/osx
info "Building CalinsQRReader..."
d=contrib/osx/CalinsQRReader
pushd $d
rm -fr build
# prefer building using xcode ourselves. otherwise fallback to prebuilt binary
xcodebuild || cp -r prebuilt_qr build || fail "Could not build CalinsQRReader"
popd
DoCodeSignMaybe "CalinsQRReader.app" "${d}/build/Release/CalinsQRReader.app" "$APP_SIGN" # If APP_SIGN is empty will be a noop
info "Installing requirements..."
python3 -m pip install --no-dependencies -Ir ./contrib/deterministic-build/requirements.txt --user || \
fail "Could not install requirements"
info "Installing hardware wallet requirements..."
python3 -m pip install --no-dependencies -Ir ./contrib/deterministic-build/requirements-hw.txt --user || \
fail "Could not install hardware wallet requirements"
info "Building $PACKAGE..."
python3 -m pip install --no-dependencies --user . > /dev/null || fail "Could not build $PACKAGE"
info "Faking timestamps..."
for d in ~/Library/Python/ ~/.pyenv .; do
pushd $d
find . -exec touch -t '200101220000' {} +
popd
done
info "Building binary"
APP_SIGN="$APP_SIGN" pyinstaller --noconfirm --ascii --clean --name $VERSION contrib/osx/osx.spec || fail "Could not build binary"
info "Adding bitcoin URI types to Info.plist"
plutil -insert 'CFBundleURLTypes' \
-xml '<array><dict> <key>CFBundleURLName</key> <string>bitcoin</string> <key>CFBundleURLSchemes</key> <array><string>bitcoin</string></array> </dict></array>' \
-- dist/$PACKAGE.app/Contents/Info.plist \
|| fail "Could not add keys to Info.plist. Make sure the program 'plutil' exists and is installed."
DoCodeSignMaybe "app bundle" "dist/${PACKAGE}.app" "$APP_SIGN" # If APP_SIGN is empty will be a noop
info "Creating .DMG"
hdiutil create -fs HFS+ -volname $PACKAGE -srcfolder dist/$PACKAGE.app dist/electrum-$VERSION.dmg || fail "Could not create .DMG"
DoCodeSignMaybe ".DMG" "dist/electrum-${VERSION}.dmg" "$APP_SIGN" # If APP_SIGN is empty will be a noop
if [ -z "$APP_SIGN" ]; then
warn "App was built successfully but was not code signed. Users may get security warnings from macOS."
warn "Specify a valid code signing identity as the first argument to this script to enable code signing."
fi


@ -1,165 +0,0 @@
# -*- mode: python -*-
from PyInstaller.utils.hooks import collect_data_files, collect_submodules, collect_dynamic_libs
import sys, os
PACKAGE='Electrum'
PYPKG='electrum'
MAIN_SCRIPT='run_electrum'
ICONS_FILE=PYPKG + '/gui/icons/electrum.icns'
APP_SIGN = os.environ.get('APP_SIGN', '')
def fail(*msg):
RED='\033[0;31m'
NC='\033[0m' # No Color
print("\r🗯 {}ERROR:{}".format(RED, NC), *msg)
sys.exit(1)
def codesign(identity, binary):
d = os.path.dirname(binary)
saved_dir=None
if d:
# switch to directory of the binary so codesign verbose messages don't include long path
saved_dir = os.path.abspath(os.path.curdir)
os.chdir(d)
binary = os.path.basename(binary)
os.system("codesign -v -f -s '{}' '{}'".format(identity, binary))==0 or fail("Could not code sign " + binary)
if saved_dir:
os.chdir(saved_dir)
def monkey_patch_pyinstaller_for_codesigning(identity):
# Monkey-patch PyInstaller so that we app-sign all binaries *after* they are modified by PyInstaller
# If we app-sign before that point, the signature will be invalid because PyInstaller modifies
# @loader_path in the Mach-O loader table.
try:
import PyInstaller.depend.dylib
_saved_func = PyInstaller.depend.dylib.mac_set_relative_dylib_deps
except (ImportError, NameError, AttributeError):
# Hmm. Likely wrong PyInstaller version.
fail("Could not monkey-patch PyInstaller for code signing. Please ensure that you are using PyInstaller 3.4.")
_signed = set()
def my_func(fn, distname):
_saved_func(fn, distname)
if (fn, distname) not in _signed:
codesign(identity, fn)
_signed.add((fn,distname)) # remember we signed it so we don't sign again
PyInstaller.depend.dylib.mac_set_relative_dylib_deps = my_func
for i, x in enumerate(sys.argv):
if x == '--name':
VERSION = sys.argv[i+1]
break
else:
raise Exception('no version')
electrum = os.path.abspath(".") + "/"
block_cipher = None
# see https://github.com/pyinstaller/pyinstaller/issues/2005
hiddenimports = []
hiddenimports += collect_submodules('trezorlib')
hiddenimports += collect_submodules('safetlib')
hiddenimports += collect_submodules('btchip')
hiddenimports += collect_submodules('keepkeylib')
hiddenimports += collect_submodules('websocket')
hiddenimports += collect_submodules('ckcc')
hiddenimports += ['PyQt5.QtPrintSupport'] # needed by Revealer
# safetlib imports PyQt5.Qt. We use a local updated copy of pinmatrix.py until they
# release a new version that includes https://github.com/archos-safe-t/python-safet/commit/b1eab3dba4c04fdfc1fcf17b66662c28c5f2380e
hiddenimports.remove('safetlib.qt.pinmatrix')
datas = [
(electrum + PYPKG + '/*.json', PYPKG),
(electrum + PYPKG + '/wordlist/english.txt', PYPKG + '/wordlist'),
(electrum + PYPKG + '/locale', PYPKG + '/locale'),
(electrum + PYPKG + '/plugins', PYPKG + '/plugins'),
(electrum + PYPKG + '/gui/icons', PYPKG + '/gui/icons'),
]
datas += collect_data_files('trezorlib')
datas += collect_data_files('safetlib')
datas += collect_data_files('btchip')
datas += collect_data_files('keepkeylib')
datas += collect_data_files('ckcc')
datas += collect_data_files('jsonrpcserver')
datas += collect_data_files('jsonrpcclient')
# Add the QR Scanner helper app
datas += [(electrum + "contrib/osx/CalinsQRReader/build/Release/CalinsQRReader.app", "./contrib/osx/CalinsQRReader/build/Release/CalinsQRReader.app")]
# Add libusb so Trezor and Safe-T mini will work
binaries = [(electrum + "contrib/osx/libusb-1.0.dylib", ".")]
binaries += [(electrum + "contrib/osx/libsecp256k1.0.dylib", ".")]
# Workaround for "Retro Look":
binaries += [b for b in collect_dynamic_libs('PyQt5') if 'macstyle' in b[0]]
# These files are not listed here so that they get bundled; they are listed so that the Analysis step scans them for imports
a = Analysis([electrum+ MAIN_SCRIPT,
electrum+'electrum/gui/qt/main_window.py',
electrum+'electrum/gui/text.py',
electrum+'electrum/util.py',
electrum+'electrum/wallet.py',
electrum+'electrum/simple_config.py',
electrum+'electrum/bitcoin.py',
electrum+'electrum/dnssec.py',
electrum+'electrum/commands.py',
electrum+'electrum/plugins/cosigner_pool/qt.py',
electrum+'electrum/plugins/email_requests/qt.py',
electrum+'electrum/plugins/trezor/qt.py',
electrum+'electrum/plugins/safe_t/client.py',
electrum+'electrum/plugins/safe_t/qt.py',
electrum+'electrum/plugins/keepkey/qt.py',
electrum+'electrum/plugins/ledger/qt.py',
electrum+'electrum/plugins/coldcard/qt.py',
],
binaries=binaries,
datas=datas,
hiddenimports=hiddenimports,
hookspath=[])
# http://stackoverflow.com/questions/19055089/pyinstaller-onefile-warning-pyconfig-h-when-importing-scipy-or-scipy-signal
for d in a.datas:
if 'pyconfig' in d[0]:
a.datas.remove(d)
break
# Strip out parts of Qt that we never use. Reduces binary size by tens of MBs. see #4815
qt_bins2remove=('qtweb', 'qt3d', 'qtgame', 'qtdesigner', 'qtquick', 'qtlocation', 'qttest', 'qtxml')
print("Removing Qt binaries:", *qt_bins2remove)
for x in a.binaries.copy():
for r in qt_bins2remove:
if x[0].lower().startswith(r):
a.binaries.remove(x)
print('----> Removed x =', x)
# If code signing, monkey-patch in a code signing step to pyinstaller. See: https://github.com/spesmilo/electrum/issues/4994
if APP_SIGN:
monkey_patch_pyinstaller_for_codesigning(APP_SIGN)
pyz = PYZ(a.pure, a.zipped_data, cipher=block_cipher)
exe = EXE(pyz,
a.scripts,
a.binaries,
a.datas,
name=PACKAGE,
debug=False,
strip=False,
upx=True,
icon=electrum+ICONS_FILE,
console=False)
app = BUNDLE(exe,
version = VERSION,
name=PACKAGE + '.app',
icon=electrum+ICONS_FILE,
bundle_identifier=None,
info_plist={
'NSHighResolutionCapable': 'True',
'NSSupportsAutomaticGraphicsSwitching': 'True'
}
)


@ -1,88 +0,0 @@
#!/usr/bin/env bash
cdrkit_version=1.1.11
cdrkit_download_path=http://distro.ibiblio.org/fatdog/source/600/c
cdrkit_file_name=cdrkit-${cdrkit_version}.tar.bz2
cdrkit_sha256_hash=b50d64c214a65b1a79afe3a964c691931a4233e2ba605d793eb85d0ac3652564
cdrkit_patches=cdrkit-deterministic.patch
genisoimage=genisoimage-$cdrkit_version
libdmg_url=https://github.com/theuni/libdmg-hfsplus
export LD_PRELOAD=$(locate libfaketime.so.1)
export FAKETIME="2000-01-22 00:00:00"
export PATH=$PATH:~/bin
. $(dirname "$0")/base.sh
if [ -z "$1" ]; then
echo "Usage: $0 Electrum.app"
exit -127
fi
mkdir -p ~/bin
if ! which ${genisoimage} > /dev/null 2>&1; then
mkdir -p /tmp/electrum-macos
cd /tmp/electrum-macos
info "Downloading cdrkit $cdrkit_version"
wget -nc ${cdrkit_download_path}/${cdrkit_file_name}
tar xvf ${cdrkit_file_name}
info "Patching genisoimage"
cd cdrkit-${cdrkit_version}
patch -p1 < ../cdrkit-deterministic.patch
info "Building genisoimage"
cmake . -Wno-dev
make genisoimage
cp genisoimage/genisoimage ~/bin/${genisoimage}
fi
if ! which dmg > /dev/null 2>&1; then
mkdir -p /tmp/electrum-macos
cd /tmp/electrum-macos
info "Downloading libdmg"
LD_PRELOAD= git clone ${libdmg_url}
cd libdmg-hfsplus
info "Building libdmg"
cmake .
make
cp dmg/dmg ~/bin
fi
${genisoimage} -version || fail "Unable to install genisoimage"
dmg -|| fail "Unable to install libdmg"
plist=$1/Contents/Info.plist
test -f "$plist" || fail "Info.plist not found"
VERSION=$(grep -1 ShortVersionString $plist |tail -1|gawk 'match($0, /<string>(.*)<\/string>/, a) {print a[1]}')
echo $VERSION
rm -rf /tmp/electrum-macos/image > /dev/null 2>&1
mkdir /tmp/electrum-macos/image/
cp -r $1 /tmp/electrum-macos/image/
build_dir=$(dirname "$1")
test -n "$build_dir" -a -d "$build_dir" || exit
cd $build_dir
${genisoimage} \
-no-cache-inodes \
-D \
-l \
-probe \
-V "Electrum" \
-no-pad \
-r \
-dir-mode 0755 \
-apple \
-o Electrum_uncompressed.dmg \
/tmp/electrum-macos/image || fail "Unable to create uncompressed dmg"
dmg dmg Electrum_uncompressed.dmg electrum-$VERSION.dmg || fail "Unable to create compressed dmg"
rm Electrum_uncompressed.dmg
echo "Done."
sha256sum electrum-$VERSION.dmg


@ -1,65 +0,0 @@
#!/usr/bin/env python3
import os
import subprocess
import io
import zipfile
import sys
try:
import requests
except ImportError as e:
sys.exit(f"Error: {str(e)}. Try 'sudo python3 -m pip install <module-name>'")
os.chdir(os.path.dirname(os.path.realpath(__file__)))
os.chdir('..')
cmd = "find electrum -type f -name '*.py' -o -name '*.kv'"
files = subprocess.check_output(cmd, shell=True)
with open("app.fil", "wb") as f:
f.write(files)
print("Found {} files to translate".format(len(files.splitlines())))
# Generate fresh translation template
if not os.path.exists('electrum/locale'):
os.mkdir('electrum/locale')
cmd = 'xgettext -s --from-code UTF-8 --language Python --no-wrap -f app.fil --output=electrum/locale/messages.pot'
print('Generate template')
os.system(cmd)
os.chdir('electrum')
crowdin_identifier = 'electrum'
crowdin_file_name = 'files[electrum-client/messages.pot]'
locale_file_name = 'locale/messages.pot'
# Download & unzip
print('Download translations')
s = requests.request('GET', 'https://crowdin.com/backend/download/project/' + crowdin_identifier + '.zip').content
zfobj = zipfile.ZipFile(io.BytesIO(s))
print('Unzip translations')
for name in zfobj.namelist():
if not name.startswith('electrum-client/locale'):
continue
if name.endswith('/'):
if not os.path.exists(name[16:]):
os.mkdir(name[16:])
else:
with open(name[16:], 'wb') as output:
output.write(zfobj.read(name))
# Convert .po to .mo
print('Installing')
for lang in os.listdir('locale'):
if lang.startswith('messages'):
continue
# Check LC_MESSAGES folder
mo_dir = 'locale/%s/LC_MESSAGES' % lang
if not os.path.exists(mo_dir):
os.mkdir(mo_dir)
cmd = 'msgfmt --output-file="%s/electrum.mo" "locale/%s/electrum.po"' % (mo_dir,lang)
print('Installing', lang)
os.system(cmd)


@ -1,59 +0,0 @@
#!/usr/bin/env python3
import os
import subprocess
import io
import zipfile
import sys
try:
import requests
except ImportError as e:
sys.exit(f"Error: {str(e)}. Try 'sudo python3 -m pip install <module-name>'")
os.chdir(os.path.dirname(os.path.realpath(__file__)))
os.chdir('..')
cmd = "find electrum -type f -name '*.py' -o -name '*.kv'"
files = subprocess.check_output(cmd, shell=True)
with open("app.fil", "wb") as f:
f.write(files)
print("Found {} files to translate".format(len(files.splitlines())))
# Generate fresh translation template
if not os.path.exists('electrum/locale'):
os.mkdir('electrum/locale')
cmd = 'xgettext -s --from-code UTF-8 --language Python --no-wrap -f app.fil --output=electrum/locale/messages.pot'
print('Generate template')
os.system(cmd)
os.chdir('electrum')
crowdin_identifier = 'electrum'
crowdin_file_name = 'files[electrum-client/messages.pot]'
locale_file_name = 'locale/messages.pot'
crowdin_api_key = None
filename = os.path.expanduser('~/.crowdin_api_key')
if os.path.exists(filename):
with open(filename) as f:
crowdin_api_key = f.read().strip()
if "crowdin_api_key" in os.environ:
crowdin_api_key = os.environ["crowdin_api_key"]
if crowdin_api_key:
# Push to Crowdin
print('Push to Crowdin')
url = ('https://api.crowdin.com/api/project/' + crowdin_identifier + '/update-file?key=' + crowdin_api_key)
with open(locale_file_name, 'rb') as f:
files = {crowdin_file_name: f}
response = requests.request('POST', url, files=files)
print("", "update-file:", "-"*20, response.text, "-"*20, sep="\n")
# Build translations
print('Build translations')
response = requests.request('GET', 'https://api.crowdin.com/api/project/' + crowdin_identifier + '/export?key=' + crowdin_api_key)
print("", "export:", "-" * 20, response.text, "-" * 20, sep="\n")


@ -1,2 +1,2 @@
PyQt5<5.12 PyQt5<5.11
PyQt5-sip<=4.19.13 pycryptodomex


@ -1,16 +1,8 @@
# Note: hidapi requires Cython as a build-time dependency (it is not needed at runtime).
# For reproducible builds, the version of Cython must be pinned down.
# Further, the pinned Cython must be installed before hidapi is built;
# otherwise hidapi just downloads the latest Cython. To enforce order,
# Cython must be listed before hidapi. Notably this also applies to
# deterministic-build/requirements-hw.txt where items are lexicographically sorted.
# Hence, we rely on "Cython" preceding "hidapi" lexicographically... :/
# see https://github.com/spesmilo/electrum/issues/5859
Cython>=0.27 Cython>=0.27
trezor[hidapi]>=0.9.0
trezor[hidapi]>=0.11.5
safet[hidapi]>=0.1.0 safet[hidapi]>=0.1.0
keepkey>=6.3.1 keepkey
btchip-python>=0.1.26 btchip-python
ckcc-protocol>=0.7.7 ckcc-protocol>=0.7.2
websocket-client
hidapi hidapi
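The comment at the top of this file explains that the pinned Cython must already be installed when hidapi is built from source; the following is a hedged sketch of the install order that constraint is meant to guarantee (these commands are illustrative, not part of the build scripts):

```
# Hypothetical two-step install: the pinned Cython is present *before* hidapi
# is built, so the hidapi source build does not download the latest Cython.
python3 -m pip install "Cython>=0.27"
python3 -m pip install --no-binary :all: hidapi
```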


@ -1,7 +0,0 @@
pip
setuptools
# needed by pyinstaller:
pefile>=2017.8.1
altgraph
pywin32-ctypes>=0.2.0


@ -1,15 +1,10 @@
pyaes>=0.1a1 pyaes>=0.1a1
ecdsa>=0.14 ecdsa>=0.9
requests
qrcode qrcode
protobuf protobuf
dnspython dnspython
qdarkstyle<2.7 jsonrpclib-pelix
aiorpcx>=0.18,<0.19 PySocks>=1.6.6
aiohttp>=3.3.0,<4.0.0 qdarkstyle<3.0
aiohttp_socks typing>=3.0.0
certifi
bitstring
pycryptodomex>=3.7
jsonrpcserver
jsonrpcclient
attrs

0
contrib/sign_packages Normal file → Executable file


@ -1,4 +0,0 @@
#!/bin/bash
version=`python3 -c "import electrum; print(electrum.version.ELECTRUM_VERSION)"`
sig=`./run_electrum -w $SIGNING_WALLET signmessage $SIGNING_ADDRESS $version`
echo "{ \"version\":\"$version\", \"signatures\":{ \"$SIGNING_ADDRESS\":\"$sig\"}}"


@ -1,12 +0,0 @@
# HW.1 / Nano
SUBSYSTEMS=="usb", ATTRS{idVendor}=="2581", ATTRS{idProduct}=="1b7c|2b7c|3b7c|4b7c", TAG+="uaccess", TAG+="udev-acl"
# Blue
SUBSYSTEMS=="usb", ATTRS{idVendor}=="2c97", ATTRS{idProduct}=="0000|0000|0001|0002|0003|0004|0005|0006|0007|0008|0009|000a|000b|000c|000d|000e|000f|0010|0011|0012|0013|0014|0015|0016|0017|0018|0019|001a|001b|001c|001d|001e|001f", TAG+="uaccess", TAG+="udev-acl"
# Nano S
SUBSYSTEMS=="usb", ATTRS{idVendor}=="2c97", ATTRS{idProduct}=="0001|1000|1001|1002|1003|1004|1005|1006|1007|1008|1009|100a|100b|100c|100d|100e|100f|1010|1011|1012|1013|1014|1015|1016|1017|1018|1019|101a|101b|101c|101d|101e|101f", TAG+="uaccess", TAG+="udev-acl"
# Aramis
SUBSYSTEMS=="usb", ATTRS{idVendor}=="2c97", ATTRS{idProduct}=="0002|2000|2001|2002|2003|2004|2005|2006|2007|2008|2009|200a|200b|200c|200d|200e|200f|2010|2011|2012|2013|2014|2015|2016|2017|2018|2019|201a|201b|201c|201d|201e|201f", TAG+="uaccess", TAG+="udev-acl"
# HW2
SUBSYSTEMS=="usb", ATTRS{idVendor}=="2c97", ATTRS{idProduct}=="0003|3000|3001|3002|3003|3004|3005|3006|3007|3008|3009|300a|300b|300c|300d|300e|300f|3010|3011|3012|3013|3014|3015|3016|3017|3018|3019|301a|301b|301c|301d|301e|301f", TAG+="uaccess", TAG+="udev-acl"
# Nano X
SUBSYSTEMS=="usb", ATTRS{idVendor}=="2c97", ATTRS{idProduct}=="0004|4000|4001|4002|4003|4004|4005|4006|4007|4008|4009|400a|400b|400c|400d|400e|400f|4010|4011|4012|4013|4014|4015|4016|4017|4018|4019|401a|401b|401c|401d|401e|401f", TAG+="uaccess", TAG+="udev-acl"


@ -1,16 +0,0 @@
# Linux udev support file.
#
# This is a example udev file for HIDAPI devices which changes the permissions
# to 0666 (world readable/writable) for a specific device on Linux systems.
#
# - Copy this file into /etc/udev/rules.d and unplug and re-plug your Coldcard.
# - Udev does not have to be restarted.
#
# probably not needed:
SUBSYSTEMS=="usb", ATTRS{idVendor}=="d13e", ATTRS{idProduct}=="cc10", GROUP="plugdev", MODE="0666"
# required:
# from <https://github.com/signal11/hidapi/blob/master/udev/99-hid.rules>
KERNEL=="hidraw*", ATTRS{idVendor}=="d13e", ATTRS{idProduct}=="cc10", GROUP="plugdev", MODE="0666"


@ -1 +0,0 @@
SUBSYSTEM=="usb", TAG+="uaccess", TAG+="udev-acl", SYMLINK+="dbb%n", ATTRS{idVendor}=="03eb", ATTRS{idProduct}=="2402"


@ -1,10 +0,0 @@
# Put this file into /usr/lib/udev/rules.d or /etc/udev/rules.d
# Archos Safe-T mini
SUBSYSTEM=="usb", ATTR{idVendor}=="0e79", ATTR{idProduct}=="6000", MODE="0660", GROUP="plugdev", TAG+="uaccess", TAG+="udev-acl", SYMLINK+="safe-tr%n"
KERNEL=="hidraw*", ATTRS{idVendor}=="0e79", ATTRS{idProduct}=="6000", MODE="0660", GROUP="plugdev", TAG+="uaccess", TAG+="udev-acl"
# Archos Safe-T mini Bootloader
SUBSYSTEM=="usb", ATTR{idVendor}=="0e79", ATTR{idProduct}=="6001", MODE="0660", GROUP="plugdev", TAG+="uaccess", TAG+="udev-acl", SYMLINK+="safe-t%n"
KERNEL=="hidraw*", ATTRS{idVendor}=="0e79", ATTRS{idProduct}=="6001", MODE="0660", GROUP="plugdev", TAG+="uaccess", TAG+="udev-acl"


@ -1,17 +0,0 @@
# Trezor: The Original Hardware Wallet
# https://trezor.io/
#
# Put this file into /etc/udev/rules.d
#
# If you are creating a distribution package,
# put this into /usr/lib/udev/rules.d or /lib/udev/rules.d
# depending on your distribution
# Trezor
SUBSYSTEM=="usb", ATTR{idVendor}=="534c", ATTR{idProduct}=="0001", MODE="0660", GROUP="plugdev", TAG+="uaccess", TAG+="udev-acl", SYMLINK+="trezor%n"
KERNEL=="hidraw*", ATTRS{idVendor}=="534c", ATTRS{idProduct}=="0001", MODE="0660", GROUP="plugdev", TAG+="uaccess", TAG+="udev-acl"
# Trezor v2
SUBSYSTEM=="usb", ATTR{idVendor}=="1209", ATTR{idProduct}=="53c0", MODE="0660", GROUP="plugdev", TAG+="uaccess", TAG+="udev-acl", SYMLINK+="trezor%n"
SUBSYSTEM=="usb", ATTR{idVendor}=="1209", ATTR{idProduct}=="53c1", MODE="0660", GROUP="plugdev", TAG+="uaccess", TAG+="udev-acl", SYMLINK+="trezor%n"
KERNEL=="hidraw*", ATTRS{idVendor}=="1209", ATTRS{idProduct}=="53c1", MODE="0660", GROUP="plugdev", TAG+="uaccess", TAG+="udev-acl"


@ -1,11 +0,0 @@
# KeepKey: Your Private Bitcoin Vault
# http://www.keepkey.com/
# Put this file into /usr/lib/udev/rules.d or /etc/udev/rules.d
# KeepKey HID Firmware/Bootloader
SUBSYSTEM=="usb", ATTR{idVendor}=="2b24", ATTR{idProduct}=="0001", MODE="0666", GROUP="plugdev", TAG+="uaccess", TAG+="udev-acl", SYMLINK+="keepkey%n"
KERNEL=="hidraw*", ATTRS{idVendor}=="2b24", ATTRS{idProduct}=="0001", MODE="0666", GROUP="plugdev", TAG+="uaccess", TAG+="udev-acl"
# KeepKey WebUSB Firmware/Bootloader
SUBSYSTEM=="usb", ATTR{idVendor}=="2b24", ATTR{idProduct}=="0002", MODE="0666", GROUP="plugdev", TAG+="uaccess", TAG+="udev-acl", SYMLINK+="keepkey%n"
KERNEL=="hidraw*", ATTRS{idVendor}=="2b24", ATTRS{idProduct}=="0002", MODE="0666", GROUP="plugdev", TAG+="uaccess", TAG+="udev-acl"


@ -1 +0,0 @@
KERNEL=="hidraw*", SUBSYSTEM=="hidraw", ATTRS{idVendor}=="03eb", ATTRS{idProduct}=="2402", TAG+="uaccess", TAG+="udev-acl", SYMLINK+="dbbf%n"


@ -1,24 +0,0 @@
# udev rules
This directory contains all of the udev rules for the supported devices
as retrieved from vendor websites and repositories.
These are necessary for the devices to be usable on Linux environments.
- `20-hw1.rules` (Ledger): https://github.com/LedgerHQ/udev-rules/blob/master/20-hw1.rules
- `51-coinkite.rules` (Coldcard): https://github.com/Coldcard/ckcc-protocol/blob/master/51-coinkite.rules
- `51-hid-digitalbitbox.rules`, `52-hid-digitalbitbox.rules` (Digital Bitbox): https://shiftcrypto.ch/start_linux
- `51-trezor.rules` (Trezor): https://github.com/trezor/trezor-common/blob/master/udev/51-trezor.rules
- `51-usb-keepkey.rules` (Keepkey): https://github.com/keepkey/udev-rules/blob/master/51-usb-keepkey.rules
- `51-safe-t.rules` (Archos): https://github.com/archos-safe-t/safe-t-common/blob/master/udev/51-safe-t.rules
# Usage
Apply these rules by copying them to `/etc/udev/rules.d/` and notifying `udevadm`.
Your user will need to be added to the `plugdev` group, which needs to be created if it does not already exist.
```
$ sudo groupadd plugdev
$ sudo usermod -aG plugdev $(whoami)
$ sudo cp contrib/udev/*.rules /etc/udev/rules.d/
$ sudo udevadm control --reload-rules && sudo udevadm trigger
```
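As an optional, hedged follow-up (not part of the upstream instructions), you can sanity-check that the rules took effect after replugging a device; the device nodes listed below depend on your hardware:

```
# Hypothetical check: hidraw nodes for a matched device should now be
# accessible to plugdev members (or carry the uaccess ACL).
ls -l /dev/hidraw*
```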

5
contrib/upload Normal file → Executable file

@ -2,17 +2,16 @@
set -e set -e
host=$1
version=`git describe --tags` version=`git describe --tags`
echo $version echo $version
here=$(dirname "$0") here=$(dirname "$0")
cd $here/../dist cd $here/../dist
sftp -oBatchMode=no -b - thomasv@$host << ! sftp -oBatchMode=no -b - thomasv@download.electrum.org << !
cd electrum-downloads cd electrum-downloads
mkdir $version mkdir $version
cd $version cd $version
mput * mput *
bye bye
! !

4
electrum-env Normal file → Executable file
View file

@ -1,4 +1,4 @@
#!/usr/bin/env bash #!/bin/bash
# #
# This script creates a virtualenv named 'env' and installs all # This script creates a virtualenv named 'env' and installs all
# python dependencies before activating the env and running Electrum. # python dependencies before activating the env and running Electrum.
@ -17,7 +17,7 @@ if [ -e ./env/bin/activate ]; then
else else
virtualenv env -p `which python3` virtualenv env -p `which python3`
source ./env/bin/activate source ./env/bin/activate
python3 -m pip install .[fast] python3 setup.py install
fi fi
export PYTHONPATH="/usr/local/lib/python${PYTHON_VER}/site-packages:$PYTHONPATH" export PYTHONPATH="/usr/local/lib/python${PYTHON_VER}/site-packages:$PYTHONPATH"

16
electrum.conf.sample Normal file
View file

@ -0,0 +1,16 @@
# Configuration file for the Electrum client
# Settings defined here are shared across wallets
#
# copy this file to /etc/electrum.conf if you want read-only settings
[client]
server = electrum.novit.ro:50001:t
proxy = None
gap_limit = 5
# booleans use python syntax
use_change = True
gui = qt
num_zeros = 2
# default transaction fee is in Satoshis
fee = 10000
winpos-qt = [799, 226, 877, 435]
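
The sample uses an INI-style `[client]` section with Python-literal values (booleans, `None`, integers, lists). A rough sketch of how such a file could be read back with the standard library, shown only to illustrate the value syntax; this is not Electrum's actual config loader, and the path is just the one suggested in the comment above:

```
import ast
import configparser

def read_sample_config(path: str = "/etc/electrum.conf") -> dict:
    cp = configparser.ConfigParser()
    cp.read(path)
    out = {}
    for key, raw in cp["client"].items():
        try:
            out[key] = ast.literal_eval(raw)   # True, None, 10000, [799, 226, ...]
        except (ValueError, SyntaxError):
            out[key] = raw                     # plain strings such as the server spec
    return out
```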

View file

@ -3,20 +3,19 @@
[Desktop Entry] [Desktop Entry]
Comment=Lightweight Bitcoin Client Comment=Lightweight Bitcoin Client
Exec=sh -c "PATH=\"\\$HOME/.local/bin:\\$PATH\"; electrum %u" Exec=sh -c "PATH=\"\\$HOME/.local/bin:\\$PATH\" electrum %u"
GenericName[en_US]=Bitcoin Wallet GenericName[en_US]=Bitcoin Wallet
GenericName=Bitcoin Wallet GenericName=Bitcoin Wallet
Icon=electrum Icon=electrum
Name[en_US]=Electrum Bitcoin Wallet Name[en_US]=Electrum Bitcoin Wallet
Name=Electrum Bitcoin Wallet Name=Electrum Bitcoin Wallet
Categories=Finance;Network; Categories=Finance;Network;
StartupNotify=true StartupNotify=false
StartupWMClass=electrum
Terminal=false Terminal=false
Type=Application Type=Application
MimeType=x-scheme-handler/bitcoin; MimeType=x-scheme-handler/bitcoin;
Actions=Testnet; Actions=Testnet;
[Desktop Action Testnet] [Desktop Action Testnet]
Exec=sh -c "PATH=\"\\$HOME/.local/bin:\\$PATH\"; electrum --testnet %u" Exec=sh -c "PATH=\"\\$HOME/.local/bin:\\$PATH\" electrum --testnet %u"
Name=Testnet mode Name=Testnet mode

View file

@ -1,30 +1,14 @@
import sys
import os
# these are ~duplicated from run_electrum:
is_bundle = getattr(sys, 'frozen', False)
is_local = not is_bundle and os.path.exists(os.path.join(os.path.dirname(os.path.dirname(__file__)), "electrum.desktop"))
# when running from source, on Windows, also search for DLLs in inner 'electrum' folder
if is_local and os.name == 'nt':
if hasattr(os, 'add_dll_directory'): # requires python 3.8+
os.add_dll_directory(os.path.dirname(__file__))
from .version import ELECTRUM_VERSION from .version import ELECTRUM_VERSION
from .util import format_satoshis from .util import format_satoshis, print_msg, print_error, set_verbosity
from .wallet import Wallet from .wallet import Wallet
from .storage import WalletStorage from .storage import WalletStorage
from .coinchooser import COIN_CHOOSERS from .coinchooser import COIN_CHOOSERS
from .network import Network, pick_random_server from .network import Network, pick_random_server
from .interface import Interface from .interface import Connection, Interface
from .simple_config import SimpleConfig from .simple_config import SimpleConfig, get_config, set_config
from . import bitcoin from . import bitcoin
from . import transaction from . import transaction
from . import daemon from . import daemon
from .transaction import Transaction from .transaction import Transaction
from .plugin import BasePlugin from .plugin import BasePlugin
from .commands import Commands, known_commands from .commands import Commands, known_commands
__version__ = ELECTRUM_VERSION

File diff suppressed because it is too large

View file

@ -19,7 +19,6 @@
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN # ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE. # SOFTWARE.
import asyncio
import json import json
import locale import locale
import traceback import traceback
@ -27,14 +26,14 @@ import subprocess
import sys import sys
import os import os
import requests
from .version import ELECTRUM_VERSION from .version import ELECTRUM_VERSION
from . import constants from .import constants
from .i18n import _ from .i18n import _
from .util import make_aiohttp_session
from .logging import describe_os_version, Logger
class BaseCrashReporter(Logger): class BaseCrashReporter(object):
report_server = "https://crashhub.electrum.org" report_server = "https://crashhub.electrum.org"
config_key = "show_crash_reporter" config_key = "show_crash_reporter"
issue_template = """<h2>Traceback</h2> issue_template = """<h2>Traceback</h2>
@ -59,25 +58,18 @@ class BaseCrashReporter(Logger):
ASK_CONFIRM_SEND = _("Do you want to send this report?") ASK_CONFIRM_SEND = _("Do you want to send this report?")
def __init__(self, exctype, value, tb): def __init__(self, exctype, value, tb):
Logger.__init__(self)
self.exc_args = (exctype, value, tb) self.exc_args = (exctype, value, tb)
def send_report(self, asyncio_loop, proxy, endpoint="/crash", *, timeout=None): def send_report(self, endpoint="/crash"):
if constants.net.GENESIS[-4:] not in ["4943", "e26f"] and ".electrum.org" in BaseCrashReporter.report_server: if constants.net.GENESIS[-4:] not in ["4943", "e26f"] and ".electrum.org" in BaseCrashReporter.report_server:
# Gah! Some kind of altcoin wants to send us crash reports. # Gah! Some kind of altcoin wants to send us crash reports.
raise Exception(_("Missing report URL.")) raise Exception(_("Missing report URL."))
report = self.get_traceback_info() report = self.get_traceback_info()
report.update(self.get_additional_info()) report.update(self.get_additional_info())
report = json.dumps(report) report = json.dumps(report)
coro = self.do_post(proxy, BaseCrashReporter.report_server + endpoint, data=report) response = requests.post(BaseCrashReporter.report_server + endpoint, data=report)
response = asyncio.run_coroutine_threadsafe(coro, asyncio_loop).result(timeout)
return response return response
async def do_post(self, proxy, url, data):
async with make_aiohttp_session(proxy) as session:
async with session.post(url, data=data) as resp:
return await resp.text()
def get_traceback_info(self): def get_traceback_info(self):
exc_string = str(self.exc_args[1]) exc_string = str(self.exc_args[1])
stack = traceback.extract_tb(self.exc_args[2]) stack = traceback.extract_tb(self.exc_args[2])
@ -97,7 +89,7 @@ class BaseCrashReporter(Logger):
args = { args = {
"app_version": ELECTRUM_VERSION, "app_version": ELECTRUM_VERSION,
"python_version": sys.version, "python_version": sys.version,
"os": describe_os_version(), "os": self.get_os_version(),
"wallet_type": "unknown", "wallet_type": "unknown",
"locale": locale.getdefaultlocale()[0] or "?", "locale": locale.getdefaultlocale()[0] or "?",
"description": self.get_user_description() "description": self.get_user_description()
@ -132,19 +124,5 @@ class BaseCrashReporter(Logger):
def get_wallet_type(self): def get_wallet_type(self):
raise NotImplementedError raise NotImplementedError
def get_os_version(self):
def trigger_crash(): raise NotImplementedError
# note: do not change the type of the exception, the message,
# or the name of this method. All reports generated through this
# method will be grouped together by the crash reporter, and thus
# don't spam the issue tracker.
class TestingException(Exception):
pass
def crash_test():
raise TestingException("triggered crash for testing purposes")
import threading
t = threading.Thread(target=crash_test)
t.start()
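
The change above replaces the blocking `requests.post` call with an aiohttp coroutine that is handed to the network event loop via `asyncio.run_coroutine_threadsafe`, so the calling (GUI) thread only waits on the future's timeout. A stripped-down sketch of that pattern; proxy handling via `make_aiohttp_session` is omitted and the URL is a placeholder:

```
import asyncio
import json

import aiohttp

async def _post_json(url: str, payload: dict) -> str:
    async with aiohttp.ClientSession() as session:
        async with session.post(url, data=json.dumps(payload)) as resp:
            return await resp.text()

def send_report_from_gui_thread(loop: asyncio.AbstractEventLoop, payload: dict,
                                url: str = "https://example.invalid/crash",
                                timeout: float = 10.0) -> str:
    # `loop` is assumed to be running in another (network) thread.
    fut = asyncio.run_coroutine_threadsafe(_post_json(url, payload), loop)
    return fut.result(timeout)
```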

View file

@ -25,31 +25,16 @@
import os import os
import sys import sys
import copy
import traceback import traceback
from functools import partial from functools import partial
from typing import List, TYPE_CHECKING, Tuple, NamedTuple, Any, Dict, Optional
from . import bitcoin from . import bitcoin
from . import keystore from . import keystore
from . import mnemonic from .keystore import bip44_derivation, purpose48_derivation
from .bip32 import is_bip32_derivation, xpub_type, normalize_bip32_derivation, BIP32Node from .wallet import Imported_Wallet, Standard_Wallet, Multisig_Wallet, wallet_types, Wallet
from .keystore import bip44_derivation, purpose48_derivation, Hardware_KeyStore, KeyStore from .storage import STO_EV_USER_PW, STO_EV_XPUB_PW, get_derivation_used_for_hw_device_encryption
from .wallet import (Imported_Wallet, Standard_Wallet, Multisig_Wallet,
wallet_types, Wallet, Abstract_Wallet)
from .storage import (WalletStorage, StorageEncryptionVersion,
get_derivation_used_for_hw_device_encryption)
from .wallet_db import WalletDB
from .i18n import _ from .i18n import _
from .util import UserCancelled, InvalidPassword, WalletFileException from .util import UserCancelled, InvalidPassword, WalletFileException
from .simple_config import SimpleConfig
from .plugin import Plugins, HardwarePluginLibraryUnavailable
from .logging import Logger
from .plugins.hw_wallet.plugin import OutdatedHwFirmwareException, HW_PluginBase
if TYPE_CHECKING:
from .plugin import DeviceInfo, BasePlugin
# hardware device setup purpose # hardware device setup purpose
HWD_SETUP_NEW_WALLET, HWD_SETUP_DECRYPT_WALLET = range(0, 2) HWD_SETUP_NEW_WALLET, HWD_SETUP_DECRYPT_WALLET = range(0, 2)
@ -61,77 +46,53 @@ class ScriptTypeNotSupported(Exception): pass
class GoBack(Exception): pass class GoBack(Exception): pass
class WizardStackItem(NamedTuple): class BaseWizard(object):
action: Any
args: Any
kwargs: Dict[str, Any]
db_data: dict
def __init__(self, config, plugins, storage):
class WizardWalletPasswordSetting(NamedTuple):
password: Optional[str]
encrypt_storage: bool
storage_enc_version: StorageEncryptionVersion
encrypt_keystore: bool
class BaseWizard(Logger):
def __init__(self, config: SimpleConfig, plugins: Plugins):
super(BaseWizard, self).__init__() super(BaseWizard, self).__init__()
Logger.__init__(self)
self.config = config self.config = config
self.plugins = plugins self.plugins = plugins
self.data = {} self.storage = storage
self.pw_args = None # type: Optional[WizardWalletPasswordSetting] self.wallet = None
self._stack = [] # type: List[WizardStackItem] self.stack = []
self.plugin = None # type: Optional[BasePlugin] self.plugin = None
self.keystores = [] # type: List[KeyStore] self.keystores = []
self.is_kivy = config.get('gui') == 'kivy' self.is_kivy = config.get('gui') == 'kivy'
self.seed_type = None self.seed_type = None
def set_icon(self, icon): def set_icon(self, icon):
pass pass
def run(self, *args, **kwargs): def run(self, *args):
action = args[0] action = args[0]
args = args[1:] args = args[1:]
db_data = copy.deepcopy(self.data) self.stack.append((action, args))
self._stack.append(WizardStackItem(action, args, kwargs, db_data))
if not action: if not action:
return return
if type(action) is tuple: if type(action) is tuple:
self.plugin, action = action self.plugin, action = action
if self.plugin and hasattr(self.plugin, action): if self.plugin and hasattr(self.plugin, action):
f = getattr(self.plugin, action) f = getattr(self.plugin, action)
f(self, *args, **kwargs) f(self, *args)
elif hasattr(self, action): elif hasattr(self, action):
f = getattr(self, action) f = getattr(self, action)
f(*args, **kwargs) f(*args)
else: else:
raise Exception("unknown action", action) raise Exception("unknown action", action)
def can_go_back(self): def can_go_back(self):
return len(self._stack) > 1 return len(self.stack)>1
def go_back(self): def go_back(self):
if not self.can_go_back(): if not self.can_go_back():
return return
# pop 'current' frame self.stack.pop()
self._stack.pop() action, args = self.stack.pop()
# pop 'previous' frame self.run(action, *args)
stack_item = self._stack.pop()
# try to undo side effects since we last entered 'previous' frame
# FIXME only self.storage is properly restored
self.data = copy.deepcopy(stack_item.db_data)
# rerun 'previous' frame
self.run(stack_item.action, *stack_item.args, **stack_item.kwargs)
def reset_stack(self):
self._stack = []
def new(self): def new(self):
title = _("Create new wallet") name = os.path.basename(self.storage.path)
title = _("Create") + ' ' + name
message = '\n'.join([ message = '\n'.join([
_("What kind of wallet do you want to create?") _("What kind of wallet do you want to create?")
]) ])
@ -139,48 +100,49 @@ class BaseWizard(Logger):
('standard', _("Standard wallet")), ('standard', _("Standard wallet")),
('2fa', _("Wallet with two-factor authentication")), ('2fa', _("Wallet with two-factor authentication")),
('multisig', _("Multi-signature wallet")), ('multisig', _("Multi-signature wallet")),
('imported', _("Import LBRY Credits addresses or private keys")), ('imported', _("Import Bitcoin addresses or private keys")),
] ]
choices = [pair for pair in wallet_kinds if pair[0] in wallet_types] choices = [pair for pair in wallet_kinds if pair[0] in wallet_types]
self.choice_dialog(title=title, message=message, choices=choices, run_next=self.on_wallet_type) self.choice_dialog(title=title, message=message, choices=choices, run_next=self.on_wallet_type)
def upgrade_db(self, storage, db): def upgrade_storage(self):
exc = None exc = None
def on_finished(): def on_finished():
if exc is None: if exc is None:
self.terminate(storage=storage, db=db) self.wallet = Wallet(self.storage)
self.terminate()
else: else:
raise exc raise exc
def do_upgrade(): def do_upgrade():
nonlocal exc nonlocal exc
try: try:
db.upgrade() self.storage.upgrade()
except Exception as e: except Exception as e:
exc = e exc = e
self.waiting_dialog(do_upgrade, _('Upgrading wallet format...'), on_finished=on_finished) self.waiting_dialog(do_upgrade, _('Upgrading wallet format...'), on_finished=on_finished)
def load_2fa(self): def load_2fa(self):
self.data['wallet_type'] = '2fa' self.storage.put('wallet_type', '2fa')
self.data['use_trustedcoin'] = True self.storage.put('use_trustedcoin', True)
self.plugin = self.plugins.load_plugin('trustedcoin') self.plugin = self.plugins.load_plugin('trustedcoin')
def on_wallet_type(self, choice): def on_wallet_type(self, choice):
self.data['wallet_type'] = self.wallet_type = choice self.wallet_type = choice
if choice == 'standard': if choice == 'standard':
action = 'choose_keystore' action = 'choose_keystore'
elif choice == 'multisig': elif choice == 'multisig':
action = 'choose_multisig' action = 'choose_multisig'
elif choice == '2fa': elif choice == '2fa':
self.load_2fa() self.load_2fa()
action = self.plugin.get_action(self.data) action = self.storage.get_action()
elif choice == 'imported': elif choice == 'imported':
action = 'import_addresses_or_keys' action = 'import_addresses_or_keys'
self.run(action) self.run(action)
def choose_multisig(self): def choose_multisig(self):
def on_multisig(m, n): def on_multisig(m, n):
multisig_type = "%dof%d" % (m, n) self.multisig_type = "%dof%d"%(m, n)
self.data['wallet_type'] = multisig_type self.storage.put('wallet_type', self.multisig_type)
self.n = n self.n = n
self.run('choose_keystore') self.run('choose_keystore')
self.multisig_dialog(run_next=on_multisig) self.multisig_dialog(run_next=on_multisig)
@ -210,29 +172,26 @@ class BaseWizard(Logger):
self.choice_dialog(title=title, message=message, choices=choices, run_next=self.run) self.choice_dialog(title=title, message=message, choices=choices, run_next=self.run)
def import_addresses_or_keys(self): def import_addresses_or_keys(self):
v = lambda x: keystore.is_address_list(x) or keystore.is_private_key_list(x, raise_on_error=True) v = lambda x: keystore.is_address_list(x) or keystore.is_private_key_list(x)
title = _("Import LBRY Credits Addresses") title = _("Import Bitcoin Addresses")
message = _("Enter a list of LBRY Credits addresses (this will create a watching-only wallet), or a list of private keys.") message = _("Enter a list of Bitcoin addresses (this will create a watching-only wallet), or a list of private keys.")
self.add_xpub_dialog(title=title, message=message, run_next=self.on_import, self.add_xpub_dialog(title=title, message=message, run_next=self.on_import,
is_valid=v, allow_multi=True, show_wif_help=True) is_valid=v, allow_multi=True, show_wif_help=True)
def on_import(self, text): def on_import(self, text):
# text is already sanitized by is_address_list and is_private_keys_list # create a temporary wallet and exploit that modifications
# will be reflected on self.storage
if keystore.is_address_list(text): if keystore.is_address_list(text):
self.data['addresses'] = {} w = Imported_Wallet(self.storage)
for addr in text.split(): for x in text.split():
assert bitcoin.is_address(addr) w.import_address(x)
self.data['addresses'][addr] = {}
elif keystore.is_private_key_list(text): elif keystore.is_private_key_list(text):
self.data['addresses'] = {}
k = keystore.Imported_KeyStore({}) k = keystore.Imported_KeyStore({})
keys = keystore.get_private_keys(text) self.storage.put('keystore', k.dump())
for pk in keys: w = Imported_Wallet(self.storage)
assert bitcoin.is_private_key(pk) for x in keystore.get_private_keys(text):
txin_type, pubkey = k.import_privkey(pk, None) w.import_private_key(x, None)
addr = bitcoin.pubkey_to_address(txin_type, pubkey) self.keystores.append(w.keystore)
self.data['addresses'][addr] = {'type':txin_type, 'pubkey':pubkey}
self.keystores.append(k)
else: else:
return self.terminate() return self.terminate()
return self.run('create_wallet') return self.run('create_wallet')
@ -254,70 +213,49 @@ class BaseWizard(Logger):
k = keystore.from_master_key(text) k = keystore.from_master_key(text)
self.on_keystore(k) self.on_keystore(k)
def choose_hw_device(self, purpose=HWD_SETUP_NEW_WALLET, *, storage=None): def choose_hw_device(self, purpose=HWD_SETUP_NEW_WALLET):
title = _('Hardware Keystore') title = _('Hardware Keystore')
# check available plugins # check available plugins
supported_plugins = self.plugins.get_hardware_support() support = self.plugins.get_hardware_support()
devices = [] # type: List[Tuple[str, DeviceInfo]] if not support:
devmgr = self.plugins.device_manager msg = '\n'.join([
debug_msg = '' _('No hardware wallet support found on your system.'),
_('Please install the relevant libraries (eg python-trezor for Trezor).'),
def failed_getting_device_infos(name, e): ])
nonlocal debug_msg self.confirm_dialog(title=title, message=msg, run_next= lambda x: self.choose_hw_device(purpose))
err_str_oneline = ' // '.join(str(e).splitlines()) return
self.logger.warning(f'error getting device infos for {name}: {err_str_oneline}')
indented_error_msg = ' '.join([''] + str(e).splitlines(keepends=True))
debug_msg += f' {name}: (error getting device infos)\n{indented_error_msg}\n'
# scan devices # scan devices
devices = []
devmgr = self.plugins.device_manager
try: try:
scanned_devices = devmgr.scan_devices() scanned_devices = devmgr.scan_devices()
except BaseException as e: except BaseException as e:
self.logger.info('error scanning devices: {}'.format(repr(e))) devmgr.print_error('error scanning devices: {}'.format(e))
debug_msg = ' {}:\n {}'.format(_('Error scanning devices'), e) debug_msg = ' {}:\n {}'.format(_('Error scanning devices'), e)
else: else:
for splugin in supported_plugins: debug_msg = ''
name, plugin = splugin.name, splugin.plugin for name, description, plugin in support:
# plugin init errored?
if not plugin:
e = splugin.exception
indented_error_msg = ' '.join([''] + str(e).splitlines(keepends=True))
debug_msg += f' {name}: (error during plugin init)\n'
debug_msg += ' {}\n'.format(_('You might have an incompatible library.'))
debug_msg += f'{indented_error_msg}\n'
continue
# see if plugin recognizes 'scanned_devices'
try: try:
# FIXME: side-effect: unpaired_device_info sets client.handler # FIXME: side-effect: unpaired_device_info sets client.handler
device_infos = devmgr.unpaired_device_infos(None, plugin, devices=scanned_devices, u = devmgr.unpaired_device_infos(None, plugin, devices=scanned_devices)
include_failing_clients=True)
except HardwarePluginLibraryUnavailable as e:
failed_getting_device_infos(name, e)
continue
except BaseException as e: except BaseException as e:
self.logger.exception('') devmgr.print_error('error getting device infos for {}: {}'.format(name, e))
failed_getting_device_infos(name, e) indented_error_msg = ' '.join([''] + str(e).splitlines(keepends=True))
debug_msg += ' {}:\n{}\n'.format(plugin.name, indented_error_msg)
continue continue
device_infos_failing = list(filter(lambda di: di.exception is not None, device_infos)) devices += list(map(lambda x: (name, x), u))
for di in device_infos_failing:
failed_getting_device_infos(name, di.exception)
device_infos_working = list(filter(lambda di: di.exception is None, device_infos))
devices += list(map(lambda x: (name, x), device_infos_working))
if not debug_msg: if not debug_msg:
debug_msg = ' {}'.format(_('No exceptions encountered.')) debug_msg = ' {}'.format(_('No exceptions encountered.'))
if not devices: if not devices:
msg = (_('No hardware device detected.') + '\n' + msg = ''.join([
_('To trigger a rescan, press \'Next\'.') + '\n\n') _('No hardware device detected.') + '\n',
if sys.platform == 'win32': _('To trigger a rescan, press \'Next\'.') + '\n\n',
msg += _('If your device is not detected on Windows, go to "Settings", "Devices", "Connected devices", ' _('If your device is not detected on Windows, go to "Settings", "Devices", "Connected devices", and do "Remove device". Then, plug your device again.') + ' ',
'and do "Remove device". Then, plug your device again.') + '\n' _('On Linux, you might have to add a new permission to your udev rules.') + '\n\n',
msg += _('While this is less than ideal, it might help if you run Electrum as Administrator.') + '\n' _('Debug message') + '\n',
else: debug_msg
msg += _('On Linux, you might have to add a new permission to your udev rules.') + '\n' ])
msg += '\n\n' self.confirm_dialog(title=title, message=msg, run_next= lambda x: self.choose_hw_device(purpose))
msg += _('Debug message') + '\n' + debug_msg
self.confirm_dialog(title=title, message=msg,
run_next=lambda x: self.choose_hw_device(purpose, storage=storage))
return return
# select device # select device
self.devices = devices self.devices = devices
@ -325,16 +263,13 @@ class BaseWizard(Logger):
for name, info in devices: for name, info in devices:
state = _("initialized") if info.initialized else _("wiped") state = _("initialized") if info.initialized else _("wiped")
label = info.label or _("An unnamed {}").format(name) label = info.label or _("An unnamed {}").format(name)
try: transport_str = info.device.transport_ui_string[:20] descr = "%s [%s, %s]" % (label, name, state)
except: transport_str = 'unknown transport'
descr = f"{label} [{name}, {state}, {transport_str}]"
choices.append(((name, info), descr)) choices.append(((name, info), descr))
msg = _('Select a device') + ':' msg = _('Select a device') + ':'
self.choice_dialog(title=title, message=msg, choices=choices, self.choice_dialog(title=title, message=msg, choices=choices, run_next= lambda *args: self.on_device(*args, purpose=purpose))
run_next=lambda *args: self.on_device(*args, purpose=purpose, storage=storage))
def on_device(self, name, device_info, *, purpose, storage=None): def on_device(self, name, device_info, *, purpose):
self.plugin = self.plugins.get_plugin(name) # type: HW_PluginBase self.plugin = self.plugins.get_plugin(name)
try: try:
self.plugin.setup_device(device_info, self, purpose) self.plugin.setup_device(device_info, self, purpose)
except OSError as e: except OSError as e:
@ -344,35 +279,26 @@ class BaseWizard(Logger):
+ _('Please try again.')) + _('Please try again.'))
devmgr = self.plugins.device_manager devmgr = self.plugins.device_manager
devmgr.unpair_id(device_info.device.id_) devmgr.unpair_id(device_info.device.id_)
self.choose_hw_device(purpose, storage=storage) self.choose_hw_device(purpose)
return
except OutdatedHwFirmwareException as e:
if self.question(e.text_ignore_old_fw_and_continue(), title=_("Outdated device firmware")):
self.plugin.set_ignore_outdated_fw()
# will need to re-pair
devmgr = self.plugins.device_manager
devmgr.unpair_id(device_info.device.id_)
self.choose_hw_device(purpose, storage=storage)
return return
except (UserCancelled, GoBack): except (UserCancelled, GoBack):
self.choose_hw_device(purpose, storage=storage) self.choose_hw_device(purpose)
return return
except BaseException as e: except BaseException as e:
self.logger.exception('') traceback.print_exc(file=sys.stderr)
self.show_error(str(e)) self.show_error(str(e))
self.choose_hw_device(purpose, storage=storage) self.choose_hw_device(purpose)
return return
if purpose == HWD_SETUP_NEW_WALLET: if purpose == HWD_SETUP_NEW_WALLET:
def f(derivation, script_type): def f(derivation, script_type):
derivation = normalize_bip32_derivation(derivation)
self.run('on_hw_derivation', name, device_info, derivation, script_type) self.run('on_hw_derivation', name, device_info, derivation, script_type)
self.derivation_and_script_type_dialog(f) self.derivation_and_script_type_dialog(f)
elif purpose == HWD_SETUP_DECRYPT_WALLET: elif purpose == HWD_SETUP_DECRYPT_WALLET:
derivation = get_derivation_used_for_hw_device_encryption() derivation = get_derivation_used_for_hw_device_encryption()
xpub = self.plugin.get_xpub(device_info.device.id_, derivation, 'standard', self) xpub = self.plugin.get_xpub(device_info.device.id_, derivation, 'standard', self)
password = keystore.Xpub.get_pubkey_from_xpub(xpub, ()).hex() password = keystore.Xpub.get_pubkey_from_xpub(xpub, ())
try: try:
storage.decrypt(password) self.storage.decrypt(password)
except InvalidPassword: except InvalidPassword:
# try to clear session so that user can type another passphrase # try to clear session so that user can type another passphrase
devmgr = self.plugins.device_manager devmgr = self.plugins.device_manager
@ -385,7 +311,7 @@ class BaseWizard(Logger):
def derivation_and_script_type_dialog(self, f): def derivation_and_script_type_dialog(self, f):
message1 = _('Choose the type of addresses in your wallet.') message1 = _('Choose the type of addresses in your wallet.')
message2 = ' '.join([ message2 = '\n'.join([
_('You can override the suggested derivation path.'), _('You can override the suggested derivation path.'),
_('If you are not sure what this is, leave this field unchanged.') _('If you are not sure what this is, leave this field unchanged.')
]) ])
@ -393,13 +319,12 @@ class BaseWizard(Logger):
# There is no general standard for HD multisig. # There is no general standard for HD multisig.
# For legacy, this is partially compatible with BIP45; assumes index=0 # For legacy, this is partially compatible with BIP45; assumes index=0
# For segwit, a custom path is used, as there is no standard at all. # For segwit, a custom path is used, as there is no standard at all.
default_choice_idx = 0
choices = [ choices = [
('standard', 'legacy multisig (p2sh)', normalize_bip32_derivation("m/45'/0")), ('standard', 'legacy multisig (p2sh)', "m/45'/0"),
('p2wsh-p2sh', 'p2sh-segwit multisig (p2wsh-p2sh)', purpose48_derivation(0, xtype='p2wsh-p2sh')),
('p2wsh', 'native segwit multisig (p2wsh)', purpose48_derivation(0, xtype='p2wsh')),
] ]
else: else:
default_choice_idx = 0
choices = [ choices = [
('standard', 'legacy (p2pkh)', bip44_derivation(0, bip43_purpose=44)), ('standard', 'legacy (p2pkh)', bip44_derivation(0, bip43_purpose=44)),
('p2wpkh-p2sh', 'p2sh-segwit (p2wpkh-p2sh)', bip44_derivation(0, bip43_purpose=49)), ('p2wpkh-p2sh', 'p2sh-segwit (p2wpkh-p2sh)', bip44_derivation(0, bip43_purpose=49)),
@ -409,8 +334,7 @@ class BaseWizard(Logger):
try: try:
self.choice_and_line_dialog( self.choice_and_line_dialog(
run_next=f, title=_('Script type and Derivation path'), message1=message1, run_next=f, title=_('Script type and Derivation path'), message1=message1,
message2=message2, choices=choices, test_text=is_bip32_derivation, message2=message2, choices=choices, test_text=bitcoin.is_bip32_derivation)
default_choice_idx=default_choice_idx)
return return
except ScriptTypeNotSupported as e: except ScriptTypeNotSupported as e:
self.show_error(e) self.show_error(e)
@ -418,23 +342,18 @@ class BaseWizard(Logger):
def on_hw_derivation(self, name, device_info, derivation, xtype): def on_hw_derivation(self, name, device_info, derivation, xtype):
from .keystore import hardware_keystore from .keystore import hardware_keystore
devmgr = self.plugins.device_manager
try: try:
xpub = self.plugin.get_xpub(device_info.device.id_, derivation, xtype, self) xpub = self.plugin.get_xpub(device_info.device.id_, derivation, xtype, self)
client = devmgr.client_by_id(device_info.device.id_)
if not client: raise Exception("failed to find client for device id")
root_fingerprint = client.request_root_fingerprint_from_device()
except ScriptTypeNotSupported: except ScriptTypeNotSupported:
raise # this is handled in derivation_dialog raise # this is handled in derivation_dialog
except BaseException as e: except BaseException as e:
self.logger.exception('') traceback.print_exc(file=sys.stderr)
self.show_error(e) self.show_error(e)
return return
d = { d = {
'type': 'hardware', 'type': 'hardware',
'hw_type': name, 'hw_type': name,
'derivation': derivation, 'derivation': derivation,
'root_fingerprint': root_fingerprint,
'xpub': xpub, 'xpub': xpub,
'label': device_info.label, 'label': device_info.label,
} }
@ -459,12 +378,12 @@ class BaseWizard(Logger):
def restore_from_seed(self): def restore_from_seed(self):
self.opt_bip39 = True self.opt_bip39 = True
self.opt_ext = True self.opt_ext = True
is_cosigning_seed = lambda x: mnemonic.seed_type(x) in ['standard', 'segwit'] is_cosigning_seed = lambda x: bitcoin.seed_type(x) in ['standard', 'segwit']
test = mnemonic.is_seed if self.wallet_type == 'standard' else is_cosigning_seed test = bitcoin.is_seed if self.wallet_type == 'standard' else is_cosigning_seed
self.restore_seed_dialog(run_next=self.on_restore_seed, test=test) self.restore_seed_dialog(run_next=self.on_restore_seed, test=test)
def on_restore_seed(self, seed, is_bip39, is_ext): def on_restore_seed(self, seed, is_bip39, is_ext):
self.seed_type = 'bip39' if is_bip39 else mnemonic.seed_type(seed) self.seed_type = 'bip39' if is_bip39 else bitcoin.seed_type(seed)
if self.seed_type == 'bip39': if self.seed_type == 'bip39':
f = lambda passphrase: self.on_restore_bip39(seed, passphrase) f = lambda passphrase: self.on_restore_bip39(seed, passphrase)
self.passphrase_dialog(run_next=f, is_restoring=True) if is_ext else f('') self.passphrase_dialog(run_next=f, is_restoring=True) if is_ext else f('')
@ -473,7 +392,7 @@ class BaseWizard(Logger):
self.passphrase_dialog(run_next=f, is_restoring=True) if is_ext else f('') self.passphrase_dialog(run_next=f, is_restoring=True) if is_ext else f('')
elif self.seed_type == 'old': elif self.seed_type == 'old':
self.run('create_keystore', seed, '') self.run('create_keystore', seed, '')
elif mnemonic.is_any_2fa_seed_type(self.seed_type): elif self.seed_type == '2fa':
self.load_2fa() self.load_2fa()
self.run('on_restore_seed', seed, is_ext) self.run('on_restore_seed', seed, is_ext)
else: else:
@ -481,7 +400,6 @@ class BaseWizard(Logger):
def on_restore_bip39(self, seed, passphrase): def on_restore_bip39(self, seed, passphrase):
def f(derivation, script_type): def f(derivation, script_type):
derivation = normalize_bip32_derivation(derivation)
self.run('on_bip43', seed, passphrase, derivation, script_type) self.run('on_bip43', seed, passphrase, derivation, script_type)
self.derivation_and_script_type_dialog(f) self.derivation_and_script_type_dialog(f)
@ -496,6 +414,7 @@ class BaseWizard(Logger):
def on_keystore(self, k): def on_keystore(self, k):
has_xpub = isinstance(k, keystore.Xpub) has_xpub = isinstance(k, keystore.Xpub)
if has_xpub: if has_xpub:
from .bitcoin import xpub_type
t1 = xpub_type(k.xpub) t1 = xpub_type(k.xpub)
if self.wallet_type == 'standard': if self.wallet_type == 'standard':
if has_xpub and t1 not in ['standard', 'p2wpkh', 'p2wpkh-p2sh']: if has_xpub and t1 not in ['standard', 'p2wpkh', 'p2wpkh-p2sh']:
@ -523,7 +442,7 @@ class BaseWizard(Logger):
self.keystores.append(k) self.keystores.append(k)
if len(self.keystores) == 1: if len(self.keystores) == 1:
xpub = k.get_master_public_key() xpub = k.get_master_public_key()
self.reset_stack() self.stack = []
self.run('show_xpub_and_add_cosigners', xpub) self.run('show_xpub_and_add_cosigners', xpub)
elif len(self.keystores) < self.n: elif len(self.keystores) < self.n:
self.run('choose_keystore') self.run('choose_keystore')
@ -534,9 +453,9 @@ class BaseWizard(Logger):
encrypt_keystore = any(k.may_have_password() for k in self.keystores) encrypt_keystore = any(k.may_have_password() for k in self.keystores)
# note: the following condition ("if") is duplicated logic from # note: the following condition ("if") is duplicated logic from
# wallet.get_available_storage_encryption_version() # wallet.get_available_storage_encryption_version()
if self.wallet_type == 'standard' and isinstance(self.keystores[0], Hardware_KeyStore): if self.wallet_type == 'standard' and isinstance(self.keystores[0], keystore.Hardware_KeyStore):
# offer encrypting with a pw derived from the hw device # offer encrypting with a pw derived from the hw device
k = self.keystores[0] # type: Hardware_KeyStore k = self.keystores[0]
try: try:
k.handler = self.plugin.create_handler(self) k.handler = self.plugin.create_handler(self)
password = k.get_password_for_storage_encryption() password = k.get_password_for_storage_encryption()
@ -546,91 +465,68 @@ class BaseWizard(Logger):
self.choose_hw_device() self.choose_hw_device()
return return
except BaseException as e: except BaseException as e:
self.logger.exception('') traceback.print_exc(file=sys.stderr)
self.show_error(str(e)) self.show_error(str(e))
return return
self.request_storage_encryption( self.request_storage_encryption(
run_next=lambda encrypt_storage: self.on_password( run_next=lambda encrypt_storage: self.on_password(
password, password,
encrypt_storage=encrypt_storage, encrypt_storage=encrypt_storage,
storage_enc_version=StorageEncryptionVersion.XPUB_PASSWORD, storage_enc_version=STO_EV_XPUB_PW,
encrypt_keystore=False)) encrypt_keystore=False))
else: else:
# reset stack to disable 'back' button in password dialog
self.reset_stack()
# prompt the user to set an arbitrary password # prompt the user to set an arbitrary password
self.request_password( self.request_password(
run_next=lambda password, encrypt_storage: self.on_password( run_next=lambda password, encrypt_storage: self.on_password(
password, password,
encrypt_storage=encrypt_storage, encrypt_storage=encrypt_storage,
storage_enc_version=StorageEncryptionVersion.USER_PASSWORD, storage_enc_version=STO_EV_USER_PW,
encrypt_keystore=encrypt_keystore), encrypt_keystore=encrypt_keystore),
force_disable_encrypt_cb=not encrypt_keystore) force_disable_encrypt_cb=not encrypt_keystore)
def on_password(self, password, *, encrypt_storage: bool, def on_password(self, password, *, encrypt_storage,
storage_enc_version=StorageEncryptionVersion.USER_PASSWORD, storage_enc_version=STO_EV_USER_PW, encrypt_keystore):
encrypt_keystore: bool): self.storage.set_keystore_encryption(bool(password) and encrypt_keystore)
if encrypt_storage:
self.storage.set_password(password, enc_version=storage_enc_version)
for k in self.keystores: for k in self.keystores:
if k.may_have_password(): if k.may_have_password():
k.update_password(None, password) k.update_password(None, password)
if self.wallet_type == 'standard': if self.wallet_type == 'standard':
self.data['seed_type'] = self.seed_type self.storage.put('seed_type', self.seed_type)
keys = self.keystores[0].dump() keys = self.keystores[0].dump()
self.data['keystore'] = keys self.storage.put('keystore', keys)
self.wallet = Standard_Wallet(self.storage)
self.run('create_addresses')
elif self.wallet_type == 'multisig': elif self.wallet_type == 'multisig':
for i, k in enumerate(self.keystores): for i, k in enumerate(self.keystores):
self.data['x%d/'%(i+1)] = k.dump() self.storage.put('x%d/'%(i+1), k.dump())
self.storage.write()
self.wallet = Multisig_Wallet(self.storage)
self.run('create_addresses')
elif self.wallet_type == 'imported': elif self.wallet_type == 'imported':
if len(self.keystores) > 0: if len(self.keystores) > 0:
keys = self.keystores[0].dump() keys = self.keystores[0].dump()
self.data['keystore'] = keys self.storage.put('keystore', keys)
else: self.wallet = Imported_Wallet(self.storage)
raise Exception('Unknown wallet type') self.wallet.storage.write()
self.pw_args = WizardWalletPasswordSetting(password=password, self.terminate()
encrypt_storage=encrypt_storage,
storage_enc_version=storage_enc_version,
encrypt_keystore=encrypt_keystore)
self.terminate()
def create_storage(self, path):
if os.path.exists(path):
raise Exception('file already exists at path')
if not self.pw_args:
return
pw_args = self.pw_args
self.pw_args = None # clean-up so that it can get GC-ed
storage = WalletStorage(path)
if pw_args.encrypt_storage:
storage.set_password(pw_args.password, enc_version=pw_args.storage_enc_version)
db = WalletDB('', manual_upgrades=False)
db.set_keystore_encryption(bool(pw_args.password) and pw_args.encrypt_keystore)
for key, value in self.data.items():
db.put(key, value)
db.load_plugins()
db.write(storage)
return storage, db
def terminate(self, *, storage: Optional[WalletStorage], db: Optional[WalletDB] = None):
raise NotImplementedError() # implemented by subclasses
def show_xpub_and_add_cosigners(self, xpub): def show_xpub_and_add_cosigners(self, xpub):
self.show_xpub_dialog(xpub=xpub, run_next=lambda x: self.run('choose_keystore')) self.show_xpub_dialog(xpub=xpub, run_next=lambda x: self.run('choose_keystore'))
def choose_seed_type(self, message=None, choices=None): def choose_seed_type(self):
title = _('Choose Seed type') title = _('Choose Seed type')
if message is None: message = ' '.join([
message = ' '.join([ _("The type of addresses used by your wallet will depend on your seed."),
_("The type of addresses used by your wallet will depend on your seed."), _("Segwit wallets use bech32 addresses, defined in BIP173."),
_("Segwit wallets use bech32 addresses, defined in BIP173."), _("Please note that websites and other wallets may not support these addresses yet."),
_("Please note that websites and other wallets may not support these addresses yet."), _("Thus, you might want to keep using a non-segwit wallet in order to be able to receive bitcoins during the transition period.")
_("Thus, you might want to keep using a non-segwit wallet in order to be able to receive LBRY Credits during the transition period.") ])
]) choices = [
if choices is None: ('create_standard_seed', _('Standard')),
choices = [ ('create_segwit_seed', _('Segwit')),
]
('create_standard_seed', _('Legacy')),
]
self.choice_dialog(title=title, message=message, choices=choices, run_next=self.run) self.choice_dialog(title=title, message=message, choices=choices, run_next=self.run)
def create_segwit_seed(self): self.create_seed('segwit') def create_segwit_seed(self): self.create_seed('segwit')
@ -666,3 +562,11 @@ class BaseWizard(Logger):
self.line_dialog(run_next=f, title=title, message=message, default='', test=lambda x: x==passphrase) self.line_dialog(run_next=f, title=title, message=message, default='', test=lambda x: x==passphrase)
else: else:
f('') f('')
def create_addresses(self):
def task():
self.wallet.synchronize()
self.wallet.storage.write()
self.terminate()
msg = _("Electrum is generating your addresses, please wait...")
self.waiting_dialog(task, msg)
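
The reworked `run`/`go_back` logic above keeps a stack of frames, each carrying a deep copy of the wizard data, so that going back both replays the previous action and restores the state it started from. A toy reduction of that pattern (names are illustrative, not the actual `BaseWizard` API):

```
import copy
from typing import Any, Dict, List, NamedTuple

class Frame(NamedTuple):
    action: str
    args: tuple
    kwargs: Dict[str, Any]
    db_data: dict          # snapshot of wizard data when the frame was entered

class MiniWizard:
    def __init__(self):
        self.data: dict = {}
        self._stack: List[Frame] = []

    def run(self, action: str, *args, **kwargs):
        # push a frame with a snapshot of the current data, then dispatch
        self._stack.append(Frame(action, args, kwargs, copy.deepcopy(self.data)))
        getattr(self, action)(*args, **kwargs)

    def can_go_back(self) -> bool:
        return len(self._stack) > 1

    def go_back(self):
        if not self.can_go_back():
            return
        self._stack.pop()                        # drop the current frame
        prev = self._stack.pop()                 # pop the previous frame...
        self.data = copy.deepcopy(prev.db_data)  # ...restore its snapshot...
        self.run(prev.action, *prev.args, **prev.kwargs)  # ...and replay it

    # example wizard step
    def choose_wallet_type(self, choice: str):
        self.data['wallet_type'] = choice
```

Calling `run('choose_wallet_type', ...)` twice and then `go_back()` leaves the data as it was after the first call, because the second frame's side effects are discarded along with it.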

View file

@ -1,403 +0,0 @@
# Copyright (C) 2018 The Electrum developers
# Distributed under the MIT software license, see the accompanying
# file LICENCE or http://www.opensource.org/licenses/mit-license.php
import hashlib
from typing import List, Tuple, NamedTuple, Union, Iterable, Sequence, Optional
from .util import bfh, bh2u, BitcoinException
from . import constants
from . import ecc
from .crypto import hash_160, hmac_oneshot
from .bitcoin import rev_hex, int_to_hex, EncodeBase58Check, DecodeBase58Check
from .logging import get_logger
_logger = get_logger(__name__)
BIP32_PRIME = 0x80000000
UINT32_MAX = (1 << 32) - 1
def protect_against_invalid_ecpoint(func):
def func_wrapper(*args):
child_index = args[-1]
while True:
is_prime = child_index & BIP32_PRIME
try:
return func(*args[:-1], child_index=child_index)
except ecc.InvalidECPointException:
_logger.warning('bip32 protect_against_invalid_ecpoint: skipping index')
child_index += 1
is_prime2 = child_index & BIP32_PRIME
if is_prime != is_prime2: raise OverflowError()
return func_wrapper
@protect_against_invalid_ecpoint
def CKD_priv(parent_privkey: bytes, parent_chaincode: bytes, child_index: int) -> Tuple[bytes, bytes]:
"""Child private key derivation function (from master private key)
If n is hardened (i.e. the 32nd bit is set), the resulting private key's
corresponding public key can NOT be determined without the master private key.
However, if n is not hardened, the resulting private key's corresponding
public key can be determined without the master private key.
"""
if child_index < 0: raise ValueError('the bip32 index needs to be non-negative')
is_hardened_child = bool(child_index & BIP32_PRIME)
return _CKD_priv(parent_privkey=parent_privkey,
parent_chaincode=parent_chaincode,
child_index=bfh(rev_hex(int_to_hex(child_index, 4))),
is_hardened_child=is_hardened_child)
def _CKD_priv(parent_privkey: bytes, parent_chaincode: bytes,
child_index: bytes, is_hardened_child: bool) -> Tuple[bytes, bytes]:
try:
keypair = ecc.ECPrivkey(parent_privkey)
except ecc.InvalidECPointException as e:
raise BitcoinException('Impossible xprv (not within curve order)') from e
parent_pubkey = keypair.get_public_key_bytes(compressed=True)
if is_hardened_child:
data = bytes([0]) + parent_privkey + child_index
else:
data = parent_pubkey + child_index
I = hmac_oneshot(parent_chaincode, data, hashlib.sha512)
I_left = ecc.string_to_number(I[0:32])
child_privkey = (I_left + ecc.string_to_number(parent_privkey)) % ecc.CURVE_ORDER
if I_left >= ecc.CURVE_ORDER or child_privkey == 0:
raise ecc.InvalidECPointException()
child_privkey = int.to_bytes(child_privkey, length=32, byteorder='big', signed=False)
child_chaincode = I[32:]
return child_privkey, child_chaincode
@protect_against_invalid_ecpoint
def CKD_pub(parent_pubkey: bytes, parent_chaincode: bytes, child_index: int) -> Tuple[bytes, bytes]:
"""Child public key derivation function (from public key only)
This function allows us to find the nth public key, as long as n is
not hardened. If n is hardened, we need the master private key to find it.
"""
if child_index < 0: raise ValueError('the bip32 index needs to be non-negative')
if child_index & BIP32_PRIME: raise Exception('not possible to derive hardened child from parent pubkey')
return _CKD_pub(parent_pubkey=parent_pubkey,
parent_chaincode=parent_chaincode,
child_index=bfh(rev_hex(int_to_hex(child_index, 4))))
# helper function, callable with arbitrary 'child_index' byte-string.
# i.e.: 'child_index' does not need to fit into 32 bits here! (c.f. trustedcoin billing)
def _CKD_pub(parent_pubkey: bytes, parent_chaincode: bytes, child_index: bytes) -> Tuple[bytes, bytes]:
I = hmac_oneshot(parent_chaincode, parent_pubkey + child_index, hashlib.sha512)
pubkey = ecc.ECPrivkey(I[0:32]) + ecc.ECPubkey(parent_pubkey)
if pubkey.is_at_infinity():
raise ecc.InvalidECPointException()
child_pubkey = pubkey.get_public_key_bytes(compressed=True)
child_chaincode = I[32:]
return child_pubkey, child_chaincode
def xprv_header(xtype: str, *, net=None) -> bytes:
if net is None:
net = constants.net
return net.XPRV_HEADERS[xtype].to_bytes(length=4, byteorder="big")
def xpub_header(xtype: str, *, net=None) -> bytes:
if net is None:
net = constants.net
return net.XPUB_HEADERS[xtype].to_bytes(length=4, byteorder="big")
class InvalidMasterKeyVersionBytes(BitcoinException): pass
class BIP32Node(NamedTuple):
xtype: str
eckey: Union[ecc.ECPubkey, ecc.ECPrivkey]
chaincode: bytes
depth: int = 0
fingerprint: bytes = b'\x00'*4 # as in serialized format, this is the *parent's* fingerprint
child_number: bytes = b'\x00'*4
@classmethod
def from_xkey(cls, xkey: str, *, net=None) -> 'BIP32Node':
if net is None:
net = constants.net
xkey = DecodeBase58Check(xkey)
if len(xkey) != 78:
raise BitcoinException('Invalid length for extended key: {}'
.format(len(xkey)))
depth = xkey[4]
fingerprint = xkey[5:9]
child_number = xkey[9:13]
chaincode = xkey[13:13 + 32]
header = int.from_bytes(xkey[0:4], byteorder='big')
if header in net.XPRV_HEADERS_INV:
headers_inv = net.XPRV_HEADERS_INV
is_private = True
elif header in net.XPUB_HEADERS_INV:
headers_inv = net.XPUB_HEADERS_INV
is_private = False
else:
raise InvalidMasterKeyVersionBytes(f'Invalid extended key format: {hex(header)}')
xtype = headers_inv[header]
if is_private:
eckey = ecc.ECPrivkey(xkey[13 + 33:])
else:
eckey = ecc.ECPubkey(xkey[13 + 32:])
return BIP32Node(xtype=xtype,
eckey=eckey,
chaincode=chaincode,
depth=depth,
fingerprint=fingerprint,
child_number=child_number)
@classmethod
def from_rootseed(cls, seed: bytes, *, xtype: str) -> 'BIP32Node':
I = hmac_oneshot(b"Bitcoin seed", seed, hashlib.sha512)
master_k = I[0:32]
master_c = I[32:]
return BIP32Node(xtype=xtype,
eckey=ecc.ECPrivkey(master_k),
chaincode=master_c)
@classmethod
def from_bytes(cls, b: bytes) -> 'BIP32Node':
if len(b) != 78:
raise Exception(f"unexpected xkey raw bytes len {len(b)} != 78")
xkey = EncodeBase58Check(b)
return cls.from_xkey(xkey)
def to_xprv(self, *, net=None) -> str:
payload = self.to_xprv_bytes(net=net)
return EncodeBase58Check(payload)
def to_xprv_bytes(self, *, net=None) -> bytes:
if not self.is_private():
raise Exception("cannot serialize as xprv; private key missing")
payload = (xprv_header(self.xtype, net=net) +
bytes([self.depth]) +
self.fingerprint +
self.child_number +
self.chaincode +
bytes([0]) +
self.eckey.get_secret_bytes())
assert len(payload) == 78, f"unexpected xprv payload len {len(payload)}"
return payload
def to_xpub(self, *, net=None) -> str:
payload = self.to_xpub_bytes(net=net)
return EncodeBase58Check(payload)
def to_xpub_bytes(self, *, net=None) -> bytes:
payload = (xpub_header(self.xtype, net=net) +
bytes([self.depth]) +
self.fingerprint +
self.child_number +
self.chaincode +
self.eckey.get_public_key_bytes(compressed=True))
assert len(payload) == 78, f"unexpected xpub payload len {len(payload)}"
return payload
def to_xkey(self, *, net=None) -> str:
if self.is_private():
return self.to_xprv(net=net)
else:
return self.to_xpub(net=net)
def to_bytes(self, *, net=None) -> bytes:
if self.is_private():
return self.to_xprv_bytes(net=net)
else:
return self.to_xpub_bytes(net=net)
def convert_to_public(self) -> 'BIP32Node':
if not self.is_private():
return self
pubkey = ecc.ECPubkey(self.eckey.get_public_key_bytes())
return self._replace(eckey=pubkey)
def is_private(self) -> bool:
return isinstance(self.eckey, ecc.ECPrivkey)
def subkey_at_private_derivation(self, path: Union[str, Iterable[int]]) -> 'BIP32Node':
if path is None:
raise Exception("derivation path must not be None")
if isinstance(path, str):
path = convert_bip32_path_to_list_of_uint32(path)
if not self.is_private():
raise Exception("cannot do bip32 private derivation; private key missing")
if not path:
return self
depth = self.depth
chaincode = self.chaincode
privkey = self.eckey.get_secret_bytes()
for child_index in path:
parent_privkey = privkey
privkey, chaincode = CKD_priv(privkey, chaincode, child_index)
depth += 1
parent_pubkey = ecc.ECPrivkey(parent_privkey).get_public_key_bytes(compressed=True)
fingerprint = hash_160(parent_pubkey)[0:4]
child_number = child_index.to_bytes(length=4, byteorder="big")
return BIP32Node(xtype=self.xtype,
eckey=ecc.ECPrivkey(privkey),
chaincode=chaincode,
depth=depth,
fingerprint=fingerprint,
child_number=child_number)
def subkey_at_public_derivation(self, path: Union[str, Iterable[int]]) -> 'BIP32Node':
if path is None:
raise Exception("derivation path must not be None")
if isinstance(path, str):
path = convert_bip32_path_to_list_of_uint32(path)
if not path:
return self.convert_to_public()
depth = self.depth
chaincode = self.chaincode
pubkey = self.eckey.get_public_key_bytes(compressed=True)
for child_index in path:
parent_pubkey = pubkey
pubkey, chaincode = CKD_pub(pubkey, chaincode, child_index)
depth += 1
fingerprint = hash_160(parent_pubkey)[0:4]
child_number = child_index.to_bytes(length=4, byteorder="big")
return BIP32Node(xtype=self.xtype,
eckey=ecc.ECPubkey(pubkey),
chaincode=chaincode,
depth=depth,
fingerprint=fingerprint,
child_number=child_number)
def calc_fingerprint_of_this_node(self) -> bytes:
"""Returns the fingerprint of this node.
Note that self.fingerprint is of the *parent*.
"""
# TODO cache this
return hash_160(self.eckey.get_public_key_bytes(compressed=True))[0:4]
def xpub_type(x):
return BIP32Node.from_xkey(x).xtype
def is_xpub(text):
try:
node = BIP32Node.from_xkey(text)
return not node.is_private()
except:
return False
def is_xprv(text):
try:
node = BIP32Node.from_xkey(text)
return node.is_private()
except:
return False
def xpub_from_xprv(xprv):
return BIP32Node.from_xkey(xprv).to_xpub()
def convert_bip32_path_to_list_of_uint32(n: str) -> List[int]:
"""Convert bip32 path to list of uint32 integers with prime flags
m/0/-1/1' -> [0, 0x80000001, 0x80000001]
based on code in trezorlib
"""
if not n:
return []
if n.endswith("/"):
n = n[:-1]
n = n.split('/')
# cut leading "m" if present, but do not require it
if n[0] == "m":
n = n[1:]
path = []
for x in n:
if x == '':
# gracefully allow repeating "/" chars in path.
# makes concatenating paths easier
continue
prime = 0
if x.endswith("'") or x.endswith("h"):
x = x[:-1]
prime = BIP32_PRIME
if x.startswith('-'):
if prime:
raise ValueError(f"bip32 path child index is signalling hardened level in multiple ways")
prime = BIP32_PRIME
child_index = abs(int(x)) | prime
if child_index > UINT32_MAX:
raise ValueError(f"bip32 path child index too large: {child_index} > {UINT32_MAX}")
path.append(child_index)
return path
def convert_bip32_intpath_to_strpath(path: Sequence[int]) -> str:
s = "m/"
for child_index in path:
if not isinstance(child_index, int):
raise TypeError(f"bip32 path child index must be int: {child_index}")
if not (0 <= child_index <= UINT32_MAX):
raise ValueError(f"bip32 path child index out of range: {child_index}")
prime = ""
if child_index & BIP32_PRIME:
prime = "'"
child_index = child_index ^ BIP32_PRIME
s += str(child_index) + prime + '/'
# cut trailing "/"
s = s[:-1]
return s
def is_bip32_derivation(s: str) -> bool:
try:
if not (s == 'm' or s.startswith('m/')):
return False
convert_bip32_path_to_list_of_uint32(s)
except:
return False
else:
return True
def normalize_bip32_derivation(s: Optional[str]) -> Optional[str]:
if s is None:
return None
if not is_bip32_derivation(s):
raise ValueError(f"invalid bip32 derivation: {s}")
ints = convert_bip32_path_to_list_of_uint32(s)
return convert_bip32_intpath_to_strpath(ints)
def is_all_public_derivation(path: Union[str, Iterable[int]]) -> bool:
"""Returns whether all levels in path use non-hardened derivation."""
if isinstance(path, str):
path = convert_bip32_path_to_list_of_uint32(path)
for child_index in path:
if child_index < 0:
raise ValueError('the bip32 index needs to be non-negative')
if child_index & BIP32_PRIME:
return False
return True
def root_fp_and_der_prefix_from_xkey(xkey: str) -> Tuple[Optional[str], Optional[str]]:
"""Returns the root bip32 fingerprint and the derivation path from the
root to the given xkey, if they can be determined. Otherwise (None, None).
"""
node = BIP32Node.from_xkey(xkey)
derivation_prefix = None
root_fingerprint = None
assert node.depth >= 0, node.depth
if node.depth == 0:
derivation_prefix = 'm'
root_fingerprint = node.calc_fingerprint_of_this_node().hex().lower()
elif node.depth == 1:
child_number_int = int.from_bytes(node.child_number, 'big')
derivation_prefix = convert_bip32_intpath_to_strpath([child_number_int])
root_fingerprint = node.fingerprint.hex()
return root_fingerprint, derivation_prefix
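
Taken together, the module above is a self-contained BIP32 implementation: `from_rootseed` builds the master node, `convert_bip32_path_to_list_of_uint32` parses textual paths, and `subkey_at_private_derivation` walks them using `CKD_priv`. A small usage sketch, assuming the master-branch `electrum` package (which contains this `bip32.py`) is importable; the seed and path are arbitrary examples:

```
from electrum.bip32 import BIP32Node, convert_bip32_path_to_list_of_uint32

root = BIP32Node.from_rootseed(bytes(32), xtype='standard')    # toy all-zero 32-byte seed
path = convert_bip32_path_to_list_of_uint32("m/44'/0'/0'")     # [0x8000002c, 0x80000000, 0x80000000]
acct = root.subkey_at_private_derivation(path)

print(acct.to_xprv())                      # account-level xprv
print(acct.convert_to_public().to_xpub())  # matching watching-only xpub
```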

View file

@ -24,18 +24,14 @@
# SOFTWARE. # SOFTWARE.
import hashlib import hashlib
from typing import List, Tuple, TYPE_CHECKING, Optional, Union from typing import List
from enum import IntEnum
from .util import bfh, bh2u, BitcoinException, assert_bytes, to_bytes, inv_dict from .util import bfh, bh2u, BitcoinException, print_error, assert_bytes, to_bytes, inv_dict
from . import version from . import version
from . import segwit_addr from . import segwit_addr
from . import constants from . import constants
from . import ecc from . import ecc
from .crypto import sha256d, sha256, hash_160, hmac_oneshot from .crypto import Hash, sha256, hash_160, hmac_oneshot
if TYPE_CHECKING:
from .network import Network
################################## transactions ################################## transactions
@ -45,154 +41,12 @@ COIN = 100000000
TOTAL_COIN_SUPPLY_LIMIT_IN_BTC = 21000000 TOTAL_COIN_SUPPLY_LIMIT_IN_BTC = 21000000
# supported types of transaction outputs # supported types of transaction outputs
# TODO kill these with fire
TYPE_ADDRESS = 0 TYPE_ADDRESS = 0
TYPE_PUBKEY = 1 TYPE_PUBKEY = 1
TYPE_SCRIPT = 2 TYPE_SCRIPT = 2
class opcodes(IntEnum): def rev_hex(s):
# push value
OP_0 = 0x00
OP_FALSE = OP_0
OP_PUSHDATA1 = 0x4c
OP_PUSHDATA2 = 0x4d
OP_PUSHDATA4 = 0x4e
OP_1NEGATE = 0x4f
OP_RESERVED = 0x50
OP_1 = 0x51
OP_TRUE = OP_1
OP_2 = 0x52
OP_3 = 0x53
OP_4 = 0x54
OP_5 = 0x55
OP_6 = 0x56
OP_7 = 0x57
OP_8 = 0x58
OP_9 = 0x59
OP_10 = 0x5a
OP_11 = 0x5b
OP_12 = 0x5c
OP_13 = 0x5d
OP_14 = 0x5e
OP_15 = 0x5f
OP_16 = 0x60
# control
OP_NOP = 0x61
OP_VER = 0x62
OP_IF = 0x63
OP_NOTIF = 0x64
OP_VERIF = 0x65
OP_VERNOTIF = 0x66
OP_ELSE = 0x67
OP_ENDIF = 0x68
OP_VERIFY = 0x69
OP_RETURN = 0x6a
# stack ops
OP_TOALTSTACK = 0x6b
OP_FROMALTSTACK = 0x6c
OP_2DROP = 0x6d
OP_2DUP = 0x6e
OP_3DUP = 0x6f
OP_2OVER = 0x70
OP_2ROT = 0x71
OP_2SWAP = 0x72
OP_IFDUP = 0x73
OP_DEPTH = 0x74
OP_DROP = 0x75
OP_DUP = 0x76
OP_NIP = 0x77
OP_OVER = 0x78
OP_PICK = 0x79
OP_ROLL = 0x7a
OP_ROT = 0x7b
OP_SWAP = 0x7c
OP_TUCK = 0x7d
# splice ops
OP_CAT = 0x7e
OP_SUBSTR = 0x7f
OP_LEFT = 0x80
OP_RIGHT = 0x81
OP_SIZE = 0x82
# bit logic
OP_INVERT = 0x83
OP_AND = 0x84
OP_OR = 0x85
OP_XOR = 0x86
OP_EQUAL = 0x87
OP_EQUALVERIFY = 0x88
OP_RESERVED1 = 0x89
OP_RESERVED2 = 0x8a
# numeric
OP_1ADD = 0x8b
OP_1SUB = 0x8c
OP_2MUL = 0x8d
OP_2DIV = 0x8e
OP_NEGATE = 0x8f
OP_ABS = 0x90
OP_NOT = 0x91
OP_0NOTEQUAL = 0x92
OP_ADD = 0x93
OP_SUB = 0x94
OP_MUL = 0x95
OP_DIV = 0x96
OP_MOD = 0x97
OP_LSHIFT = 0x98
OP_RSHIFT = 0x99
OP_BOOLAND = 0x9a
OP_BOOLOR = 0x9b
OP_NUMEQUAL = 0x9c
OP_NUMEQUALVERIFY = 0x9d
OP_NUMNOTEQUAL = 0x9e
OP_LESSTHAN = 0x9f
OP_GREATERTHAN = 0xa0
OP_LESSTHANOREQUAL = 0xa1
OP_GREATERTHANOREQUAL = 0xa2
OP_MIN = 0xa3
OP_MAX = 0xa4
OP_WITHIN = 0xa5
# crypto
OP_RIPEMD160 = 0xa6
OP_SHA1 = 0xa7
OP_SHA256 = 0xa8
OP_HASH160 = 0xa9
OP_HASH256 = 0xaa
OP_CODESEPARATOR = 0xab
OP_CHECKSIG = 0xac
OP_CHECKSIGVERIFY = 0xad
OP_CHECKMULTISIG = 0xae
OP_CHECKMULTISIGVERIFY = 0xaf
# expansion
OP_NOP1 = 0xb0
OP_CHECKLOCKTIMEVERIFY = 0xb1
OP_NOP2 = OP_CHECKLOCKTIMEVERIFY
OP_CHECKSEQUENCEVERIFY = 0xb2
OP_NOP3 = OP_CHECKSEQUENCEVERIFY
OP_NOP4 = 0xb3
OP_NOP5 = 0xb4
OP_NOP6 = 0xb5
OP_NOP7 = 0xb6
OP_NOP8 = 0xb7
OP_NOP9 = 0xb8
OP_NOP10 = 0xb9
OP_INVALIDOPCODE = 0xff
def hex(self) -> str:
return bytes([self]).hex()
def rev_hex(s: str) -> str:
return bh2u(bfh(s)[::-1]) return bh2u(bfh(s)[::-1])
@ -203,7 +57,7 @@ def int_to_hex(i: int, length: int=1) -> str:
if not isinstance(i, int): if not isinstance(i, int):
raise TypeError('{} instead of int'.format(i)) raise TypeError('{} instead of int'.format(i))
range_size = pow(256, length) range_size = pow(256, length)
if i < -(range_size//2) or i >= range_size: if i < -range_size/2 or i >= range_size:
raise OverflowError('cannot convert int {} to hex ({} bytes)'.format(i, length)) raise OverflowError('cannot convert int {} to hex ({} bytes)'.format(i, length))
if i < 0: if i < 0:
# two's complement # two's complement
@ -238,9 +92,6 @@ def script_num_to_hex(i: int) -> str:
def var_int(i: int) -> str: def var_int(i: int) -> str:
# https://en.bitcoin.it/wiki/Protocol_specification#Variable_length_integer # https://en.bitcoin.it/wiki/Protocol_specification#Variable_length_integer
# https://github.com/bitcoin/bitcoin/blob/efe1ee0d8d7f82150789f1f6840f139289628a2b/src/serialize.h#L247
# "CompactSize"
assert i >= 0, i
if i<0xfd: if i<0xfd:
return int_to_hex(i) return int_to_hex(i)
elif i<=0xffff: elif i<=0xffff:
@ -258,15 +109,15 @@ def witness_push(item: str) -> str:
return var_int(len(item) // 2) + item return var_int(len(item) // 2) + item
def _op_push(i: int) -> str: def op_push(i: int) -> str:
if i < opcodes.OP_PUSHDATA1: if i<0x4c: # OP_PUSHDATA1
return int_to_hex(i) return int_to_hex(i)
elif i <= 0xff: elif i<=0xff:
return opcodes.OP_PUSHDATA1.hex() + int_to_hex(i, 1) return '4c' + int_to_hex(i)
elif i <= 0xffff: elif i<=0xffff:
return opcodes.OP_PUSHDATA2.hex() + int_to_hex(i, 2) return '4d' + int_to_hex(i,2)
else: else:
return opcodes.OP_PUSHDATA4.hex() + int_to_hex(i, 4) return '4e' + int_to_hex(i,4)
def push_script(data: str) -> str: def push_script(data: str) -> str:
@ -277,170 +128,181 @@ def push_script(data: str) -> str:
ported from https://github.com/btcsuite/btcd/blob/fdc2bc867bda6b351191b5872d2da8270df00d13/txscript/scriptbuilder.go#L128 ported from https://github.com/btcsuite/btcd/blob/fdc2bc867bda6b351191b5872d2da8270df00d13/txscript/scriptbuilder.go#L128
""" """
data = bfh(data) data = bfh(data)
from .transaction import opcodes
data_len = len(data) data_len = len(data)
# "small integer" opcodes # "small integer" opcodes
if data_len == 0 or data_len == 1 and data[0] == 0: if data_len == 0 or data_len == 1 and data[0] == 0:
return opcodes.OP_0.hex() return bh2u(bytes([opcodes.OP_0]))
elif data_len == 1 and data[0] <= 16: elif data_len == 1 and data[0] <= 16:
return bh2u(bytes([opcodes.OP_1 - 1 + data[0]])) return bh2u(bytes([opcodes.OP_1 - 1 + data[0]]))
elif data_len == 1 and data[0] == 0x81: elif data_len == 1 and data[0] == 0x81:
return opcodes.OP_1NEGATE.hex() return bh2u(bytes([opcodes.OP_1NEGATE]))
return _op_push(data_len) + bh2u(data) return op_push(data_len) + bh2u(data)
def add_number_to_script(i: int) -> bytes: def add_number_to_script(i: int) -> bytes:
return bfh(push_script(script_num_to_hex(i))) return bfh(push_script(script_num_to_hex(i)))
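
push_script chooses the cheapest push for a data blob: OP_0 / OP_1..OP_16 / OP_1NEGATE for "small integers", a direct push below OP_PUSHDATA1, and OP_PUSHDATA1/2/4 above that; add_number_to_script builds on it for script numbers. A few expected outputs, as a sketch; the function is the one defined above, but the import path is an assumption:

from electrum.bitcoin import push_script            # assumed import path

assert push_script('') == '00'                      # empty data -> OP_0
assert push_script('10') == '60'                    # single byte 0x10 == 16 -> OP_16
assert push_script('cafe') == '02cafe'              # 2 bytes -> direct push
assert push_script('00' * 80) == '4c50' + '00' * 80 # 80 bytes -> OP_PUSHDATA1, length 0x50
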
def relayfee(network: 'Network' = None) -> int: hash_encode = lambda x: bh2u(x[::-1])
from .simple_config import FEERATE_DEFAULT_RELAY, FEERATE_MAX_RELAY hash_decode = lambda x: bfh(x)[::-1]
if network and network.relay_fee is not None: hmac_sha_512 = lambda x, y: hmac_oneshot(x, y, hashlib.sha512)
fee = network.relay_fee
else:
fee = FEERATE_DEFAULT_RELAY
fee = min(fee, FEERATE_MAX_RELAY)
fee = max(fee, 0)
return fee
def dust_threshold(network: 'Network'=None) -> int: def is_new_seed(x, prefix=version.SEED_PREFIX):
# Change <= dust threshold is added to the tx fee from . import mnemonic
return 182 * 3 * relayfee(network) // 1000 x = mnemonic.normalize_text(x)
s = bh2u(hmac_sha_512(b"Seed version", x.encode('utf8')))
return s.startswith(prefix)
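
dust_threshold applies Bitcoin Core's dust rule: 182 bytes (a typical P2PKH output plus the input that would later spend it) times 3, scaled by the relay fee rate and divided by 1000 to convert sat/kB into sat. Worked arithmetic for the default case; the 1000 sat/kB figure is an assumption about FEERATE_DEFAULT_RELAY, which is not shown in this diff:

FEERATE_DEFAULT_RELAY = 1000        # assumed default relay fee rate, sat per kB
fee = FEERATE_DEFAULT_RELAY         # no network override, no clamping needed
dust = 182 * 3 * fee // 1000        # same formula as dust_threshold() above
assert dust == 546                  # the familiar 546-satoshi dust limit
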
def hash_encode(x: bytes) -> str: def is_old_seed(seed):
return bh2u(x[::-1]) from . import old_mnemonic, mnemonic
seed = mnemonic.normalize_text(seed)
words = seed.split()
try:
# checks here are deliberately left weak for legacy reasons, see #3149
old_mnemonic.mn_decode(words)
uses_electrum_words = True
except Exception:
uses_electrum_words = False
try:
seed = bfh(seed)
is_hex = (len(seed) == 16 or len(seed) == 32)
except Exception:
is_hex = False
return is_hex or (uses_electrum_words and (len(words) == 12 or len(words) == 24))
def hash_decode(x: str) -> bytes: def seed_type(x):
return bfh(x)[::-1] if is_old_seed(x):
return 'old'
elif is_new_seed(x):
return 'standard'
elif is_new_seed(x, version.SEED_PREFIX_SW):
return 'segwit'
elif is_new_seed(x, version.SEED_PREFIX_2FA):
return '2fa'
return ''
is_seed = lambda x: bool(seed_type(x))
############ functions from pywallet ##################### ############ functions from pywallet #####################
def hash160_to_b58_address(h160: bytes, addrtype: int) -> str: def hash160_to_b58_address(h160: bytes, addrtype):
s = bytes([addrtype]) + h160 s = bytes([addrtype])
s = s + sha256d(s)[0:4] s += h160
return base_encode(s, base=58) return base_encode(s+Hash(s)[0:4], base=58)
def b58_address_to_hash160(addr: str) -> Tuple[int, bytes]: def b58_address_to_hash160(addr):
addr = to_bytes(addr, 'ascii') addr = to_bytes(addr, 'ascii')
_bytes = DecodeBase58Check(addr) _bytes = base_decode(addr, 25, base=58)
if len(_bytes) != 21:
raise Exception(f'expected 21 payload bytes in base58 address. got: {len(_bytes)}')
return _bytes[0], _bytes[1:21] return _bytes[0], _bytes[1:21]
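
hash160_to_b58_address prepends the version byte, appends the first four bytes of the double-SHA256 checksum and Base58-encodes the result; b58_address_to_hash160 reverses the encoding. A round-trip sketch; the electrum import paths are assumptions and the all-zero hash160 is only illustrative:

from electrum import constants                       # assumed import path
from electrum.bitcoin import hash160_to_b58_address, b58_address_to_hash160

h160 = bytes(20)                                     # dummy 20-byte hash160
addr = hash160_to_b58_address(h160, constants.net.ADDRTYPE_P2PKH)
addrtype, h160_back = b58_address_to_hash160(addr)
assert (addrtype, h160_back) == (constants.net.ADDRTYPE_P2PKH, h160)
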
def hash160_to_p2pkh(h160: bytes, *, net=None) -> str: def hash160_to_p2pkh(h160, *, net=None):
if net is None: net = constants.net if net is None:
net = constants.net
return hash160_to_b58_address(h160, net.ADDRTYPE_P2PKH) return hash160_to_b58_address(h160, net.ADDRTYPE_P2PKH)
def hash160_to_p2sh(h160: bytes, *, net=None) -> str: def hash160_to_p2sh(h160, *, net=None):
if net is None: net = constants.net if net is None:
net = constants.net
return hash160_to_b58_address(h160, net.ADDRTYPE_P2SH) return hash160_to_b58_address(h160, net.ADDRTYPE_P2SH)
def public_key_to_p2pkh(public_key: bytes, *, net=None) -> str: def public_key_to_p2pkh(public_key: bytes) -> str:
if net is None: net = constants.net return hash160_to_p2pkh(hash_160(public_key))
return hash160_to_p2pkh(hash_160(public_key), net=net)
def hash_to_segwit_addr(h: bytes, witver: int, *, net=None) -> str: def hash_to_segwit_addr(h, witver, *, net=None):
if net is None: net = constants.net if net is None:
net = constants.net
return segwit_addr.encode(net.SEGWIT_HRP, witver, h) return segwit_addr.encode(net.SEGWIT_HRP, witver, h)
def public_key_to_p2wpkh(public_key: bytes, *, net=None) -> str: def public_key_to_p2wpkh(public_key):
if net is None: net = constants.net return hash_to_segwit_addr(hash_160(public_key), witver=0)
return hash_to_segwit_addr(hash_160(public_key), witver=0, net=net)
def script_to_p2wsh(script: str, *, net=None) -> str: def script_to_p2wsh(script):
if net is None: net = constants.net return hash_to_segwit_addr(sha256(bfh(script)), witver=0)
return hash_to_segwit_addr(sha256(bfh(script)), witver=0, net=net)
def p2wpkh_nested_script(pubkey: str) -> str: def p2wpkh_nested_script(pubkey):
pkh = bh2u(hash_160(bfh(pubkey))) pkh = bh2u(hash_160(bfh(pubkey)))
return '00' + push_script(pkh) return '00' + push_script(pkh)
def p2wsh_nested_script(witness_script: str) -> str: def p2wsh_nested_script(witness_script):
wsh = bh2u(sha256(bfh(witness_script))) wsh = bh2u(sha256(bfh(witness_script)))
return '00' + push_script(wsh) return '00' + push_script(wsh)
def pubkey_to_address(txin_type: str, pubkey: str, *, net=None) -> str: def pubkey_to_address(txin_type, pubkey):
if net is None: net = constants.net
if txin_type == 'p2pkh': if txin_type == 'p2pkh':
return public_key_to_p2pkh(bfh(pubkey), net=net) return public_key_to_p2pkh(bfh(pubkey))
elif txin_type == 'p2wpkh': elif txin_type == 'p2wpkh':
return public_key_to_p2wpkh(bfh(pubkey), net=net) return public_key_to_p2wpkh(bfh(pubkey))
elif txin_type == 'p2wpkh-p2sh': elif txin_type == 'p2wpkh-p2sh':
scriptSig = p2wpkh_nested_script(pubkey) scriptSig = p2wpkh_nested_script(pubkey)
return hash160_to_p2sh(hash_160(bfh(scriptSig)), net=net) return hash160_to_p2sh(hash_160(bfh(scriptSig)))
else: else:
raise NotImplementedError(txin_type) raise NotImplementedError(txin_type)
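
pubkey_to_address dispatches on the txin type: 'p2pkh' hashes the public key into a Base58 address, 'p2wpkh' into a native segwit (bech32) address, and 'p2wpkh-p2sh' wraps the witness program in a P2SH redeem script first. A sketch; the import path is an assumption and the pubkey below is an arbitrary illustrative compressed-point encoding (these calls only hash it, they do not validate the point):

from electrum.bitcoin import pubkey_to_address       # assumed import path

pubkey = '02' + '11' * 32                             # hypothetical 33-byte compressed pubkey, hex
for txin_type in ('p2pkh', 'p2wpkh', 'p2wpkh-p2sh'):
    print(txin_type, pubkey_to_address(txin_type, pubkey))
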
def redeem_script_to_address(txin_type, redeem_script):
# TODO this method is confusingly named
def redeem_script_to_address(txin_type: str, scriptcode: str, *, net=None) -> str:
if net is None: net = constants.net
if txin_type == 'p2sh': if txin_type == 'p2sh':
# given scriptcode is a redeem_script return hash160_to_p2sh(hash_160(bfh(redeem_script)))
return hash160_to_p2sh(hash_160(bfh(scriptcode)), net=net)
elif txin_type == 'p2wsh': elif txin_type == 'p2wsh':
# given scriptcode is a witness_script return script_to_p2wsh(redeem_script)
return script_to_p2wsh(scriptcode, net=net)
elif txin_type == 'p2wsh-p2sh': elif txin_type == 'p2wsh-p2sh':
# given scriptcode is a witness_script scriptSig = p2wsh_nested_script(redeem_script)
redeem_script = p2wsh_nested_script(scriptcode) return hash160_to_p2sh(hash_160(bfh(scriptSig)))
return hash160_to_p2sh(hash_160(bfh(redeem_script)), net=net)
else: else:
raise NotImplementedError(txin_type) raise NotImplementedError(txin_type)
def script_to_address(script: str, *, net=None) -> str: def script_to_address(script, *, net=None):
from .transaction import get_address_from_output_script from .transaction import get_address_from_output_script
return get_address_from_output_script(bfh(script), net=net) t, addr = get_address_from_output_script(bfh(script), net=net)
assert t == TYPE_ADDRESS
return addr
def address_to_script(addr, *, net=None):
def address_to_script(addr: str, *, net=None) -> str: if net is None:
if net is None: net = constants.net net = constants.net
if not is_address(addr, net=net):
raise BitcoinException(f"invalid bitcoin address: {addr}")
witver, witprog = segwit_addr.decode(net.SEGWIT_HRP, addr) witver, witprog = segwit_addr.decode(net.SEGWIT_HRP, addr)
if witprog is not None: if witprog is not None:
if not (0 <= witver <= 16): if not (0 <= witver <= 16):
raise BitcoinException(f'impossible witness version: {witver}') raise BitcoinException('impossible witness version: {}'.format(witver))
script = bh2u(add_number_to_script(witver)) OP_n = witver + 0x50 if witver > 0 else 0
script = bh2u(bytes([OP_n]))
script += push_script(bh2u(bytes(witprog))) script += push_script(bh2u(bytes(witprog)))
return script return script
addrtype, hash_160_ = b58_address_to_hash160(addr) addrtype, hash_160 = b58_address_to_hash160(addr)
if addrtype == net.ADDRTYPE_P2PKH: if addrtype == net.ADDRTYPE_P2PKH:
script = pubkeyhash_to_p2pkh_script(bh2u(hash_160_)) script = '76a9' # op_dup, op_hash_160
script += push_script(bh2u(hash_160))
script += '88ac' # op_equalverify, op_checksig
elif addrtype == net.ADDRTYPE_P2SH: elif addrtype == net.ADDRTYPE_P2SH:
script = opcodes.OP_HASH160.hex() script = 'a9' # op_hash_160
script += push_script(bh2u(hash_160_)) script += push_script(bh2u(hash_160))
script += opcodes.OP_EQUAL.hex() script += '87' # op_equal
else: else:
raise BitcoinException(f'unknown address type: {addrtype}') raise BitcoinException('unknown address type: {}'.format(addrtype))
return script return script
def address_to_scripthash(addr: str) -> str: def address_to_scripthash(addr):
script = address_to_script(addr) script = address_to_script(addr)
return script_to_scripthash(script) return script_to_scripthash(script)
def script_to_scripthash(script: str) -> str: def script_to_scripthash(script):
h = sha256(bfh(script))[0:32] h = sha256(bytes.fromhex(script))[0:32]
return bh2u(bytes(reversed(h))) return bh2u(bytes(reversed(h)))
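
address_to_scripthash is what the Electrum protocol uses to watch an address server-side: rebuild the output script, SHA256 it, and present the digest in reversed (little-endian) byte order. A sketch that builds the address from a dummy hash160 so it stays self-contained (import path assumed):

from electrum.bitcoin import hash160_to_p2pkh, address_to_script, address_to_scripthash

addr = hash160_to_p2pkh(bytes(20))                   # address for an all-zero hash160
script = address_to_script(addr)
assert script == '76a914' + '00' * 20 + '88ac'       # OP_DUP OP_HASH160 <20 bytes> OP_EQUALVERIFY OP_CHECKSIG
print(address_to_scripthash(addr))                   # reversed sha256 of that script, hex
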
def public_key_to_p2pk_script(pubkey: str) -> str: def public_key_to_p2pk_script(pubkey):
return push_script(pubkey) + opcodes.OP_CHECKSIG.hex() script = push_script(pubkey)
script += 'ac' # op_checksig
def pubkeyhash_to_p2pkh_script(pubkey_hash160: str) -> str:
script = bytes([opcodes.OP_DUP, opcodes.OP_HASH160]).hex()
script += push_script(pubkey_hash160)
script += bytes([opcodes.OP_EQUALVERIFY, opcodes.OP_CHECKSIG]).hex()
return script return script
__b58chars = b'123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz' __b58chars = b'123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'
assert len(__b58chars) == 58 assert len(__b58chars) == 58
@ -448,7 +310,7 @@ __b43chars = b'0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ$*+-./:'
assert len(__b43chars) == 43 assert len(__b43chars) == 43
def base_encode(v: bytes, *, base: int) -> str: def base_encode(v: bytes, base: int) -> str:
""" encode v, which is a string of bytes, to base58.""" """ encode v, which is a string of bytes, to base58."""
assert_bytes(v) assert_bytes(v)
if base not in (58, 43): if base not in (58, 43):
@ -457,11 +319,8 @@ def base_encode(v: bytes, *, base: int) -> str:
if base == 43: if base == 43:
chars = __b43chars chars = __b43chars
long_value = 0 long_value = 0
power_of_base = 1 for (i, c) in enumerate(v[::-1]):
for c in v[::-1]: long_value += (256**i) * c
# naive but slow variant: long_value += (256**i) * c
long_value += power_of_base * c
power_of_base <<= 8
result = bytearray() result = bytearray()
while long_value >= base: while long_value >= base:
div, mod = divmod(long_value, base) div, mod = divmod(long_value, base)
@ -481,7 +340,7 @@ def base_encode(v: bytes, *, base: int) -> str:
return result.decode('ascii') return result.decode('ascii')
def base_decode(v: Union[bytes, str], *, base: int, length: int = None) -> Optional[bytes]: def base_decode(v, length, base):
""" decode v into a string of len bytes.""" """ decode v into a string of len bytes."""
# assert_bytes(v) # assert_bytes(v)
v = to_bytes(v, 'ascii') v = to_bytes(v, 'ascii')
@ -491,14 +350,11 @@ def base_decode(v: Union[bytes, str], *, base: int, length: int = None) -> Optional[bytes]:
if base == 43: if base == 43:
chars = __b43chars chars = __b43chars
long_value = 0 long_value = 0
power_of_base = 1 for (i, c) in enumerate(v[::-1]):
for c in v[::-1]:
digit = chars.find(bytes([c])) digit = chars.find(bytes([c]))
if digit == -1: if digit == -1:
raise ValueError('Forbidden character {} for base {}'.format(c, base)) raise ValueError('Forbidden character {} for base {}'.format(c, base))
# naive but slow variant: long_value += digit * (base**i) long_value += digit * (base**i)
long_value += digit * power_of_base
power_of_base *= base
result = bytearray() result = bytearray()
while long_value >= 256: while long_value >= 256:
div, mod = divmod(long_value, 256) div, mod = divmod(long_value, 256)
@ -522,20 +378,21 @@ class InvalidChecksum(Exception):
pass pass
def EncodeBase58Check(vchIn: bytes) -> str: def EncodeBase58Check(vchIn):
hash = sha256d(vchIn) hash = Hash(vchIn)
return base_encode(vchIn + hash[0:4], base=58) return base_encode(vchIn + hash[0:4], base=58)
def DecodeBase58Check(psz: Union[bytes, str]) -> bytes: def DecodeBase58Check(psz):
vchRet = base_decode(psz, base=58) vchRet = base_decode(psz, None, base=58)
payload = vchRet[0:-4] key = vchRet[0:-4]
csum_found = vchRet[-4:] csum = vchRet[-4:]
csum_calculated = sha256d(payload)[0:4] hash = Hash(key)
if csum_calculated != csum_found: cs32 = hash[0:4]
raise InvalidChecksum(f'calculated {bh2u(csum_calculated)}, found {bh2u(csum_found)}') if cs32 != csum:
raise InvalidChecksum('expected {}, actual {}'.format(bh2u(cs32), bh2u(csum)))
else: else:
return payload return key
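
EncodeBase58Check appends the first four bytes of the double-SHA256 of the payload as a checksum before Base58-encoding; DecodeBase58Check strips and verifies it, raising InvalidChecksum on any mismatch. A round-trip sketch (import path assumed, payload is a dummy version byte plus hash160):

from electrum.bitcoin import EncodeBase58Check, DecodeBase58Check    # assumed import path

payload = b'\x00' + bytes(20)                 # e.g. a version byte plus a 20-byte hash160
encoded = EncodeBase58Check(payload)
assert DecodeBase58Check(encoded) == payload  # a tampered string raises InvalidChecksum instead
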
# backwards compat # backwards compat
@ -552,8 +409,11 @@ WIF_SCRIPT_TYPES = {
WIF_SCRIPT_TYPES_INV = inv_dict(WIF_SCRIPT_TYPES) WIF_SCRIPT_TYPES_INV = inv_dict(WIF_SCRIPT_TYPES)
def is_segwit_script_type(txin_type: str) -> bool: PURPOSE48_SCRIPT_TYPES = {
return txin_type in ('p2wpkh', 'p2wpkh-p2sh', 'p2wsh', 'p2wsh-p2sh') 'p2wsh-p2sh': 1, # specifically multisig
'p2wsh': 2, # specifically multisig
}
PURPOSE48_SCRIPT_TYPES_INV = inv_dict(PURPOSE48_SCRIPT_TYPES)
def serialize_privkey(secret: bytes, compressed: bool, txin_type: str, def serialize_privkey(secret: bytes, compressed: bool, txin_type: str,
@ -573,7 +433,7 @@ def serialize_privkey(secret: bytes, compressed: bool, txin_type: str,
return '{}:{}'.format(txin_type, base58_wif) return '{}:{}'.format(txin_type, base58_wif)
def deserialize_privkey(key: str) -> Tuple[str, bytes, bool]: def deserialize_privkey(key: str) -> (str, bytes, bool):
if is_minikey(key): if is_minikey(key):
return 'p2pkh', minikey_to_private_key(key), False return 'p2pkh', minikey_to_private_key(key), False
@ -603,70 +463,53 @@ def deserialize_privkey(key: str) -> Tuple[str, bytes, bool]:
if len(vch) not in [33, 34]: if len(vch) not in [33, 34]:
raise BitcoinException('invalid vch len for WIF key: {}'.format(len(vch))) raise BitcoinException('invalid vch len for WIF key: {}'.format(len(vch)))
compressed = False compressed = len(vch) == 34
if len(vch) == 34:
if vch[33] == 0x01:
compressed = True
else:
raise BitcoinException(f'invalid WIF key. length suggests compressed pubkey, '
f'but last byte is {vch[33]} != 0x01')
if is_segwit_script_type(txin_type) and not compressed:
raise BitcoinException('only compressed public keys can be used in segwit scripts')
secret_bytes = vch[1:33] secret_bytes = vch[1:33]
# we accept secrets outside curve range; cast into range here: # we accept secrets outside curve range; cast into range here:
secret_bytes = ecc.ECPrivkey.normalize_secret_bytes(secret_bytes) secret_bytes = ecc.ECPrivkey.normalize_secret_bytes(secret_bytes)
return txin_type, secret_bytes, compressed return txin_type, secret_bytes, compressed
def is_compressed_privkey(sec: str) -> bool: def is_compressed(sec):
return deserialize_privkey(sec)[2] return deserialize_privkey(sec)[2]
def address_from_private_key(sec: str) -> str: def address_from_private_key(sec):
txin_type, privkey, compressed = deserialize_privkey(sec) txin_type, privkey, compressed = deserialize_privkey(sec)
public_key = ecc.ECPrivkey(privkey).get_public_key_hex(compressed=compressed) public_key = ecc.ECPrivkey(privkey).get_public_key_hex(compressed=compressed)
return pubkey_to_address(txin_type, public_key) return pubkey_to_address(txin_type, public_key)
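
serialize_privkey emits an Electrum-style "txin_type:WIF" string, deserialize_privkey parses it back into (script type, 32-byte secret, compressed flag), and address_from_private_key goes straight from that string to the matching address. A round-trip sketch with a dummy secret (import path assumed; a constant key like this is for illustration only):

from electrum.bitcoin import (serialize_privkey, deserialize_privkey,
                              address_from_private_key)              # assumed import path

secret = (1).to_bytes(32, 'big')                       # dummy private key, within curve range
wif = serialize_privkey(secret, True, 'p2wpkh')        # compressed, native segwit script type
txin_type, secret2, compressed = deserialize_privkey(wif)
assert (txin_type, secret2, compressed) == ('p2wpkh', secret, True)
print(address_from_private_key(wif))                   # bech32 address for the dummy key
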
def is_segwit_address(addr: str, *, net=None) -> bool: def is_segwit_address(addr):
if net is None: net = constants.net
try: try:
witver, witprog = segwit_addr.decode(net.SEGWIT_HRP, addr) witver, witprog = segwit_addr.decode(constants.net.SEGWIT_HRP, addr)
except Exception as e: except Exception as e:
return False return False
return witprog is not None return witprog is not None
def is_b58_address(addr: str, *, net=None) -> bool: def is_b58_address(addr):
if net is None: net = constants.net
try: try:
# test length, checksum, encoding:
addrtype, h = b58_address_to_hash160(addr) addrtype, h = b58_address_to_hash160(addr)
except Exception as e: except Exception as e:
return False return False
if addrtype not in [net.ADDRTYPE_P2PKH, net.ADDRTYPE_P2SH]: if addrtype not in [constants.net.ADDRTYPE_P2PKH, constants.net.ADDRTYPE_P2SH]:
return False return False
return True return addr == hash160_to_b58_address(h, addrtype)
def is_address(addr: str, *, net=None) -> bool: def is_address(addr):
if net is None: net = constants.net return is_segwit_address(addr) or is_b58_address(addr)
return is_segwit_address(addr, net=net) \
or is_b58_address(addr, net=net)
def is_private_key(key: str, *, raise_on_error=False) -> bool: def is_private_key(key):
try: try:
deserialize_privkey(key) k = deserialize_privkey(key)
return True return k is not False
except BaseException as e: except:
if raise_on_error:
raise
return False return False
########### end pywallet functions ####################### ########### end pywallet functions #######################
def is_minikey(text: str) -> bool: def is_minikey(text):
# Minikeys are typically 22 or 30 characters, but this routine # Minikeys are typically 22 or 30 characters, but this routine
# permits any length of 20 or more provided the minikey is valid. # permits any length of 20 or more provided the minikey is valid.
# A valid minikey must begin with an 'S', be in base58, and when # A valid minikey must begin with an 'S', be in base58, and when
@ -676,5 +519,264 @@ def is_minikey(text: str) -> bool:
and all(ord(c) in __b58chars for c in text) and all(ord(c) in __b58chars for c in text)
and sha256(text + '?')[0] == 0x00) and sha256(text + '?')[0] == 0x00)
def minikey_to_private_key(text: str) -> bytes: def minikey_to_private_key(text):
return sha256(text) return sha256(text)
###################################### BIP32 ##############################
BIP32_PRIME = 0x80000000
def protect_against_invalid_ecpoint(func):
def func_wrapper(*args):
n = args[-1]
while True:
is_prime = n & BIP32_PRIME
try:
return func(*args[:-1], n=n)
except ecc.InvalidECPointException:
print_error('bip32 protect_against_invalid_ecpoint: skipping index')
n += 1
is_prime2 = n & BIP32_PRIME
if is_prime != is_prime2: raise OverflowError()
return func_wrapper
# Child private key derivation function (from master private key)
# k = master private key (32 bytes)
# c = master chain code (extra entropy for key derivation) (32 bytes)
# n = the index of the key we want to derive. (only 32 bits will be used)
# If n is hardened (i.e. the 32nd bit is set), the resulting private key's
# corresponding public key can NOT be determined without the master private key.
# However, if n is not hardened, the resulting private key's corresponding
# public key can be determined without the master private key.
@protect_against_invalid_ecpoint
def CKD_priv(k, c, n):
if n < 0: raise ValueError('the bip32 index needs to be non-negative')
is_prime = n & BIP32_PRIME
return _CKD_priv(k, c, bfh(rev_hex(int_to_hex(n,4))), is_prime)
def _CKD_priv(k, c, s, is_prime):
try:
keypair = ecc.ECPrivkey(k)
except ecc.InvalidECPointException as e:
raise BitcoinException('Impossible xprv (not within curve order)') from e
cK = keypair.get_public_key_bytes(compressed=True)
data = bytes([0]) + k + s if is_prime else cK + s
I = hmac_oneshot(c, data, hashlib.sha512)
I_left = ecc.string_to_number(I[0:32])
k_n = (I_left + ecc.string_to_number(k)) % ecc.CURVE_ORDER
if I_left >= ecc.CURVE_ORDER or k_n == 0:
raise ecc.InvalidECPointException()
k_n = ecc.number_to_string(k_n, ecc.CURVE_ORDER)
c_n = I[32:]
return k_n, c_n
# Child public key derivation function (from public key only)
# K = master public key
# c = master chain code
# n = index of key we want to derive
# This function allows us to find the nth public key, as long as n is
# not hardened. If n is hardened, we need the master private key to find it.
@protect_against_invalid_ecpoint
def CKD_pub(cK, c, n):
if n < 0: raise ValueError('the bip32 index needs to be non-negative')
if n & BIP32_PRIME: raise Exception()
return _CKD_pub(cK, c, bfh(rev_hex(int_to_hex(n,4))))
# helper function, callable with arbitrary string.
# note: 's' does not need to fit into 32 bits here! (c.f. trustedcoin billing)
def _CKD_pub(cK, c, s):
I = hmac_oneshot(c, cK + s, hashlib.sha512)
pubkey = ecc.ECPrivkey(I[0:32]) + ecc.ECPubkey(cK)
if pubkey.is_at_infinity():
raise ecc.InvalidECPointException()
cK_n = pubkey.get_public_key_bytes(compressed=True)
c_n = I[32:]
return cK_n, c_n
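
The comment blocks above describe the two BIP32 derivation rules: hardened indices (bit 31, BIP32_PRIME) need the parent private key, while non-hardened children can also be derived from the parent public key alone, and the private and public paths must agree. A sketch checking that agreement for child 0 from dummy master key material; these helpers sit in the right-hand column of this file, and the electrum import paths are assumptions:

import hashlib, hmac
from electrum import ecc                                          # assumed import path
from electrum.bitcoin import CKD_priv, CKD_pub, BIP32_PRIME       # assumed import path

I = hmac.new(b"Bitcoin seed", b"illustrative seed", hashlib.sha512).digest()
k, c = I[:32], I[32:]                                             # dummy master key and chain code

k0, c0 = CKD_priv(k, c, 0)                       # non-hardened child 0
kh, ch = CKD_priv(k, c, 0 | BIP32_PRIME)         # hardened child 0' (no public-only equivalent)

cK = ecc.ECPrivkey(k).get_public_key_bytes(compressed=True)
K0, C0 = CKD_pub(cK, c, 0)                       # public-only derivation of child 0
assert C0 == c0
assert K0 == ecc.ECPrivkey(k0).get_public_key_bytes(compressed=True)
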
def xprv_header(xtype, *, net=None):
if net is None:
net = constants.net
return bfh("%08x" % net.XPRV_HEADERS[xtype])
def xpub_header(xtype, *, net=None):
if net is None:
net = constants.net
return bfh("%08x" % net.XPUB_HEADERS[xtype])
def serialize_xprv(xtype, c, k, depth=0, fingerprint=b'\x00'*4,
child_number=b'\x00'*4, *, net=None):
if not ecc.is_secret_within_curve_range(k):
raise BitcoinException('Impossible xprv (not within curve order)')
xprv = xprv_header(xtype, net=net) \
+ bytes([depth]) + fingerprint + child_number + c + bytes([0]) + k
return EncodeBase58Check(xprv)
def serialize_xpub(xtype, c, cK, depth=0, fingerprint=b'\x00'*4,
child_number=b'\x00'*4, *, net=None):
xpub = xpub_header(xtype, net=net) \
+ bytes([depth]) + fingerprint + child_number + c + cK
return EncodeBase58Check(xpub)
class InvalidMasterKeyVersionBytes(BitcoinException): pass
def deserialize_xkey(xkey, prv, *, net=None):
if net is None:
net = constants.net
xkey = DecodeBase58Check(xkey)
if len(xkey) != 78:
raise BitcoinException('Invalid length for extended key: {}'
.format(len(xkey)))
depth = xkey[4]
fingerprint = xkey[5:9]
child_number = xkey[9:13]
c = xkey[13:13+32]
header = int('0x' + bh2u(xkey[0:4]), 16)
headers = net.XPRV_HEADERS if prv else net.XPUB_HEADERS
if header not in headers.values():
raise InvalidMasterKeyVersionBytes('Invalid extended key format: {}'
.format(hex(header)))
xtype = list(headers.keys())[list(headers.values()).index(header)]
n = 33 if prv else 32
K_or_k = xkey[13+n:]
if prv and not ecc.is_secret_within_curve_range(K_or_k):
raise BitcoinException('Impossible xprv (not within curve order)')
return xtype, depth, fingerprint, child_number, c, K_or_k
def deserialize_xpub(xkey, *, net=None):
return deserialize_xkey(xkey, False, net=net)
def deserialize_xprv(xkey, *, net=None):
return deserialize_xkey(xkey, True, net=net)
def xpub_type(x):
return deserialize_xpub(x)[0]
def is_xpub(text):
try:
deserialize_xpub(text)
return True
except:
return False
def is_xprv(text):
try:
deserialize_xprv(text)
return True
except:
return False
def xpub_from_xprv(xprv):
xtype, depth, fingerprint, child_number, c, k = deserialize_xprv(xprv)
cK = ecc.ECPrivkey(k).get_public_key_bytes(compressed=True)
return serialize_xpub(xtype, c, cK, depth, fingerprint, child_number)
def bip32_root(seed, xtype):
I = hmac_oneshot(b"Bitcoin seed", seed, hashlib.sha512)
master_k = I[0:32]
master_c = I[32:]
# create xprv first, as that will check if master_k is within curve order
xprv = serialize_xprv(xtype, master_c, master_k)
cK = ecc.ECPrivkey(master_k).get_public_key_bytes(compressed=True)
xpub = serialize_xpub(xtype, master_c, cK)
return xprv, xpub
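
bip32_root is the BIP32 master step: HMAC-SHA512 of the seed with the key "Bitcoin seed", the left half becoming the master private key and the right half the chain code, which serialize_xprv/serialize_xpub then wrap into extended keys. A sketch with a dummy seed (import path assumed; the version-byte prefixes of the resulting strings depend on constants.net):

from electrum.bitcoin import bip32_root, xpub_from_xprv      # assumed import path

seed = bytes(16)                                 # dummy 16-byte seed, illustration only
xprv, xpub = bip32_root(seed, 'standard')
assert xpub == xpub_from_xprv(xprv)              # both encode the same master node
print(xprv[:4], xpub[:4])                        # version prefixes, e.g. xprv/xpub on mainnet
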
def xpub_from_pubkey(xtype, cK):
if cK[0] not in (0x02, 0x03):
raise ValueError('Unexpected first byte: {}'.format(cK[0]))
return serialize_xpub(xtype, b'\x00'*32, cK)
def bip32_derivation(s):
if not s.startswith('m/'):
raise ValueError('invalid bip32 derivation path: {}'.format(s))
s = s[2:]
for n in s.split('/'):
if n == '': continue
i = int(n[:-1]) + BIP32_PRIME if n[-1] == "'" else int(n)
yield i
def convert_bip32_path_to_list_of_uint32(n: str) -> List[int]:
"""Convert bip32 path to list of uint32 integers with prime flags
m/0/-1/1' -> [0, 0x80000001, 0x80000001]
based on code in trezorlib
"""
path = []
for x in n.split('/')[1:]:
if x == '': continue
prime = 0
if x.endswith("'"):
x = x.replace('\'', '')
prime = BIP32_PRIME
if x.startswith('-'):
prime = BIP32_PRIME
path.append(abs(int(x)) | prime)
return path
def is_bip32_derivation(x):
try:
[ i for i in bip32_derivation(x)]
return True
except :
return False
def bip32_private_derivation(xprv, branch, sequence):
if not sequence.startswith(branch):
raise ValueError('incompatible branch ({}) and sequence ({})'
.format(branch, sequence))
if branch == sequence:
return xprv, xpub_from_xprv(xprv)
xtype, depth, fingerprint, child_number, c, k = deserialize_xprv(xprv)
sequence = sequence[len(branch):]
for n in sequence.split('/'):
if n == '': continue
i = int(n[:-1]) + BIP32_PRIME if n[-1] == "'" else int(n)
parent_k = k
k, c = CKD_priv(k, c, i)
depth += 1
parent_cK = ecc.ECPrivkey(parent_k).get_public_key_bytes(compressed=True)
fingerprint = hash_160(parent_cK)[0:4]
child_number = bfh("%08X"%i)
cK = ecc.ECPrivkey(k).get_public_key_bytes(compressed=True)
xpub = serialize_xpub(xtype, c, cK, depth, fingerprint, child_number)
xprv = serialize_xprv(xtype, c, k, depth, fingerprint, child_number)
return xprv, xpub
def bip32_public_derivation(xpub, branch, sequence):
xtype, depth, fingerprint, child_number, c, cK = deserialize_xpub(xpub)
if not sequence.startswith(branch):
raise ValueError('incompatible branch ({}) and sequence ({})'
.format(branch, sequence))
sequence = sequence[len(branch):]
for n in sequence.split('/'):
if n == '': continue
i = int(n)
parent_cK = cK
cK, c = CKD_pub(cK, c, i)
depth += 1
fingerprint = hash_160(parent_cK)[0:4]
child_number = bfh("%08X"%i)
return serialize_xpub(xtype, c, cK, depth, fingerprint, child_number)
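
bip32_private_derivation and bip32_public_derivation both walk the part of the path below a branch point; for a purely non-hardened path the xpub obtained from the child xprv must match the xpub derived directly from the parent xpub. A sketch building on bip32_root (import path assumed, dummy seed):

from electrum.bitcoin import (bip32_root, bip32_private_derivation,
                              bip32_public_derivation)               # assumed import path

xprv, xpub = bip32_root(b'illustrative seed', 'standard')
child_xprv, child_xpub = bip32_private_derivation(xprv, "m/", "m/0/1")
assert child_xpub == bip32_public_derivation(xpub, "m/", "m/0/1")    # non-hardened path only
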
def bip32_private_key(sequence, k, chain):
for i in sequence:
k, chain = CKD_priv(k, chain, i)
return k

View file

@ -22,26 +22,14 @@
# SOFTWARE. # SOFTWARE.
import os import os
import threading import threading
from typing import Optional, Dict, Mapping, Sequence
import hashlib
import hmac
from . import util from . import util
from .bitcoin import hash_encode, int_to_hex, rev_hex from .bitcoin import Hash, hash_encode, int_to_hex, rev_hex
from .crypto import sha256d
from . import constants from . import constants
from .util import bfh, bh2u from .util import bfh, bh2u
from .simple_config import SimpleConfig
from .logging import get_logger, Logger
MAX_TARGET = 0x00000000FFFF0000000000000000000000000000000000000000000000000000
_logger = get_logger(__name__)
HEADER_SIZE = 112 # bytes
MAX_TARGET = 0x0000FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF
GENESIS_BITS = 0x1f00ffff
N_TARGET_TIMESPAN = 150
class MissingHeader(Exception): class MissingHeader(Exception):
pass pass
@ -49,167 +37,88 @@ class MissingHeader(Exception):
class InvalidHeader(Exception): class InvalidHeader(Exception):
pass pass
def serialize_header(header_dict: dict) -> str: def serialize_header(res):
s = int_to_hex(header_dict['version'], 4) \ s = int_to_hex(res.get('version'), 4) \
+ rev_hex(header_dict['prev_block_hash']) \ + rev_hex(res.get('prev_block_hash')) \
+ rev_hex(header_dict['merkle_root']) \ + rev_hex(res.get('merkle_root')) \
+ rev_hex(header_dict['claim_trie_root']) \ + int_to_hex(int(res.get('timestamp')), 4) \
+ int_to_hex(int(header_dict['timestamp']), 4) \ + int_to_hex(int(res.get('bits')), 4) \
+ int_to_hex(int(header_dict['bits']), 4) \ + int_to_hex(int(res.get('nonce')), 4)
+ int_to_hex(int(header_dict['nonce']), 4)
return s return s
def deserialize_header(s: bytes, height: int) -> dict: def deserialize_header(s, height):
if not s: if not s:
raise InvalidHeader('Invalid header: {}'.format(s)) raise InvalidHeader('Invalid header: {}'.format(s))
if len(s) != HEADER_SIZE: if len(s) != 80:
raise InvalidHeader('Invalid header length: {}'.format(len(s))) raise InvalidHeader('Invalid header length: {}'.format(len(s)))
hex_to_int = lambda s: int.from_bytes(s, byteorder='little') hex_to_int = lambda s: int('0x' + bh2u(s[::-1]), 16)
h = {} h = {}
h['version'] = hex_to_int(s[0:4]) h['version'] = hex_to_int(s[0:4])
h['prev_block_hash'] = hash_encode(s[4:36]) h['prev_block_hash'] = hash_encode(s[4:36])
h['merkle_root'] = hash_encode(s[36:68]) h['merkle_root'] = hash_encode(s[36:68])
h['claim_trie_root'] = hash_encode(s[68:100]) h['timestamp'] = hex_to_int(s[68:72])
h['timestamp'] = hex_to_int(s[100:104]) h['bits'] = hex_to_int(s[72:76])
h['bits'] = hex_to_int(s[104:108]) h['nonce'] = hex_to_int(s[76:80])
h['nonce'] = hex_to_int(s[108:112])
h['block_height'] = height h['block_height'] = height
return h return h
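
In the left-hand column the header grows from 80 to 112 bytes because a 32-byte claim_trie_root sits between the merkle root and the timestamp, giving the offsets used above: 0-4 version, 4-36 prev hash, 36-68 merkle root, 68-100 claim trie root, 100-104 timestamp, 104-108 bits, 108-112 nonce, all integers little-endian. A standalone sketch packing an illustrative header with that layout (no project imports):

import struct

version, timestamp, bits, nonce = 1, 1_600_000_000, 0x1f00ffff, 42    # illustrative values
prev_hash = bytes(32); merkle_root = bytes(32); claim_trie_root = bytes(32)
raw = (struct.pack('<I', version) + prev_hash + merkle_root + claim_trie_root
       + struct.pack('<III', timestamp, bits, nonce))
assert len(raw) == 112
assert int.from_bytes(raw[104:108], 'little') == bits    # the same slice deserialize_header reads
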
def hash_header(header: dict) -> str: def hash_header(header):
if header is None: if header is None:
return '0' * 64 return '0' * 64
if header.get('prev_block_hash') is None: if header.get('prev_block_hash') is None:
header['prev_block_hash'] = '00'*32 header['prev_block_hash'] = '00'*32
return hash_raw_header(serialize_header(header)) return hash_encode(Hash(bfh(serialize_header(header))))
def pow_hash_header(header: dict) -> str:
if header is None:
return '0' * 64
return hash_encode(PoWHash(bfh(serialize_header(header))))
def sha256(x):
return hashlib.sha256(x).digest()
def sha512(x):
return hashlib.sha512(x).digest()
def ripemd160(x):
h = hashlib.new('ripemd160')
h.update(x)
return h.digest()
def Hash(x):
return sha256(sha256(x))
def hash_raw_header(header: str) -> str:
return hash_encode(sha256d(bfh(header)))
def PoWHash(x):
r = sha512(Hash(x))
r1 = ripemd160(r[:len(r) // 2])
r2 = ripemd160(r[len(r) // 2:])
r3 = Hash(r1 + r2)
return r3
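
PoWHash is the LBRY proof-of-work hash: double-SHA256 of the serialized header, SHA512 of that, RIPEMD-160 of each 32-byte half of the SHA512 digest, and a final double-SHA256 of the two concatenated digests. A standalone restatement using only hashlib, intended as a sketch that matches the helper above (like the module's own ripemd160 helper it needs an OpenSSL build that still provides ripemd160):

import hashlib

def pow_hash(data: bytes) -> bytes:
    sha256d = lambda b: hashlib.sha256(hashlib.sha256(b).digest()).digest()
    ripemd160 = lambda b: hashlib.new('ripemd160', b).digest()
    r = hashlib.sha512(sha256d(data)).digest()                   # 64 bytes
    return sha256d(ripemd160(r[:32]) + ripemd160(r[32:]))

print(pow_hash(bytes(112)).hex())                                # PoW hash of an all-zero header
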
# key: blockhash hex at forkpoint
# the chain at some key is the best chain that includes the given hash
blockchains = {} # type: Dict[str, Blockchain]
blockchains_lock = threading.RLock()
def read_blockchains(config: 'SimpleConfig'): blockchains = {}
best_chain = Blockchain(config=config,
forkpoint=0, def read_blockchains(config):
parent=None, blockchains[0] = Blockchain(config, 0, None)
forkpoint_hash=constants.net.GENESIS,
prev_hash=None)
blockchains[constants.net.GENESIS] = best_chain
# consistency checks
if best_chain.height() > constants.net.max_checkpoint():
header_after_cp = best_chain.read_header(constants.net.max_checkpoint()+1)
if not header_after_cp or not best_chain.can_connect(header_after_cp, check_height=False):
_logger.info("[blockchain] deleting best chain. cannot connect header after last cp to last cp.")
os.unlink(best_chain.path())
best_chain.update_size()
# forks
fdir = os.path.join(util.get_headers_dir(config), 'forks') fdir = os.path.join(util.get_headers_dir(config), 'forks')
util.make_dir(fdir) util.make_dir(fdir)
# files are named as: fork2_{forkpoint}_{prev_hash}_{first_hash} l = filter(lambda x: x.startswith('fork_'), os.listdir(fdir))
l = filter(lambda x: x.startswith('fork2_') and '.' not in x, os.listdir(fdir)) l = sorted(l, key = lambda x: int(x.split('_')[1]))
l = sorted(l, key=lambda x: int(x.split('_')[1])) # sort by forkpoint
def delete_chain(filename, reason):
_logger.info(f"[blockchain] deleting chain {filename}: {reason}")
os.unlink(os.path.join(fdir, filename))
def instantiate_chain(filename):
__, forkpoint, prev_hash, first_hash = filename.split('_')
forkpoint = int(forkpoint)
prev_hash = (64-len(prev_hash)) * "0" + prev_hash # left-pad with zeroes
first_hash = (64-len(first_hash)) * "0" + first_hash
# forks below the max checkpoint are not allowed
if forkpoint <= constants.net.max_checkpoint():
delete_chain(filename, "deleting fork below max checkpoint")
return
# find parent (sorting by forkpoint guarantees it's already instantiated)
for parent in blockchains.values():
if parent.check_hash(forkpoint - 1, prev_hash):
break
else:
delete_chain(filename, "cannot find parent for chain")
return
b = Blockchain(config=config,
forkpoint=forkpoint,
parent=parent,
forkpoint_hash=first_hash,
prev_hash=prev_hash)
# consistency checks
h = b.read_header(b.forkpoint)
if first_hash != hash_header(h):
delete_chain(filename, "incorrect first hash for chain")
return
if not b.parent.can_connect(h, check_height=False):
delete_chain(filename, "cannot connect chain to parent")
return
chain_id = b.get_id()
assert first_hash == chain_id, (first_hash, chain_id)
blockchains[chain_id] = b
for filename in l: for filename in l:
instantiate_chain(filename) forkpoint = int(filename.split('_')[2])
parent_id = int(filename.split('_')[1])
b = Blockchain(config, forkpoint, parent_id)
h = b.read_header(b.forkpoint)
if b.parent().can_connect(h, check_height=False):
blockchains[b.forkpoint] = b
else:
util.print_error("cannot connect", filename)
return blockchains
def check_header(header):
if type(header) is not dict:
return False
for b in blockchains.values():
if b.check_header(header):
return b
return False
def can_connect(header):
for b in blockchains.values():
if b.can_connect(header):
return b
return False
def get_best_chain() -> 'Blockchain': class Blockchain(util.PrintError):
return blockchains[constants.net.GENESIS]
# block hash -> chain work; up to and including that block
_CHAINWORK_CACHE = {
"0000000000000000000000000000000000000000000000000000000000000000": 0, # virtual block at height -1
} # type: Dict[str, int]
class Blockchain(Logger):
""" """
Manages blockchain headers and their verification Manages blockchain headers and their verification
""" """
def __init__(self, config: SimpleConfig, forkpoint: int, parent: Optional['Blockchain'], def __init__(self, config, forkpoint, parent_id):
forkpoint_hash: str, prev_hash: Optional[str]):
assert isinstance(forkpoint_hash, str) and len(forkpoint_hash) == 64, forkpoint_hash
assert (prev_hash is None) or (isinstance(prev_hash, str) and len(prev_hash) == 64), prev_hash
# assert (parent is None) == (forkpoint == 0)
if 0 < forkpoint <= constants.net.max_checkpoint():
raise Exception(f"cannot fork below max checkpoint. forkpoint: {forkpoint}")
Logger.__init__(self)
self.config = config self.config = config
self.forkpoint = forkpoint # height of first header self.catch_up = None # interface catching up
self.parent = parent self.forkpoint = forkpoint
self._forkpoint_hash = forkpoint_hash # blockhash at forkpoint. "first hash" self.checkpoints = constants.net.CHECKPOINTS
self._prev_hash = prev_hash # blockhash immediately before forkpoint self.parent_id = parent_id
assert parent_id != forkpoint
self.lock = threading.RLock() self.lock = threading.RLock()
self.update_size() with self.lock:
self.update_size()
def with_lock(func): def with_lock(func):
def func_wrapper(self, *args, **kwargs): def func_wrapper(self, *args, **kwargs):
@ -217,160 +126,84 @@ class Blockchain(Logger):
return func(self, *args, **kwargs) return func(self, *args, **kwargs)
return func_wrapper return func_wrapper
@property def parent(self):
def checkpoints(self): return blockchains[self.parent_id]
return constants.net.CHECKPOINTS
def get_max_child(self) -> Optional[int]: def get_max_child(self):
children = self.get_direct_children() children = list(filter(lambda y: y.parent_id==self.forkpoint, blockchains.values()))
return max([x.forkpoint for x in children]) if children else None return max([x.forkpoint for x in children]) if children else None
def get_max_forkpoint(self) -> int: def get_forkpoint(self):
"""Returns the max height where there is a fork
related to this chain.
"""
mc = self.get_max_child() mc = self.get_max_child()
return mc if mc is not None else self.forkpoint return mc if mc is not None else self.forkpoint
def get_direct_children(self) -> Sequence['Blockchain']: def get_branch_size(self):
with blockchains_lock: return self.height() - self.get_forkpoint() + 1
return list(filter(lambda y: y.parent==self, blockchains.values()))
def get_parent_heights(self) -> Mapping['Blockchain', int]: def get_name(self):
"""Returns map: (parent chain -> height of last common block)""" return self.get_hash(self.get_forkpoint()).lstrip('00')[0:10]
with blockchains_lock:
result = {self: self.height()}
chain = self
while True:
parent = chain.parent
if parent is None: break
result[parent] = chain.forkpoint - 1
chain = parent
return result
def get_height_of_last_common_block_with_chain(self, other_chain: 'Blockchain') -> int: def check_header(self, header):
last_common_block_height = 0
our_parents = self.get_parent_heights()
their_parents = other_chain.get_parent_heights()
for chain in our_parents:
if chain in their_parents:
h = min(our_parents[chain], their_parents[chain])
last_common_block_height = max(last_common_block_height, h)
return last_common_block_height
@with_lock
def get_branch_size(self) -> int:
return self.height() - self.get_max_forkpoint() + 1
def get_name(self) -> str:
return self.get_hash(self.get_max_forkpoint()).lstrip('0')[0:10]
def check_header(self, header: dict) -> bool:
header_hash = hash_header(header) header_hash = hash_header(header)
height = header.get('block_height') height = header.get('block_height')
return self.check_hash(height, header_hash) return header_hash == self.get_hash(height)
def check_hash(self, height: int, header_hash: str) -> bool: def fork(parent, header):
"""Returns whether the hash of the block at given height
is the given hash.
"""
assert isinstance(header_hash, str) and len(header_hash) == 64, header_hash # hex
try:
return header_hash == self.get_hash(height)
except Exception:
return False
def fork(parent, header: dict) -> 'Blockchain':
if not parent.can_connect(header, check_height=False):
raise Exception("forking header does not connect to parent chain")
forkpoint = header.get('block_height') forkpoint = header.get('block_height')
self = Blockchain(config=parent.config, self = Blockchain(parent.config, forkpoint, parent.forkpoint)
forkpoint=forkpoint,
parent=parent,
forkpoint_hash=hash_header(header),
prev_hash=parent.get_hash(forkpoint-1))
self.assert_headers_file_available(parent.path())
open(self.path(), 'w+').close() open(self.path(), 'w+').close()
self.save_header(header) self.save_header(header)
# put into global dict. note that in some cases
# save_header might have already put it there but that's OK
chain_id = self.get_id()
with blockchains_lock:
blockchains[chain_id] = self
return self return self
@with_lock def height(self):
def height(self) -> int:
return self.forkpoint + self.size() - 1 return self.forkpoint + self.size() - 1
@with_lock def size(self):
def size(self) -> int: with self.lock:
return self._size return self._size
@with_lock def update_size(self):
def update_size(self) -> None:
p = self.path() p = self.path()
self._size = os.path.getsize(p)//HEADER_SIZE if os.path.exists(p) else 0 self._size = os.path.getsize(p)//80 if os.path.exists(p) else 0
@classmethod def verify_header(self, header, prev_hash, target):
def verify_header(self, header: dict, prev_hash: str, target: int, bits: int, expected_header_hash: str=None) -> None: _hash = hash_header(header)
_hash = pow_hash_header(header)
if expected_header_hash:
_hash2 = hash_header(header)
if expected_header_hash != _hash2:
raise Exception("hash mismatches with expected: {} vs {}".format(expected_header_hash, _hash2))
if prev_hash != header.get('prev_block_hash'): if prev_hash != header.get('prev_block_hash'):
raise Exception("prev hash mismatch: %s vs %s" % (prev_hash, header.get('prev_block_hash'))) raise Exception("prev hash mismatch: %s vs %s" % (prev_hash, header.get('prev_block_hash')))
if constants.net.TESTNET: if constants.net.TESTNET:
return return
bits = self.target_to_bits(target)
if bits != header.get('bits'):
raise Exception("bits mismatch: %s vs %s" % (bits, header.get('bits')))
if int('0x' + _hash, 16) > target:
raise Exception("insufficient proof of work: %s vs target %s" % (int('0x' + _hash, 16), target))
#if bits != header.get('bits'): def verify_chunk(self, index, data):
# raise Exception("bits mismatch: %s vs %s" % (bits, header.get('bits'))) num = len(data) // 80
#if int('0x' + _hash, 16) > target: prev_hash = self.get_hash(index * 2016 - 1)
# raise Exception("insufficient proof of work: %s vs target %s" % (int('0x' + _hash, 16), target)) target = self.get_target(index-1)
def verify_chunk(self, index: int, data: bytes) -> None:
num = len(data) // HEADER_SIZE
start_height = index * 2016
prev_hash = self.get_hash(start_height - 1)
for i in range(num): for i in range(num):
height = start_height + i raw_header = data[i*80:(i+1) * 80]
header = self.read_header(height - 1)
#bits, target = self.get_target2(height - 1, header)
try:
expected_header_hash = self.get_hash(height)
except MissingHeader:
expected_header_hash = None
raw_header = data[i*HEADER_SIZE : (i+1)*HEADER_SIZE]
header = deserialize_header(raw_header, index*2016 + i) header = deserialize_header(raw_header, index*2016 + i)
self.verify_header(header, prev_hash, 0, 0, expected_header_hash) self.verify_header(header, prev_hash, target)
prev_hash = hash_header(header) prev_hash = hash_header(header)
@with_lock
def path(self): def path(self):
d = util.get_headers_dir(self.config) d = util.get_headers_dir(self.config)
if self.parent is None: filename = 'blockchain_headers' if self.parent_id is None else os.path.join('forks', 'fork_%d_%d'%(self.parent_id, self.forkpoint))
filename = 'blockchain_headers'
else:
assert self.forkpoint > 0, self.forkpoint
prev_hash = self._prev_hash.lstrip('0')
first_hash = self._forkpoint_hash.lstrip('0')
basename = f'fork2_{self.forkpoint}_{prev_hash}_{first_hash}'
filename = os.path.join('forks', basename)
return os.path.join(d, filename) return os.path.join(d, filename)
@with_lock @with_lock
def save_chunk(self, index: int, chunk: bytes): def save_chunk(self, index, chunk):
assert index >= 0, index
chunk_within_checkpoint_region = index < len(self.checkpoints) chunk_within_checkpoint_region = index < len(self.checkpoints)
# chunks in checkpoint region are the responsibility of the 'main chain' # chunks in checkpoint region are the responsibility of the 'main chain'
if chunk_within_checkpoint_region and self.parent is not None: if chunk_within_checkpoint_region and self.parent_id is not None:
main_chain = get_best_chain() main_chain = blockchains[0]
main_chain.save_chunk(index, chunk) main_chain.save_chunk(index, chunk)
return return
delta_height = (index * 2016 - self.forkpoint) delta_height = (index * 2016 - self.forkpoint)
delta_bytes = delta_height * HEADER_SIZE delta_bytes = delta_height * 80
# if this chunk contains our forkpoint, only save the part after forkpoint # if this chunk contains our forkpoint, only save the part after forkpoint
# (the part before is the responsibility of the parent) # (the part before is the responsibility of the parent)
if delta_bytes < 0: if delta_bytes < 0:
@ -380,71 +213,42 @@ class Blockchain(Logger):
self.write(chunk, delta_bytes, truncate) self.write(chunk, delta_bytes, truncate)
self.swap_with_parent() self.swap_with_parent()
def swap_with_parent(self) -> None: @with_lock
with self.lock, blockchains_lock: def swap_with_parent(self):
# do the swap; possibly multiple ones if self.parent_id is None:
cnt = 0 return
while True: parent_branch_size = self.parent().height() - self.forkpoint + 1
old_parent = self.parent if parent_branch_size >= self.size():
if not self._swap_with_parent(): return
break self.print_error("swap", self.forkpoint, self.parent_id)
# make sure we are making progress parent_id = self.parent_id
cnt += 1 forkpoint = self.forkpoint
if cnt > len(blockchains): parent = self.parent()
raise Exception(f'swapping fork with parent too many times: {cnt}')
# we might have become the parent of some of our former siblings
for old_sibling in old_parent.get_direct_children():
if self.check_hash(old_sibling.forkpoint - 1, old_sibling._prev_hash):
old_sibling.parent = self
def _swap_with_parent(self) -> bool:
"""Check if this chain became stronger than its parent, and swap
the underlying files if so. The Blockchain instances will keep
'containing' the same headers, but their ids change and so
they will be stored in different files."""
if self.parent is None:
return False
if self.parent.get_chainwork() >= self.get_chainwork():
return False
self.logger.info(f"swapping {self.forkpoint} {self.parent.forkpoint}")
parent_branch_size = self.parent.height() - self.forkpoint + 1
forkpoint = self.forkpoint # type: Optional[int]
parent = self.parent # type: Optional[Blockchain]
child_old_id = self.get_id()
parent_old_id = parent.get_id()
# swap files
# child takes parent's name
# parent's new name will be something new (not child's old name)
self.assert_headers_file_available(self.path()) self.assert_headers_file_available(self.path())
child_old_name = self.path()
with open(self.path(), 'rb') as f: with open(self.path(), 'rb') as f:
my_data = f.read() my_data = f.read()
self.assert_headers_file_available(parent.path()) self.assert_headers_file_available(parent.path())
assert forkpoint > parent.forkpoint, (f"forkpoint of parent chain ({parent.forkpoint}) "
f"should be at lower height than children's ({forkpoint})")
with open(parent.path(), 'rb') as f: with open(parent.path(), 'rb') as f:
f.seek((forkpoint - parent.forkpoint)*HEADER_SIZE) f.seek((forkpoint - parent.forkpoint)*80)
parent_data = f.read(parent_branch_size*HEADER_SIZE) parent_data = f.read(parent_branch_size*80)
self.write(parent_data, 0) self.write(parent_data, 0)
parent.write(my_data, (forkpoint - parent.forkpoint)*HEADER_SIZE) parent.write(my_data, (forkpoint - parent.forkpoint)*80)
# store file path
for b in blockchains.values():
b.old_path = b.path()
# swap parameters # swap parameters
self.parent, parent.parent = parent.parent, self # type: Optional[Blockchain], Optional[Blockchain] self.parent_id = parent.parent_id; parent.parent_id = parent_id
self.forkpoint, parent.forkpoint = parent.forkpoint, self.forkpoint self.forkpoint = parent.forkpoint; parent.forkpoint = forkpoint
self._forkpoint_hash, parent._forkpoint_hash = parent._forkpoint_hash, hash_raw_header(bh2u(parent_data[:HEADER_SIZE])) self._size = parent._size; parent._size = parent_branch_size
self._prev_hash, parent._prev_hash = parent._prev_hash, self._prev_hash # move files
# parent's new name for b in blockchains.values():
os.replace(child_old_name, parent.path()) if b in [self, parent]: continue
self.update_size() if b.old_path != b.path():
parent.update_size() self.print_error("renaming", b.old_path, b.path())
os.rename(b.old_path, b.path())
# update pointers # update pointers
blockchains.pop(child_old_id, None) blockchains[self.forkpoint] = self
blockchains.pop(parent_old_id, None) blockchains[parent.forkpoint] = parent
blockchains[self.get_id()] = self
blockchains[parent.get_id()] = parent
return True
def get_id(self) -> str:
return self._forkpoint_hash
def assert_headers_file_available(self, path): def assert_headers_file_available(self, path):
if os.path.exists(path): if os.path.exists(path):
@ -454,76 +258,64 @@ class Blockchain(Logger):
else: else:
raise FileNotFoundError('Cannot find headers file but headers_dir is there. Should be at {}'.format(path)) raise FileNotFoundError('Cannot find headers file but headers_dir is there. Should be at {}'.format(path))
@with_lock def write(self, data, offset, truncate=True):
def write(self, data: bytes, offset: int, truncate: bool=True) -> None:
filename = self.path() filename = self.path()
self.assert_headers_file_available(filename) with self.lock:
with open(filename, 'rb+') as f: self.assert_headers_file_available(filename)
if truncate and offset != self._size * HEADER_SIZE: with open(filename, 'rb+') as f:
if truncate and offset != self._size*80:
f.seek(offset)
f.truncate()
f.seek(offset) f.seek(offset)
f.truncate() f.write(data)
f.seek(offset) f.flush()
f.write(data) os.fsync(f.fileno())
f.flush() self.update_size()
os.fsync(f.fileno())
self.update_size()
@with_lock @with_lock
def save_header(self, header: dict) -> None: def save_header(self, header):
delta = header.get('block_height') - self.forkpoint delta = header.get('block_height') - self.forkpoint
data = bfh(serialize_header(header)) data = bfh(serialize_header(header))
# headers are only _appended_ to the end: # headers are only _appended_ to the end:
assert delta == self.size(), (delta, self.size()) assert delta == self.size()
assert len(data) == HEADER_SIZE assert len(data) == 80
self.write(data, delta*HEADER_SIZE) self.write(data, delta*80)
self.swap_with_parent() self.swap_with_parent()
@with_lock def read_header(self, height):
def read_header(self, height: int) -> Optional[dict]: assert self.parent_id != self.forkpoint
if height < 0: if height < 0:
return return
if height < self.forkpoint: if height < self.forkpoint:
return self.parent.read_header(height) return self.parent().read_header(height)
if height > self.height(): if height > self.height():
return return
delta = height - self.forkpoint delta = height - self.forkpoint
name = self.path() name = self.path()
self.assert_headers_file_available(name) self.assert_headers_file_available(name)
with open(name, 'rb') as f: with open(name, 'rb') as f:
f.seek(delta * HEADER_SIZE) f.seek(delta * 80)
h = f.read(HEADER_SIZE) h = f.read(80)
if len(h) < HEADER_SIZE: if len(h) < 80:
raise Exception('Expected to read a full header. This was only {} bytes'.format(len(h))) raise Exception('Expected to read a full header. This was only {} bytes'.format(len(h)))
if h == bytes([0])*HEADER_SIZE: if h == bytes([0])*80:
return None return None
return deserialize_header(h, height) return deserialize_header(h, height)
def header_at_tip(self) -> Optional[dict]: def get_hash(self, height):
"""Return latest header."""
height = self.height()
return self.read_header(height)
def get_hash(self, height: int) -> str:
def is_height_checkpoint():
within_cp_range = height <= constants.net.max_checkpoint()
at_chunk_boundary = (height+1) % 2016 == 0
return within_cp_range and at_chunk_boundary
if height == -1: if height == -1:
return '0000000000000000000000000000000000000000000000000000000000000000' return '0000000000000000000000000000000000000000000000000000000000000000'
elif height == 0: elif height == 0:
return constants.net.GENESIS return constants.net.GENESIS
elif is_height_checkpoint(): elif height < len(self.checkpoints) * 2016:
assert (height+1) % 2016 == 0, height
index = height // 2016 index = height // 2016
h, t = self.checkpoints[index] h, t = self.checkpoints[index]
return h return h
else: else:
header = self.read_header(height) return hash_header(self.read_header(height))
if header is None:
raise MissingHeader(height)
return hash_header(header)
def get_target(self, index: int) -> int: def get_target(self, index):
# compute target from chunk x, used in chunk x+1 # compute target from chunk x, used in chunk x+1
if constants.net.TESTNET: if constants.net.TESTNET:
return 0 return 0
@ -540,165 +332,65 @@ class Blockchain(Logger):
bits = last.get('bits') bits = last.get('bits')
target = self.bits_to_target(bits) target = self.bits_to_target(bits)
nActualTimespan = last.get('timestamp') - first.get('timestamp') nActualTimespan = last.get('timestamp') - first.get('timestamp')
nTargetTimespan = 150 nTargetTimespan = 14 * 24 * 60 * 60
nModulatedTimespan = nTargetTimespan - (nActualTimespan - nTargetTimespan) / 8 nActualTimespan = max(nActualTimespan, nTargetTimespan // 4)
nMinTimespan = nTargetTimespan - (nTargetTimespan / 8) nActualTimespan = min(nActualTimespan, nTargetTimespan * 4)
nMaxTimespan = nTargetTimespan + (nTargetTimespan / 2) new_target = min(MAX_TARGET, (target * nActualTimespan) // nTargetTimespan)
if nModulatedTimespan < nMinTimespan: return new_target
nModulatedTimespan = nMinTimespan
elif nModulatedTimespan > nMaxTimespan:
nModulatedTimespan = nMaxTimespan
bnOld = ArithUint256.SetCompact(bits) def bits_to_target(self, bits):
bnNew = bnOld * nModulatedTimespan
# this doesn't work if it is nTargetTimespan even though that
# is what it looks like it should be based on reading the code
# in lbry.cpp
bnNew /= nModulatedTimespan
if bnNew > MAX_TARGET:
bnNew = ArithUint256(MAX_TARGET)
return bnNew.compact, bnNew._value
def get_target2(self, index, last, chain='main'):
if index == -1:
return GENESIS_BITS, MAX_TARGET
if index == 0:
return GENESIS_BITS, MAX_TARGET
first = self.read_header(index-1)
assert last is not None, "Last shouldn't be none"
# bits to target
bits = last.get('bits')
# print_error("Last bits: ", bits)
self.check_bits(bits)
# new target
nActualTimespan = last.get('timestamp') - first.get('timestamp')
nTargetTimespan = N_TARGET_TIMESPAN
nModulatedTimespan = nTargetTimespan - (nActualTimespan - nTargetTimespan) / 8
nMinTimespan = nTargetTimespan - (nTargetTimespan / 8)
nMaxTimespan = nTargetTimespan + (nTargetTimespan / 2)
if nModulatedTimespan < nMinTimespan:
nModulatedTimespan = nMinTimespan
elif nModulatedTimespan > nMaxTimespan:
nModulatedTimespan = nMaxTimespan
bnOld = ArithUint256.SetCompact(bits)
bnNew = bnOld * nModulatedTimespan
# this doesn't work if it is nTargetTimespan even though that
# is what it looks like it should be based on reading the code
# in lbry.cpp
bnNew /= nModulatedTimespan
if bnNew > MAX_TARGET:
bnNew = ArithUint256(MAX_TARGET)
return bnNew.compact, bnNew._value
def check_bits(self, bits):
bitsN = (bits >> 24) & 0xff bitsN = (bits >> 24) & 0xff
assert 0x03 <= bitsN <= 0x1f, \ if not (bitsN >= 0x03 and bitsN <= 0x1d):
"First part of bits should be in [0x03, 0x1d], but it was {}".format(hex(bitsN))
bitsBase = bits & 0xffffff
assert 0x8000 <= bitsBase <= 0x7fffff, \
"Second part of bits should be in [0x8000, 0x7fffff] but it was {}".format(bitsBase)
@classmethod
def bits_to_target(cls, bits: int) -> int:
bitsN = (bits >> 24) & 0xff
if not (0x03 <= bitsN <= 0x1f):
raise Exception("First part of bits should be in [0x03, 0x1d]") raise Exception("First part of bits should be in [0x03, 0x1d]")
bitsBase = bits & 0xffffff bitsBase = bits & 0xffffff
if not (0x8000 <= bitsBase <= 0x7fffff): if not (bitsBase >= 0x8000 and bitsBase <= 0x7fffff):
raise Exception("Second part of bits should be in [0x8000, 0x7fffff]") raise Exception("Second part of bits should be in [0x8000, 0x7fffff]")
return bitsBase << (8 * (bitsN-3)) return bitsBase << (8 * (bitsN-3))
@classmethod def target_to_bits(self, target):
def target_to_bits(cls, target: int) -> int:
c = ("%064x" % target)[2:] c = ("%064x" % target)[2:]
while c[:2] == '00' and len(c) > 6: while c[:2] == '00' and len(c) > 6:
c = c[2:] c = c[2:]
bitsN, bitsBase = len(c) // 2, int.from_bytes(bfh(c[:6]), byteorder='big') bitsN, bitsBase = len(c) // 2, int('0x' + c[:6], 16)
if bitsBase >= 0x800000: if bitsBase >= 0x800000:
bitsN += 1 bitsN += 1
bitsBase >>= 8 bitsBase >>= 8
return bitsN << 24 | bitsBase return bitsN << 24 | bitsBase
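
bits_to_target and target_to_bits convert between a 256-bit target and its 4-byte compact "nBits" form: the top byte is a size in bytes, the low three bytes a mantissa, shifted down one byte when its leading byte would be 0x80 or more. Worked arithmetic for the GENESIS_BITS and MAX_TARGET constants defined above (a standalone check of the same rules, not an import):

GENESIS_BITS = 0x1f00ffff
MAX_TARGET = (1 << 240) - 1                      # 0x0000ffff...ff (60 f's), as defined above

size, mantissa = GENESIS_BITS >> 24, GENESIS_BITS & 0xffffff
assert mantissa << (8 * (size - 3)) == 0xffff << 224     # what bits_to_target(GENESIS_BITS) returns

nbytes = (MAX_TARGET.bit_length() + 7) // 8      # 30
mantissa = MAX_TARGET >> (8 * (nbytes - 3))      # 0xffffff: leading byte >= 0x80, so shift again
if mantissa >= 0x800000:
    mantissa >>= 8
    nbytes += 1
assert (nbytes << 24) | mantissa == GENESIS_BITS # target_to_bits(MAX_TARGET) round-trips to the genesis bits
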
def chainwork_of_header_at_height(self, height: int) -> int: def can_connect(self, header, check_height=True):
"""work done by single header at given height"""
chunk_idx = height // 2016 - 1
target = self.get_target(chunk_idx)
work = ((2 ** 256 - target - 1) // (target + 1)) + 1
return work
@with_lock
def get_chainwork(self, height=None) -> int:
if height is None:
height = max(0, self.height())
if constants.net.TESTNET:
# On testnet/regtest, difficulty works somewhat different.
# It's out of scope to properly implement that.
return height
last_retarget = height // 2016 * 2016 - 1
cached_height = last_retarget
while _CHAINWORK_CACHE.get(self.get_hash(cached_height)) is None:
if cached_height <= -1:
break
cached_height -= 2016
assert cached_height >= -1, cached_height
running_total = _CHAINWORK_CACHE[self.get_hash(cached_height)]
while cached_height < last_retarget:
cached_height += 2016
work_in_single_header = self.chainwork_of_header_at_height(cached_height)
work_in_chunk = 2016 * work_in_single_header
running_total += work_in_chunk
_CHAINWORK_CACHE[self.get_hash(cached_height)] = running_total
cached_height += 2016
work_in_single_header = self.chainwork_of_header_at_height(cached_height)
work_in_last_partial_chunk = (height % 2016 + 1) * work_in_single_header
return running_total + work_in_last_partial_chunk
def can_connect(self, header: dict, check_height: bool=True) -> bool:
if header is None: if header is None:
return False return False
height = header['block_height'] height = header['block_height']
if check_height and self.height() != height - 1: if check_height and self.height() != height - 1:
print("cannot connect at height", height) #self.print_error("cannot connect at height", height)
return False return False
if height == 0: if height == 0:
return hash_header(header) == constants.net.GENESIS return hash_header(header) == constants.net.GENESIS
try: try:
prev_hash = self.get_hash(height - 1) prev_hash = self.get_hash(height - 1)
except: except:
return False return False
if prev_hash != header.get('prev_block_hash'): if prev_hash != header.get('prev_block_hash'):
return False return False
try: try:
bits, target = self.get_target2(height, header) target = self.get_target(height // 2016 - 1)
except MissingHeader: except MissingHeader:
return False return False
try: try:
self.verify_header(header, prev_hash, target, bits) self.verify_header(header, prev_hash, target)
except BaseException as e: except BaseException as e:
print(e)
return False return False
return True return True
def connect_chunk(self, idx: int, hexdata: str) -> bool: def connect_chunk(self, idx, hexdata):
assert idx >= 0, idx
try: try:
data = bfh(hexdata) data = bfh(hexdata)
self.verify_chunk(idx, data) self.verify_chunk(idx, data)
#self.print_error("validated chunk %d" % idx)
self.save_chunk(idx, data) self.save_chunk(idx, data)
return True return True
except BaseException as e: except BaseException as e:
self.logger.info(f'verify_chunk idx {idx} failed: {repr(e)}') self.print_error('verify_chunk %d failed'%idx, str(e))
return False return False
def get_checkpoints(self): def get_checkpoints(self):
@ -710,101 +402,3 @@ class Blockchain(Logger):
target = self.get_target(index) target = self.get_target(index)
cp.append((h, target)) cp.append((h, target))
return cp return cp
def check_header(header: dict) -> Optional[Blockchain]:
if type(header) is not dict:
return None
with blockchains_lock: chains = list(blockchains.values())
for b in chains:
if b.check_header(header):
return b
return None
def can_connect(header: dict) -> Optional[Blockchain]:
with blockchains_lock: chains = list(blockchains.values())
for b in chains:
if b.can_connect(header):
return b
return None
class ArithUint256:
# https://github.com/bitcoin/bitcoin/blob/master/src/arith_uint256.cpp
__slots__ = '_value', '_compact'
def __init__(self, value: int) -> None:
self._value = value
self._compact: Optional[int] = None
@classmethod
def SetCompact(cls, nCompact):
return cls.from_compact(nCompact)
@classmethod
def from_compact(cls, compact) -> 'ArithUint256':
size = compact >> 24
word = compact & 0x007fffff
if size <= 3:
return cls(word >> 8 * (3 - size))
else:
return cls(word << 8 * (size - 3))
@property
def value(self) -> int:
return self._value
@property
def compact(self) -> int:
if self._compact is None:
self._compact = self._calculate_compact()
return self._compact
@property
def negative(self) -> int:
return self._calculate_compact(negative=True)
@property
def bits(self) -> int:
""" Returns the position of the highest bit set plus one. """
bits = bin(self._value)[2:]
for i, d in enumerate(bits):
if d:
return (len(bits) - i) + 1
return 0
@property
def low64(self) -> int:
return self._value & 0xffffffffffffffff
def _calculate_compact(self, negative=False) -> int:
size = (self.bits + 7) // 8
if size <= 3:
compact = self.low64 << 8 * (3 - size)
else:
compact = ArithUint256(self._value >> 8 * (size - 3)).low64
# The 0x00800000 bit denotes the sign.
# Thus, if it is already set, divide the mantissa by 256 and increase the exponent.
if compact & 0x00800000:
compact >>= 8
size += 1
assert (compact & ~0x007fffff) == 0
assert size < 256
compact |= size << 24
if negative and compact & 0x007fffff:
compact |= 0x00800000
return compact
def __mul__(self, x):
# Take the mod because we are limited to an unsigned 256 bit number
return ArithUint256((self._value * x) % 2 ** 256)
def __truediv__(self, x):
return ArithUint256(int(self._value / x))
def __gt__(self, other):
return self._value > other
def __lt__(self, other):
return self._value < other
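A short usage sketch of the ArithUint256 class above (again using Bitcoin's 0x1d00ffff as a convenient example value):
bn = ArithUint256.from_compact(0x1d00ffff)
assert bn.value == 0xffff << 208
assert bn.compact == 0x1d00ffff
# arithmetic wraps modulo 2**256, mirroring arith_uint256 in Bitcoin Core
assert (ArithUint256(2 ** 255) * 2).value == 0
assert (ArithUint256(100) / 3).value == 33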

View file

@ -1,598 +0,0 @@
# -*- coding: utf-8 -*-
#
# Electrum - lightweight Bitcoin client
# Copyright (C) 2018 The Electrum developers
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import time
import random
import os
from collections import defaultdict
from typing import Sequence, List, Tuple, Optional, Dict, NamedTuple, TYPE_CHECKING, Set
import binascii
import base64
import asyncio
from .sql_db import SqlDB, sql
from . import constants
from .util import bh2u, profiler, get_headers_dir, bfh, is_ip_address, list_enabled_bits
from .logging import Logger
from .lnutil import LN_GLOBAL_FEATURES_KNOWN_SET, LNPeerAddr, format_short_channel_id, ShortChannelID
from .lnverifier import LNChannelVerifier, verify_sig_for_channel_update
if TYPE_CHECKING:
from .network import Network
class UnknownEvenFeatureBits(Exception): pass
def validate_features(features : int):
enabled_features = list_enabled_bits(features)
for fbit in enabled_features:
if (1 << fbit) not in LN_GLOBAL_FEATURES_KNOWN_SET and fbit % 2 == 0:
raise UnknownEvenFeatureBits()
FLAG_DISABLE = 1 << 1
FLAG_DIRECTION = 1 << 0
class ChannelInfo(NamedTuple):
short_channel_id: ShortChannelID
node1_id: bytes
node2_id: bytes
capacity_sat: Optional[int]
@staticmethod
def from_msg(payload):
features = int.from_bytes(payload['features'], 'big')
validate_features(features)
channel_id = payload['short_channel_id']
node_id_1 = payload['node_id_1']
node_id_2 = payload['node_id_2']
assert list(sorted([node_id_1, node_id_2])) == [node_id_1, node_id_2]
capacity_sat = None
return ChannelInfo(
short_channel_id = ShortChannelID.normalize(channel_id),
node1_id = node_id_1,
node2_id = node_id_2,
capacity_sat = capacity_sat
)
class Policy(NamedTuple):
key: bytes
cltv_expiry_delta: int
htlc_minimum_msat: int
htlc_maximum_msat: Optional[int]
fee_base_msat: int
fee_proportional_millionths: int
channel_flags: int
message_flags: int
timestamp: int
@staticmethod
def from_msg(payload):
return Policy(
key = payload['short_channel_id'] + payload['start_node'],
cltv_expiry_delta = int.from_bytes(payload['cltv_expiry_delta'], "big"),
htlc_minimum_msat = int.from_bytes(payload['htlc_minimum_msat'], "big"),
htlc_maximum_msat = int.from_bytes(payload['htlc_maximum_msat'], "big") if 'htlc_maximum_msat' in payload else None,
fee_base_msat = int.from_bytes(payload['fee_base_msat'], "big"),
fee_proportional_millionths = int.from_bytes(payload['fee_proportional_millionths'], "big"),
message_flags = int.from_bytes(payload['message_flags'], "big"),
channel_flags = int.from_bytes(payload['channel_flags'], "big"),
timestamp = int.from_bytes(payload['timestamp'], "big")
)
def is_disabled(self):
return self.channel_flags & FLAG_DISABLE
@property
def short_channel_id(self) -> ShortChannelID:
return ShortChannelID.normalize(self.key[0:8])
@property
def start_node(self):
return self.key[8:]
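For reference, a minimal hand-built channel_update payload that Policy.from_msg above accepts; every value here is made up, and all integers are big-endian byte strings as they appear on the wire:
payload = {
    'short_channel_id': bytes(8),
    'start_node': bytes(33),
    'cltv_expiry_delta': (144).to_bytes(2, 'big'),
    'htlc_minimum_msat': (1000).to_bytes(8, 'big'),
    'fee_base_msat': (1000).to_bytes(4, 'big'),
    'fee_proportional_millionths': (1).to_bytes(4, 'big'),
    'message_flags': b'\x01',
    'channel_flags': b'\x00',
    'timestamp': (1_500_000_000).to_bytes(4, 'big'),
}
policy = Policy.from_msg(payload)
assert policy.cltv_expiry_delta == 144
assert policy.htlc_maximum_msat is None       # optional field was omitted
assert not policy.is_disabled()               # FLAG_DISABLE bit not set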
class NodeInfo(NamedTuple):
node_id: bytes
features: int
timestamp: int
alias: str
@staticmethod
def from_msg(payload):
node_id = payload['node_id']
features = int.from_bytes(payload['features'], "big")
validate_features(features)
addresses = NodeInfo.parse_addresses_field(payload['addresses'])
alias = payload['alias'].rstrip(b'\x00')
timestamp = int.from_bytes(payload['timestamp'], "big")
return NodeInfo(node_id=node_id, features=features, timestamp=timestamp, alias=alias), [
Address(host=host, port=port, node_id=node_id, last_connected_date=None) for host, port in addresses]
@staticmethod
def parse_addresses_field(addresses_field):
buf = addresses_field
def read(n):
nonlocal buf
data, buf = buf[0:n], buf[n:]
return data
addresses = []
while buf:
atype = ord(read(1))
if atype == 0:
pass
elif atype == 1: # IPv4
ipv4_addr = '.'.join(map(lambda x: '%d' % x, read(4)))
port = int.from_bytes(read(2), 'big')
if is_ip_address(ipv4_addr) and port != 0:
addresses.append((ipv4_addr, port))
elif atype == 2: # IPv6
ipv6_addr = b':'.join([binascii.hexlify(read(2)) for i in range(8)])
ipv6_addr = ipv6_addr.decode('ascii')
port = int.from_bytes(read(2), 'big')
if is_ip_address(ipv6_addr) and port != 0:
addresses.append((ipv6_addr, port))
elif atype == 3: # onion v2
host = base64.b32encode(read(10)) + b'.onion'
host = host.decode('ascii').lower()
port = int.from_bytes(read(2), 'big')
addresses.append((host, port))
elif atype == 4: # onion v3
host = base64.b32encode(read(35)) + b'.onion'
host = host.decode('ascii').lower()
port = int.from_bytes(read(2), 'big')
addresses.append((host, port))
else:
# unknown address type
# we don't know how long it is -> have to stop parsing here
# if there are other addresses we could have parsed later, they are lost.
break
return addresses
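A small illustration of the wire format parsed above: a type-1 (IPv4) entry is one type byte, four address bytes and a two-byte big-endian port (this assumes is_ip_address from .util accepts dotted-quad strings):
field = b'\x01' + bytes([127, 0, 0, 1]) + (9735).to_bytes(2, 'big')
assert NodeInfo.parse_addresses_field(field) == [('127.0.0.1', 9735)]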
class Address(NamedTuple):
node_id: bytes
host: str
port: int
last_connected_date: Optional[int]
class CategorizedChannelUpdates(NamedTuple):
orphaned: List # no channel announcement for channel update
expired: List # update older than two weeks
deprecated: List # update older than database entry
good: List # good updates
to_delete: List # database entries to delete
# TODO It would make more sense to store the raw gossip messages in the db.
# That is pretty much a pre-requisite of actively participating in gossip.
create_channel_info = """
CREATE TABLE IF NOT EXISTS channel_info (
short_channel_id VARCHAR(64),
node1_id VARCHAR(66),
node2_id VARCHAR(66),
capacity_sat INTEGER,
PRIMARY KEY(short_channel_id)
)"""
create_policy = """
CREATE TABLE IF NOT EXISTS policy (
key VARCHAR(66),
cltv_expiry_delta INTEGER NOT NULL,
htlc_minimum_msat INTEGER NOT NULL,
htlc_maximum_msat INTEGER,
fee_base_msat INTEGER NOT NULL,
fee_proportional_millionths INTEGER NOT NULL,
channel_flags INTEGER NOT NULL,
message_flags INTEGER NOT NULL,
timestamp INTEGER NOT NULL,
PRIMARY KEY(key)
)"""
create_address = """
CREATE TABLE IF NOT EXISTS address (
node_id VARCHAR(66),
host STRING(256),
port INTEGER NOT NULL,
timestamp INTEGER,
PRIMARY KEY(node_id, host, port)
)"""
create_node_info = """
CREATE TABLE IF NOT EXISTS node_info (
node_id VARCHAR(66),
features INTEGER NOT NULL,
timestamp INTEGER NOT NULL,
alias STRING(64),
PRIMARY KEY(node_id)
)"""
class ChannelDB(SqlDB):
NUM_MAX_RECENT_PEERS = 20
def __init__(self, network: 'Network'):
path = os.path.join(get_headers_dir(network.config), 'channel_db')
super().__init__(network, path, commit_interval=100)
self.num_nodes = 0
self.num_channels = 0
self._channel_updates_for_private_channels = {} # type: Dict[Tuple[bytes, bytes], dict]
self.ca_verifier = LNChannelVerifier(network, self)
# initialized in load_data
self._channels = {} # type: Dict[bytes, ChannelInfo]
self._policies = {}
self._nodes = {}
# node_id -> (host, port, ts)
self._addresses = defaultdict(set) # type: Dict[bytes, Set[Tuple[str, int, int]]]
self._channels_for_node = defaultdict(set)
self.data_loaded = asyncio.Event()
self.network = network # only for callback
def update_counts(self):
self.num_nodes = len(self._nodes)
self.num_channels = len(self._channels)
self.num_policies = len(self._policies)
self.network.trigger_callback('channel_db', self.num_nodes, self.num_channels, self.num_policies)
def get_channel_ids(self):
return set(self._channels.keys())
def add_recent_peer(self, peer: LNPeerAddr):
now = int(time.time())
node_id = peer.pubkey
self._addresses[node_id].add((peer.host, peer.port, now))
self.save_node_address(node_id, peer, now)
def get_200_randomly_sorted_nodes_not_in(self, node_ids):
unshuffled = set(self._nodes.keys()) - node_ids
return random.sample(unshuffled, min(200, len(unshuffled)))
def get_last_good_address(self, node_id) -> Optional[LNPeerAddr]:
r = self._addresses.get(node_id)
if not r:
return None
addr = sorted(list(r), key=lambda x: x[2])[0]
host, port, timestamp = addr
try:
return LNPeerAddr(host, port, node_id)
except ValueError:
return None
def get_recent_peers(self):
assert self.data_loaded.is_set(), "channelDB load_data did not finish yet!"
# FIXME this does not reliably return "recent" peers...
# Also, the list() cast over the whole dict (thousands of elements),
# is really inefficient.
r = [self.get_last_good_address(node_id)
for node_id in list(self._addresses.keys())[-self.NUM_MAX_RECENT_PEERS:]]
return list(reversed(r))
# note: currently channel announcements are trusted by default (trusted=True);
# they are not verified. Verifying them would make the gossip sync
# even slower; especially as servers will start throttling us.
# It would probably put significant strain on servers if all clients
# verified the complete gossip.
def add_channel_announcement(self, msg_payloads, *, trusted=True):
if type(msg_payloads) is dict:
msg_payloads = [msg_payloads]
added = 0
for msg in msg_payloads:
short_channel_id = ShortChannelID(msg['short_channel_id'])
if short_channel_id in self._channels:
continue
if constants.net.rev_genesis_bytes() != msg['chain_hash']:
self.logger.info("ChanAnn has unexpected chain_hash {}".format(bh2u(msg['chain_hash'])))
continue
try:
channel_info = ChannelInfo.from_msg(msg)
except UnknownEvenFeatureBits:
self.logger.info("unknown feature bits")
continue
if trusted:
added += 1
self.add_verified_channel_info(msg)
else:
added += self.ca_verifier.add_new_channel_info(short_channel_id, msg)
self.update_counts()
self.logger.debug('add_channel_announcement: %d/%d'%(added, len(msg_payloads)))
def add_verified_channel_info(self, msg: dict, *, capacity_sat: int = None) -> None:
try:
channel_info = ChannelInfo.from_msg(msg)
except UnknownEvenFeatureBits:
return
channel_info = channel_info._replace(capacity_sat=capacity_sat)
self._channels[channel_info.short_channel_id] = channel_info
self._channels_for_node[channel_info.node1_id].add(channel_info.short_channel_id)
self._channels_for_node[channel_info.node2_id].add(channel_info.short_channel_id)
self.save_channel(channel_info)
def print_change(self, old_policy: Policy, new_policy: Policy):
# print what changed between policies
if old_policy.cltv_expiry_delta != new_policy.cltv_expiry_delta:
self.logger.info(f'cltv_expiry_delta: {old_policy.cltv_expiry_delta} -> {new_policy.cltv_expiry_delta}')
if old_policy.htlc_minimum_msat != new_policy.htlc_minimum_msat:
self.logger.info(f'htlc_minimum_msat: {old_policy.htlc_minimum_msat} -> {new_policy.htlc_minimum_msat}')
if old_policy.htlc_maximum_msat != new_policy.htlc_maximum_msat:
self.logger.info(f'htlc_maximum_msat: {old_policy.htlc_maximum_msat} -> {new_policy.htlc_maximum_msat}')
if old_policy.fee_base_msat != new_policy.fee_base_msat:
self.logger.info(f'fee_base_msat: {old_policy.fee_base_msat} -> {new_policy.fee_base_msat}')
if old_policy.fee_proportional_millionths != new_policy.fee_proportional_millionths:
self.logger.info(f'fee_proportional_millionths: {old_policy.fee_proportional_millionths} -> {new_policy.fee_proportional_millionths}')
if old_policy.channel_flags != new_policy.channel_flags:
self.logger.info(f'channel_flags: {old_policy.channel_flags} -> {new_policy.channel_flags}')
if old_policy.message_flags != new_policy.message_flags:
self.logger.info(f'message_flags: {old_policy.message_flags} -> {new_policy.message_flags}')
def add_channel_updates(self, payloads, max_age=None, verify=True) -> CategorizedChannelUpdates:
orphaned = []
expired = []
deprecated = []
good = []
to_delete = []
# filter orphaned and expired first
known = []
now = int(time.time())
for payload in payloads:
short_channel_id = ShortChannelID(payload['short_channel_id'])
timestamp = int.from_bytes(payload['timestamp'], "big")
if max_age and now - timestamp > max_age:
expired.append(payload)
continue
channel_info = self._channels.get(short_channel_id)
if not channel_info:
orphaned.append(payload)
continue
flags = int.from_bytes(payload['channel_flags'], 'big')
direction = flags & FLAG_DIRECTION
start_node = channel_info.node1_id if direction == 0 else channel_info.node2_id
payload['start_node'] = start_node
known.append(payload)
# compare updates to existing database entries
for payload in known:
timestamp = int.from_bytes(payload['timestamp'], "big")
start_node = payload['start_node']
short_channel_id = ShortChannelID(payload['short_channel_id'])
key = (start_node, short_channel_id)
old_policy = self._policies.get(key)
if old_policy and timestamp <= old_policy.timestamp:
deprecated.append(payload)
continue
good.append(payload)
if verify:
self.verify_channel_update(payload)
policy = Policy.from_msg(payload)
self._policies[key] = policy
self.save_policy(policy)
#
self.update_counts()
return CategorizedChannelUpdates(
orphaned=orphaned,
expired=expired,
deprecated=deprecated,
good=good,
to_delete=to_delete,
)
def add_channel_update(self, payload):
# called from add_own_channel
# the update may be categorized as deprecated because of caching
categorized_chan_upds = self.add_channel_updates([payload], verify=False)
def create_database(self):
c = self.conn.cursor()
c.execute(create_node_info)
c.execute(create_address)
c.execute(create_policy)
c.execute(create_channel_info)
self.conn.commit()
@sql
def save_policy(self, policy):
c = self.conn.cursor()
c.execute("""REPLACE INTO policy (key, cltv_expiry_delta, htlc_minimum_msat, htlc_maximum_msat, fee_base_msat, fee_proportional_millionths, channel_flags, message_flags, timestamp) VALUES (?,?,?,?,?,?,?,?,?)""", list(policy))
@sql
def delete_policy(self, node_id, short_channel_id):
key = short_channel_id + node_id
c = self.conn.cursor()
c.execute("""DELETE FROM policy WHERE key=?""", (key,))
@sql
def save_channel(self, channel_info):
c = self.conn.cursor()
c.execute("REPLACE INTO channel_info (short_channel_id, node1_id, node2_id, capacity_sat) VALUES (?,?,?,?)", list(channel_info))
@sql
def delete_channel(self, short_channel_id):
c = self.conn.cursor()
c.execute("""DELETE FROM channel_info WHERE short_channel_id=?""", (short_channel_id,))
@sql
def save_node(self, node_info):
c = self.conn.cursor()
c.execute("REPLACE INTO node_info (node_id, features, timestamp, alias) VALUES (?,?,?,?)", list(node_info))
@sql
def save_node_address(self, node_id, peer, now):
c = self.conn.cursor()
c.execute("REPLACE INTO address (node_id, host, port, timestamp) VALUES (?,?,?,?)", (node_id, peer.host, peer.port, now))
@sql
def save_node_addresses(self, node_id, node_addresses):
c = self.conn.cursor()
for addr in node_addresses:
c.execute("SELECT * FROM address WHERE node_id=? AND host=? AND port=?", (addr.node_id, addr.host, addr.port))
r = c.fetchall()
if r == []:
c.execute("INSERT INTO address (node_id, host, port, timestamp) VALUES (?,?,?,?)", (addr.node_id, addr.host, addr.port, 0))
def verify_channel_update(self, payload):
short_channel_id = payload['short_channel_id']
short_channel_id = ShortChannelID(short_channel_id)
if constants.net.rev_genesis_bytes() != payload['chain_hash']:
raise Exception('wrong chain hash')
if not verify_sig_for_channel_update(payload, payload['start_node']):
raise Exception(f'failed verifying channel update for {short_channel_id}')
def add_node_announcement(self, msg_payloads):
if type(msg_payloads) is dict:
msg_payloads = [msg_payloads]
old_addr = None
new_nodes = {}
for msg_payload in msg_payloads:
try:
node_info, node_addresses = NodeInfo.from_msg(msg_payload)
except UnknownEvenFeatureBits:
continue
node_id = node_info.node_id
# Ignore node if it has no associated channel (DoS protection)
if node_id not in self._channels_for_node:
#self.logger.info('ignoring orphan node_announcement')
continue
node = self._nodes.get(node_id)
if node and node.timestamp >= node_info.timestamp:
continue
node = new_nodes.get(node_id)
if node and node.timestamp >= node_info.timestamp:
continue
# save
self._nodes[node_id] = node_info
self.save_node(node_info)
for addr in node_addresses:
self._addresses[node_id].add((addr.host, addr.port, 0))
self.save_node_addresses(node_id, node_addresses)
self.logger.debug("on_node_announcement: %d/%d"%(len(new_nodes), len(msg_payloads)))
self.update_counts()
def get_routing_policy_for_channel(self, start_node_id: bytes,
short_channel_id: bytes) -> Optional[Policy]:
if not start_node_id or not short_channel_id: return None
channel_info = self.get_channel_info(short_channel_id)
if channel_info is not None:
return self.get_policy_for_node(short_channel_id, start_node_id)
msg = self._channel_updates_for_private_channels.get((start_node_id, short_channel_id))
if not msg:
return None
return Policy.from_msg(msg) # won't actually be written to DB
def get_old_policies(self, delta):
now = int(time.time())
return list(k for k, v in list(self._policies.items()) if v.timestamp <= now - delta)
def prune_old_policies(self, delta):
l = self.get_old_policies(delta)
if l:
for k in l:
self._policies.pop(k)
self.delete_policy(*k)
self.update_counts()
self.logger.info(f'Deleting {len(l)} old policies')
def get_orphaned_channels(self):
ids = set(x[1] for x in self._policies.keys())
return list(x for x in self._channels.keys() if x not in ids)
def prune_orphaned_channels(self):
l = self.get_orphaned_channels()
if l:
for short_channel_id in l:
self.remove_channel(short_channel_id)
self.update_counts()
self.logger.info(f'Deleting {len(l)} orphaned channels')
def add_channel_update_for_private_channel(self, msg_payload: dict, start_node_id: bytes):
if not verify_sig_for_channel_update(msg_payload, start_node_id):
return # ignore
short_channel_id = ShortChannelID(msg_payload['short_channel_id'])
msg_payload['start_node'] = start_node_id
self._channel_updates_for_private_channels[(start_node_id, short_channel_id)] = msg_payload
def remove_channel(self, short_channel_id: ShortChannelID):
channel_info = self._channels.pop(short_channel_id, None)
if channel_info:
self._channels_for_node[channel_info.node1_id].remove(channel_info.short_channel_id)
self._channels_for_node[channel_info.node2_id].remove(channel_info.short_channel_id)
# delete from database
self.delete_channel(short_channel_id)
def get_node_addresses(self, node_id):
return self._addresses.get(node_id)
@sql
@profiler
def load_data(self):
c = self.conn.cursor()
c.execute("""SELECT * FROM address""")
for x in c:
node_id, host, port, timestamp = x
self._addresses[node_id].add((str(host), int(port), int(timestamp or 0)))
c.execute("""SELECT * FROM channel_info""")
for x in c:
x = (ShortChannelID.normalize(x[0]), *x[1:])
ci = ChannelInfo(*x)
self._channels[ci.short_channel_id] = ci
c.execute("""SELECT * FROM node_info""")
for x in c:
ni = NodeInfo(*x)
self._nodes[ni.node_id] = ni
c.execute("""SELECT * FROM policy""")
for x in c:
p = Policy(*x)
self._policies[(p.start_node, p.short_channel_id)] = p
for channel_info in self._channels.values():
self._channels_for_node[channel_info.node1_id].add(channel_info.short_channel_id)
self._channels_for_node[channel_info.node2_id].add(channel_info.short_channel_id)
self.logger.info(f'load data {len(self._channels)} {len(self._policies)} {len(self._channels_for_node)}')
self.update_counts()
self.count_incomplete_channels()
self.data_loaded.set()
def count_incomplete_channels(self):
out = set()
for short_channel_id, ci in self._channels.items():
p1 = self.get_policy_for_node(short_channel_id, ci.node1_id)
p2 = self.get_policy_for_node(short_channel_id, ci.node2_id)
if p1 is None or p2 is None:
out.add(short_channel_id)
self.logger.info(f'semi-orphaned: {len(out)}')
def get_policy_for_node(self, short_channel_id: bytes, node_id: bytes) -> Optional['Policy']:
return self._policies.get((node_id, short_channel_id))
def get_channel_info(self, channel_id: bytes) -> ChannelInfo:
return self._channels.get(channel_id)
def get_channels_for_node(self, node_id) -> Set[bytes]:
"""Returns the set of channels that have node_id as one of the endpoints."""
return self._channels_for_node.get(node_id) or set()

File diff suppressed because it is too large

View file

@ -2658,465 +2658,5 @@
[ [
"000000000000287fa294ea557835d8c98bfe94c4d8b18d5b10f1b62d68957113", "000000000000287fa294ea557835d8c98bfe94c4d8b18d5b10f1b62d68957113",
0 0
],
[
"000000000001d842f5a0dff13820ba1e151fd8c886e28e648a0be41f3a3f1cb3",
0
],
[
"000000000000906854973b2ec51409f0b78b25b074eef3f0dbb31e1060c07c3d",
0
],
[
"00000000000009e694e22b97a4757bffef74f0ccd832398b3e815171636e3a85",
0
],
[
"0000000000000594b95678610bd47671b1142eb575d1c1d4a0073f69a71a3c65",
0
],
[
"00000000000002ac6d5c058c9932f350aeef84f6e334f4e01b40be4db537f8c2",
0
],
[
"00000000000000c9a91d8277c58eab3bfda59d3068142dd54216129e5597ccbd",
0
],
[
"0000000000000051bff2f64c9078fb346d6a2a209ba5c3ffa0048c6b7027e47f",
0
],
[
"000000000000df3c366a105ce9ed82a4917c9e19f0736493894feaba2542c7cd",
0
],
[
"0000000000007c8006959f91675b2dbf6264a1172279c826ae7f561b70e88b12",
0
],
[
"0000000000015ab3720de7669e8731c84c392aae3509d937b8d883c304e0ca86",
0
],
[
"0000000000016d7156ee43da389020fb5d30f05e11498c54f7e324561d6a6039",
0
],
[
"0000000000009c9592f83d63fe39839080ced253e1d71c52bce576f823b7722a",
0
],
[
"00000000003dee6b438ddf51b831fbedb9d2ee91644aaf5866e3a85c740b3a99",
0
],
[
"00000000000155f5594d8a3ade605d1504ee9a6f6389f1c4516e974698ebb9e4",
0
],
[
"000000000001e21adfc306bf4aa2ad90e3c2aa4a43263d1bbdc70bf9f1593416",
0
],
[
"0000000000008218e84ba7d9850a5c12b77ec5d1348e7cbdfdcb86f8fe929682",
0
],
[
"00000000000054fb41b42b30fff1738104c3edca6dab47c75e4d3565bc4b9e34",
0
],
[
"0000000000002763b825c315ba35959dcc1bd8114627949ede769ac2eece8248",
0
],
[
"00000000000007437044da0baed38a28e2991c6a527f495e91739a8d9c35acbb",
0
],
[
"000000000000032d74ad8eb0a0be6b39b8e095bd9ca8537da93aae15087aafaf",
0
],
[
"000000000000006d4025181f5b54cca6d730cc26313817c6529ba9ed62cc83b3",
0
],
[
"000000001c3ad81ffea0b74d356b6886fd3381506b7c568f96c88a78815ede09",
0
],
[
"000000000140739d224af1254712d8c4e9fb9082b381baf22c628e459157ce49",
0
],
[
"000000000306491c835f1a03c8d1e17645435296d3593dacba8ab1a7d9341d38",
0
],
[
"000000000002b383618b228eb8e4cfcf269ba647b91ac6d60ddd070295709ad1",
0
],
[
"000000000000c90fc724a76407b4405032474fc8d1649817f7ad238b96856c6a",
0
],
[
"0000000000002d5a62b323a5f213152dd84e2f415a3c6c28043c0ccaaddb3229",
0
],
[
"0000000000008c086a21457ba523b682356c760538000a480650cd667a29647a",
0
],
[
"00000000000007c586d36266aa83d8cc702aa29f31e3cc01c6eeac5a0f5f9887",
0
],
[
"0000000000013bf175e35603f24758bf8d40b1f5c266e707e3ba4de6fae43a7f",
0
],
[
"00000000000096841c486983a4333afb2525549abe57e7263723b9782e9cfef1",
0
],
[
"00000000000012dfd7c4e1f40a1dd4833da2d010a33fc65c053871884146c941",
0
],
[
"0000000000000b47eb6bc8c6562b5a30cefcf81623a37f6f61cc7497a530eb33",
0
],
[
"0000000000000021ca4558aeb796f900e581c029d751f89e1a69ae9ba9f6ebb3",
0
],
[
"00000000000000a5bf9029aebb1956200304ffee31bc09f1323ae412d81fa2b2",
0
],
[
"0000000000000046f38ada53de3346d8191f69c8f3c0ba9e1950f5bf291989c4",
0
],
[
"00000000658b5a572ea407ac49a1dccf85d67d0adfc5f613b17fa3fff1d99d51",
0
],
[
"000000005d6be9ae758c520b0061feee99cd0a231f982cc074e4d0ced1f96952",
0
],
[
"0000000001aa4671747707d329a94c398c04aaf2268e551ac5d6a7f29ffd4acd",
0
],
[
"0000000004b441b97963463faca7a933469fabfa3e7b243621159e445e5c192a",
0
],
[
"0000000002ce8842113bc875330fa77f3b984a90806a5ec0bb73321fef3c76c6",
0
],
[
"0000000000019761bf9a1c6f679b880e9fb45b3f6dc1accdbdcfce01368c9377",
0
],
[
"0000000000008a069efd1a7923557be3d9584d307b2555dc0a56d66e74e083e1",
0
],
[
"000000000001c14cec52030659ef7d45318ca574f1633ef69e9c8c9bd7e45289",
0
],
[
"0000000000009cfccb8a27f66f1d9ff40c9d47449f78d82fee2465daca582ab7",
0
],
[
"0000000000007f30cfae7fbb8ff965f70d500b98be202b1dd57ea418500c922d",
0
],
[
"0000000000002cbd2dbab4352fe4979e0d5afc47f21ef575ae0e3bb620a5478a",
0
],
[
"000000000000017a872a5c7a15b3cb6e1ecf9e009759848b85c19ca6e7bd16d2",
0
],
[
"00000000000001ade79216032b49854c966a1061fd3f8c6c56a0d38d0024629e",
0
],
[
"0000000000000090b8dfe4dde9f9f8d675642db97b3649bd147f60d1fc64cd76",
0
],
[
"0000000000000109ed5f0d6fc387ad1bc45db1e522f76adce131067fc64440ec",
0
],
[
"000000000000003105650f0b8e7b4cb466cd32ff5608f59906879aff5cad64a7",
0
],
[
"0000000000000113d4262419a8aa3a4fe928c0ea81893a2d2ffee5258b2085d8",
0
],
[
"00000000000000f15b8a196b1c3568d14b5a7856da2fef7a7f5548266582ff28",
0
],
[
"0000000000000034fb9e91c8b5f7147bd1a4f089d19a266d183df6f8497d1dff",
0
],
[
"000000000000005e51ad800c9e8ab11abb4b945f5ea86b120fa140c8af6301e0",
0
],
[
"00000000000000e903f2002fd08a732fd5380ea1f2dac26bb84d57e247af8ac2",
0
],
[
"000000000015115dac432884296259f508dae6b6f5f15cef17939840f5a295c3",
0
],
[
"000000000029913c80e5f49d413603d91f5fd67b76a7e187f76c077973be6f8a",
0
],
[
"00000000002e864e470ccec1fec0ca5f2053c9a9b8978a40f3482b4d30f683a9",
0
],
[
"00000000001ccf523df85df9abdb7c5bbad5c5fcbd12a4a8eb4700de7291f03b",
0
],
[
"00000000002aa81027df021e3ccde48dff6e7f01a4aba27727308f2ce17f2f1a",
0
],
[
"000000000015a577d71d65bde7e8f5359458336218dc024584f7510b38dc1259",
0
],
[
"00000000003aef1877bcc6817cac497aeb95af3336ba2908e8194f96a2c9fc29",
0
],
[
"00000000000ccd42d542ddca68300ec2a9db2564327108234641535fd51aa7f3",
0
],
[
"000000000000a2652b2e523866f3c4d5c07dc1c204d439b627f2ab2848bfa139",
0
],
[
"0000000000002c065179a394d8da754c2e2db5fed21def076c16c24a902b448d",
0
],
[
"000000000000175a878558186e53b559e494ce7e9f687bf0462d63169bfcce03",
0
],
[
"00000000000007524a71cc81cadbd1ddf9d38848fa8081ad2a72eade4b70d1c1",
0
],
[
"0000000000000159321405d24d99131df6bf69ffeca30c4a949926807c4175ad",
0
],
[
"000000000000016c271ae44c8dca3567b332ec178a243be2a7dfa7a0aef270c3",
0
],
[
"00000000000000a7d62de601cdf73e25c49c1c99717c94ffd574fc657fd42fa8",
0
],
[
"0000000000000052d492170de491c1355d640bae48f4d954009e963f6f9a18c3",
0
],
[
"000000006f5707f2f707b9ddcce2739723e911210b131da4ca1efdff581212ad",
0
],
[
"00000000021be68dc9c33db0c2222e97cd2c06fc43834e8f5292133c45c2abb4",
0
],
[
"00000000019ca3eaf7c39f70a7a1a736f74021abf885bebc5d91aa946496bac5",
0
],
[
"00000000006e4752fbe2627ebb2d0118f7437908a8219f973324727195335209",
0
],
[
"00000000038471612a0955307f367071888985707ec0e42c82f9145caed8fea1",
0
],
[
"000000000004604d2d7d921b21d86f2ade82ded3af33877ec59d47072023d763",
0
],
[
"000000000034a3e45665a8dcbb94e7a218375a5199b3f3ca2cc7b5fe151bb198",
0
],
[
"0000000000043fb2c2ff5db60c6d2d35a633746e8585e04a096a9b55a4787fe6",
0
],
[
"0000000000020d4d8735b66134c1fcdd1d3f3d135b9ff3f70968ef96c227fb75",
0
],
[
"0000000000004f3f4dc1fa11a6ad9bd320413b042eb599c4599a14d341f6825f",
0
],
[
"0000000000001e0a495d23acf46a44f8b569ada39ac70730da5e9109871b77e9",
0
],
[
"00000000000002257a08acca858f239fabb258a7cc1665fc464f6e18e9372d32",
0
],
[
"00000000000002845d416fbfa05a5d40ba5ba5418a64f06443042a53cf1fd608",
0
],
[
"00000000000000fee91a2ae8b8d1bb9a687c9b28b0185723c8ff6ffdac2e9ce4",
0
],
[
"00000000000001d6874b4d88e387098c0b7100ff674d99781fc7045a78216a15",
0
],
[
"00000000000144a03e701c199673d72fc63766bcf0cdaf565f4c941c7ef72971",
0
],
[
"000000009b6cc4d8aee22cca6880e4d7bb30bff2851034ad437d63d3a7278de7",
0
],
[
"0000000023e998d64618475e31b4aee9d83d2bc32cb6d062aa97c0b4651fed08",
0
],
[
"0000000000036f4bf6b42a7776a97872fa24362064c5bc4bc946acb70ab6fbf4",
0
],
[
"0000000001e2252455ffd0cf0b4109ace996a0d2a03999f5cc5c5e08fb6130ac",
0
],
[
"0000000000002713db42d53f0c2d86c904f4e0338652acc1cbda953c530a15bb",
0
],
[
"000000000001b075f9ccc604a50326732f5d42373c4a831978be0e2d830cac75",
0
],
[
"0000000000000bfa7d93c6b36298b933b1a652c95ee9f0de4151e007f3180391",
0
],
[
"000000000002c60a0af1cfeb9c26c60970b354897fd0a94c8e5c414d0767b06b",
0
],
[
"0000000000001f2d9462507a9408859fb0b5f97013d6b4577337b0382340c5aa",
0
],
[
"0000000000000b7428e0d3c6c7fd2df623a74125db4989b1c61c78eeed1bcde5",
0
],
[
"00000000000002e8b4f1fa041a37515c1b76d59994792f1c772c9a4993c194dc",
0
],
[
"0000000000094e70c0cf5185b480542a1faa8392a3f2f7f583d91e033856d7ce",
0
],
[
"000000005b036d8c18ed5d1219e4137bd71438c9b1ba7ff4d10a626e9a7bcc98",
0
],
[
"0000000008745d4a943e958f5cb5084646c0fe1cae57eeab666c3ad0d4ff1dec",
0
],
[
"00000000000f8c5b3455e540d074b5c71709e37f8950975953798d27bdc701fa",
0
],
[
"0000000000050885884f7ac233bb174cf7b33c037f81907f7766afe9d0ad9091",
0
],
[
"000000000002d7cd1043ccd0581a47d6fdf82a7cf1646b61495f917a48ebeb5c",
0
],
[
"000000000003a2b3e3d7ef47829db1672bfd79e49f32ef3a04ec7c4df355392b",
0
],
[
"0000000000032a6c7e5bc3878c1815bc6759594a4736638fdacaa5642be3e649",
0
],
[
"000000000001386a3904f0ba4f25dc7ace09b67a6fe8977e7aecc55813fa9ac5",
0
],
[
"0000000000003fe030a2231da87076679c1d38d323bf56b45ceb49a5128fb4b1",
0
],
[
"000000000000147cd3b6195c6a727cd4fe6b3a879d7934e52bf29020ed9c6fcc",
0
],
[
"00000000000003ed5a0a7176f3f1b3ed26510045af2860e5b6313b358774fbad",
0
],
[
"00000000000000c2952ac8a580895ac13799a9c29badb6599bc4a86c1fc83b6e",
0
],
[
"0000000000000056f49d6f7b8243eecf6597946158efe044b07fd091398e380d",
0
],
[
"000000000000006b039683c36b18ec712346521edce4dc5b81cdaf6475d89bd7",
0
] ]
] ]

View file

@ -22,15 +22,12 @@
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN # ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE. # SOFTWARE.
from collections import defaultdict from collections import defaultdict, namedtuple
from math import floor, log10 from math import floor, log10
from typing import NamedTuple, List, Callable, Sequence, Union, Dict, Tuple
from decimal import Decimal
from .bitcoin import sha256, COIN, is_address from .bitcoin import sha256, COIN, TYPE_ADDRESS, is_address
from .transaction import Transaction, TxOutput, PartialTransaction, PartialTxInput, PartialTxOutput from .transaction import Transaction, TxOutput
from .util import NotEnoughFunds from .util import NotEnoughFunds, PrintError
from .logging import Logger
# A simple deterministic PRNG. Used to deterministically shuffle a # A simple deterministic PRNG. Used to deterministically shuffle a
@ -71,90 +68,54 @@ class PRNG:
x[i], x[j] = x[j], x[i] x[i], x[j] = x[j], x[i]
class Bucket(NamedTuple): Bucket = namedtuple('Bucket',
desc: str ['desc',
weight: int # as in BIP-141 'weight', # as in BIP-141
value: int # in satoshis 'value', # in satoshis
effective_value: int # estimate of value left after subtracting fees. in satoshis 'coins', # UTXOs
coins: List[PartialTxInput] # UTXOs 'min_height', # min block height where a coin was confirmed
min_height: int # min block height where a coin was confirmed 'witness']) # whether any coin uses segwit
witness: bool # whether any coin uses segwit
def strip_unneeded(bkts, sufficient_funds):
class ScoredCandidate(NamedTuple):
penalty: float
tx: PartialTransaction
buckets: List[Bucket]
def strip_unneeded(bkts: List[Bucket], sufficient_funds) -> List[Bucket]:
'''Remove buckets that are unnecessary in achieving the spend amount''' '''Remove buckets that are unnecessary in achieving the spend amount'''
if sufficient_funds([], bucket_value_sum=0): bkts = sorted(bkts, key = lambda bkt: bkt.value)
# none of the buckets are needed
return []
bkts = sorted(bkts, key=lambda bkt: bkt.value, reverse=True)
bucket_value_sum = 0
for i in range(len(bkts)): for i in range(len(bkts)):
bucket_value_sum += (bkts[i]).value if not sufficient_funds(bkts[i + 1:]):
if sufficient_funds(bkts[:i+1], bucket_value_sum=bucket_value_sum): return bkts[i:]
return bkts[:i+1] # Shouldn't get here
raise Exception("keeping all buckets is still not enough") return bkts
class CoinChooserBase(PrintError):
class CoinChooserBase(Logger):
enable_output_value_rounding = False enable_output_value_rounding = False
def __init__(self): def keys(self, coins):
Logger.__init__(self)
def keys(self, coins: Sequence[PartialTxInput]) -> Sequence[str]:
raise NotImplementedError raise NotImplementedError
def bucketize_coins(self, coins: Sequence[PartialTxInput], *, fee_estimator_vb): def bucketize_coins(self, coins):
keys = self.keys(coins) keys = self.keys(coins)
buckets = defaultdict(list) # type: Dict[str, List[PartialTxInput]] buckets = defaultdict(list)
for key, coin in zip(keys, coins): for key, coin in zip(keys, coins):
buckets[key].append(coin) buckets[key].append(coin)
# fee_estimator returns fee to be paid, for given vbytes.
# guess whether it is just returning a constant as follows.
constant_fee = fee_estimator_vb(2000) == fee_estimator_vb(200)
def make_Bucket(desc: str, coins: List[PartialTxInput]): def make_Bucket(desc, coins):
witness = any(Transaction.is_segwit_input(coin, guess_for_address=True) for coin in coins) witness = any(Transaction.is_segwit_input(coin, guess_for_address=True) for coin in coins)
# note that we're guessing whether the tx uses segwit based # note that we're guessing whether the tx uses segwit based
# on this single bucket # on this single bucket
weight = sum(Transaction.estimated_input_weight(coin, witness) weight = sum(Transaction.estimated_input_weight(coin, witness)
for coin in coins) for coin in coins)
value = sum(coin.value_sats() for coin in coins) value = sum(coin['value'] for coin in coins)
min_height = min(coin.block_height for coin in coins) min_height = min(coin['height'] for coin in coins)
assert min_height is not None return Bucket(desc, weight, value, coins, min_height, witness)
# the fee estimator is typically either a constant or a linear function,
# so the "function:" effective_value(bucket) will be homomorphic for addition
# i.e. effective_value(b1) + effective_value(b2) = effective_value(b1 + b2)
if constant_fee:
effective_value = value
else:
# when converting from weight to vBytes, instead of rounding up,
# keep fractional part, to avoid overestimating fee
fee = fee_estimator_vb(Decimal(weight) / 4)
effective_value = value - fee
return Bucket(desc=desc,
weight=weight,
value=value,
effective_value=effective_value,
coins=coins,
min_height=min_height,
witness=witness)
return list(map(make_Bucket, buckets.keys(), buckets.values())) return list(map(make_Bucket, buckets.keys(), buckets.values()))
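The Decimal division above converts weight units to virtual bytes (4 wu per vbyte) while keeping the fractional part, so the bucket's fee is not rounded up, e.g.:
from decimal import Decimal
weight = 271                                    # illustrative bucket weight (wu)
assert Decimal(weight) / 4 == Decimal('67.75')  # vs. 68 vbytes when rounding up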
def penalty_func(self, base_tx, *, def penalty_func(self, tx):
tx_from_buckets: Callable[[List[Bucket]], Tuple[PartialTransaction, List[PartialTxOutput]]]) \ def penalty(candidate):
-> Callable[[List[Bucket]], ScoredCandidate]: return 0
raise NotImplementedError return penalty
def _change_amounts(self, tx: PartialTransaction, count: int, fee_estimator_numchange) -> List[int]: def change_amounts(self, tx, count, fee_estimator, dust_threshold):
# Break change up if bigger than max_change # Break change up if bigger than max_change
output_amounts = [o.value for o in tx.outputs()] output_amounts = [o.value for o in tx.outputs()]
# Don't split change of less than 0.02 BTC # Don't split change of less than 0.02 BTC
@ -163,7 +124,7 @@ class CoinChooserBase(Logger):
# Use N change outputs # Use N change outputs
for n in range(1, count + 1): for n in range(1, count + 1):
# How much is left if we add this many change outputs? # How much is left if we add this many change outputs?
change_amount = max(0, tx.get_fee() - fee_estimator_numchange(n)) change_amount = max(0, tx.get_fee() - fee_estimator(n))
if change_amount // n <= max_change: if change_amount // n <= max_change:
break break
@ -200,7 +161,7 @@ class CoinChooserBase(Logger):
# no more than 10**max_dp_to_round_for_privacy # no more than 10**max_dp_to_round_for_privacy
# e.g. a max of 2 decimal places means losing 100 satoshis to fees # e.g. a max of 2 decimal places means losing 100 satoshis to fees
max_dp_to_round_for_privacy = 2 if self.enable_output_value_rounding else 0 max_dp_to_round_for_privacy = 2 if self.enable_output_value_rounding else 0
N = int(pow(10, min(max_dp_to_round_for_privacy, zeroes[0]))) N = pow(10, min(max_dp_to_round_for_privacy, zeroes[0]))
amount = (remaining // N) * N amount = (remaining // N) * N
amounts.append(amount) amounts.append(amount)
@ -208,157 +169,111 @@ class CoinChooserBase(Logger):
return amounts return amounts
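As the comment above notes, rounding change to a multiple of 10**max_dp_to_round_for_privacy base units sacrifices only a small amount to fees; e.g. with two decimal places (N = 100):
remaining = 1_234_567                       # illustrative change, in satoshis
N = int(pow(10, 2))                         # max_dp_to_round_for_privacy == 2
assert (remaining // N) * N == 1_234_500    # the 67 sat difference goes to fees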
def _change_outputs(self, tx: PartialTransaction, change_addrs, fee_estimator_numchange, def change_outputs(self, tx, change_addrs, fee_estimator, dust_threshold):
dust_threshold) -> List[PartialTxOutput]: amounts = self.change_amounts(tx, len(change_addrs), fee_estimator,
amounts = self._change_amounts(tx, len(change_addrs), fee_estimator_numchange) dust_threshold)
assert min(amounts) >= 0 assert min(amounts) >= 0
assert len(change_addrs) >= len(amounts) assert len(change_addrs) >= len(amounts)
assert all([isinstance(amt, int) for amt in amounts])
# If change is above dust threshold after accounting for the # If change is above dust threshold after accounting for the
# size of the change output, add it to the transaction. # size of the change output, add it to the transaction.
dust = sum(amount for amount in amounts if amount < dust_threshold)
amounts = [amount for amount in amounts if amount >= dust_threshold] amounts = [amount for amount in amounts if amount >= dust_threshold]
change = [PartialTxOutput.from_address_and_value(addr, amount) change = [TxOutput(TYPE_ADDRESS, addr, amount)
for addr, amount in zip(change_addrs, amounts)] for addr, amount in zip(change_addrs, amounts)]
self.print_error('change:', change)
if dust:
self.print_error('not keeping dust', dust)
return change return change
def _construct_tx_from_selected_buckets(self, *, buckets: Sequence[Bucket], def make_tx(self, coins, outputs, change_addrs, fee_estimator,
base_tx: PartialTransaction, change_addrs, dust_threshold):
fee_estimator_w, dust_threshold, """Select unspent coins to spend to pay outputs. If the change is
base_weight) -> Tuple[PartialTransaction, List[PartialTxOutput]]: greater than dust_threshold (after adding the change output to
# make a copy of base_tx so it won't get mutated the transaction) it is kept, otherwise none is sent and it is
tx = PartialTransaction.from_io(base_tx.inputs()[:], base_tx.outputs()[:]) added to the transaction fee.
Note: fee_estimator expects virtual bytes
"""
# Deterministic randomness from coins
utxos = [c['prevout_hash'] + str(c['prevout_n']) for c in coins]
self.p = PRNG(''.join(sorted(utxos)))
# Copy the outputs so when adding change we don't modify "outputs"
tx = Transaction.from_io([], outputs[:])
# Weight of the transaction with no inputs and no change
# Note: this will use legacy tx serialization as the need for "segwit"
# would be detected from inputs. The only side effect should be that the
# marker and flag are excluded, which is compensated in get_tx_weight()
base_weight = tx.estimated_weight()
spent_amount = tx.output_value()
def fee_estimator_w(weight):
return fee_estimator(Transaction.virtual_size_from_weight(weight))
def get_tx_weight(buckets):
total_weight = base_weight + sum(bucket.weight for bucket in buckets)
is_segwit_tx = any(bucket.witness for bucket in buckets)
if is_segwit_tx:
total_weight += 2 # marker and flag
# non-segwit inputs were previously assumed to have
# a witness of '' instead of '00' (hex)
# note that mixed legacy/segwit buckets are already ok
num_legacy_inputs = sum((not bucket.witness) * len(bucket.coins)
for bucket in buckets)
total_weight += num_legacy_inputs
return total_weight
def sufficient_funds(buckets):
'''Given a list of buckets, return True if it has enough
value to pay for the transaction'''
total_input = sum(bucket.value for bucket in buckets)
total_weight = get_tx_weight(buckets)
return total_input >= spent_amount + fee_estimator_w(total_weight)
# Collect the coins into buckets, choose a subset of the buckets
buckets = self.bucketize_coins(coins)
buckets = self.choose_buckets(buckets, sufficient_funds,
self.penalty_func(tx))
tx.add_inputs([coin for b in buckets for coin in b.coins]) tx.add_inputs([coin for b in buckets for coin in b.coins])
tx_weight = self._get_tx_weight(buckets, base_weight=base_weight) tx_weight = get_tx_weight(buckets)
# change is sent back to sending address unless specified # change is sent back to sending address unless specified
if not change_addrs: if not change_addrs:
change_addrs = [tx.inputs()[0].address] change_addrs = [tx.inputs()[0]['address']]
# note: this is not necessarily the final "first input address" # note: this is not necessarily the final "first input address"
# because the inputs had not been sorted at this point # because the inputs had not been sorted at this point
assert is_address(change_addrs[0]) assert is_address(change_addrs[0])
# This takes a count of change outputs and returns a tx fee # This takes a count of change outputs and returns a tx fee
output_weight = 4 * Transaction.estimated_output_size(change_addrs[0]) output_weight = 4 * Transaction.estimated_output_size(change_addrs[0])
fee_estimator_numchange = lambda count: fee_estimator_w(tx_weight + count * output_weight) fee = lambda count: fee_estimator_w(tx_weight + count * output_weight)
change = self._change_outputs(tx, change_addrs, fee_estimator_numchange, dust_threshold) change = self.change_outputs(tx, change_addrs, fee, dust_threshold)
tx.add_outputs(change) tx.add_outputs(change)
return tx, change self.print_error("using %d inputs" % len(tx.inputs()))
self.print_error("using buckets:", [bucket.desc for bucket in buckets])
def _get_tx_weight(self, buckets: Sequence[Bucket], *, base_weight: int) -> int:
"""Given a collection of buckets, return the total weight of the
resulting transaction.
base_weight is the weight of the tx that includes the fixed (non-change)
outputs and potentially some fixed inputs. Note that the change outputs
at this point are not yet known so they are NOT accounted for.
"""
total_weight = base_weight + sum(bucket.weight for bucket in buckets)
is_segwit_tx = any(bucket.witness for bucket in buckets)
if is_segwit_tx:
total_weight += 2 # marker and flag
# non-segwit inputs were previously assumed to have
# a witness of '' instead of '00' (hex)
# note that mixed legacy/segwit buckets are already ok
num_legacy_inputs = sum((not bucket.witness) * len(bucket.coins)
for bucket in buckets)
total_weight += num_legacy_inputs
return total_weight
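A quick arithmetic check of the weight accounting above, with illustrative numbers: a 400-wu base, one segwit bucket of 272 wu and one legacy bucket holding two inputs totalling 592 wu:
assert 400 + 272 + 592 + 2 + 2 == 1268   # +2 marker/flag, +2 for the two
                                         # legacy inputs' '00' witness bytes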
def make_tx(self, *, coins: Sequence[PartialTxInput], inputs: List[PartialTxInput],
outputs: List[PartialTxOutput], change_addrs: Sequence[str],
fee_estimator_vb: Callable, dust_threshold: int) -> PartialTransaction:
"""Select unspent coins to spend to pay outputs. If the change is
greater than dust_threshold (after adding the change output to
the transaction) it is kept, otherwise none is sent and it is
added to the transaction fee.
`inputs` and `outputs` are guaranteed to be a subset of the
inputs and outputs of the resulting transaction.
`coins` are further UTXOs we can choose from.
Note: fee_estimator_vb expects virtual bytes
"""
assert outputs, 'tx outputs cannot be empty'
# Deterministic randomness from coins
utxos = [c.prevout.serialize_to_network() for c in coins]
self.p = PRNG(b''.join(sorted(utxos)))
# Copy the outputs so when adding change we don't modify "outputs"
base_tx = PartialTransaction.from_io(inputs[:], outputs[:])
input_value = base_tx.input_value()
# Weight of the transaction with no inputs and no change
# Note: this will use legacy tx serialization as the need for "segwit"
# would be detected from inputs. The only side effect should be that the
# marker and flag are excluded, which is compensated in get_tx_weight()
# FIXME calculation will be off by this (2 wu) in case of RBF batching
base_weight = base_tx.estimated_weight()
spent_amount = base_tx.output_value()
def fee_estimator_w(weight):
return fee_estimator_vb(Transaction.virtual_size_from_weight(weight))
def sufficient_funds(buckets, *, bucket_value_sum):
'''Given a list of buckets, return True if it has enough
value to pay for the transaction'''
# assert bucket_value_sum == sum(bucket.value for bucket in buckets) # expensive!
total_input = input_value + bucket_value_sum
if total_input < spent_amount: # shortcut for performance
return False
# note re performance: so far this was constant time
# what follows is linear in len(buckets)
total_weight = self._get_tx_weight(buckets, base_weight=base_weight)
return total_input >= spent_amount + fee_estimator_w(total_weight)
def tx_from_buckets(buckets):
return self._construct_tx_from_selected_buckets(buckets=buckets,
base_tx=base_tx,
change_addrs=change_addrs,
fee_estimator_w=fee_estimator_w,
dust_threshold=dust_threshold,
base_weight=base_weight)
# Collect the coins into buckets
all_buckets = self.bucketize_coins(coins, fee_estimator_vb=fee_estimator_vb)
# Filter some buckets out. Only keep those that have positive effective value.
# Note that this filtering is intentionally done on the bucket level
# instead of per-coin, as each bucket should be either fully spent or not at all.
# (e.g. CoinChooserPrivacy ensures that same-address coins go into one bucket)
all_buckets = list(filter(lambda b: b.effective_value > 0, all_buckets))
# Choose a subset of the buckets
scored_candidate = self.choose_buckets(all_buckets, sufficient_funds,
self.penalty_func(base_tx, tx_from_buckets=tx_from_buckets))
tx = scored_candidate.tx
self.logger.info(f"using {len(tx.inputs())} inputs")
self.logger.info(f"using buckets: {[bucket.desc for bucket in scored_candidate.buckets]}")
return tx return tx
def choose_buckets(self, buckets: List[Bucket], def choose_buckets(self, buckets, sufficient_funds, penalty_func):
sufficient_funds: Callable,
penalty_func: Callable[[List[Bucket]], ScoredCandidate]) -> ScoredCandidate:
raise NotImplementedError('To be subclassed') raise NotImplemented('To be subclassed')
class CoinChooserRandom(CoinChooserBase): class CoinChooserRandom(CoinChooserBase):
def bucket_candidates_any(self, buckets: List[Bucket], sufficient_funds) -> List[List[Bucket]]: def bucket_candidates_any(self, buckets, sufficient_funds):
'''Returns a list of bucket sets.''' '''Returns a list of bucket sets.'''
if not buckets: if not buckets:
if sufficient_funds([], bucket_value_sum=0): raise NotEnoughFunds()
return [[]]
else:
raise NotEnoughFunds()
candidates = set() candidates = set()
# Add all singletons # Add all singletons
for n, bucket in enumerate(buckets): for n, bucket in enumerate(buckets):
if sufficient_funds([bucket], bucket_value_sum=bucket.value): if sufficient_funds([bucket]):
candidates.add((n, )) candidates.add((n, ))
# And now some random ones # And now some random ones
@ -369,23 +284,20 @@ class CoinChooserRandom(CoinChooserBase):
# incrementally combine buckets until sufficient # incrementally combine buckets until sufficient
self.p.shuffle(permutation) self.p.shuffle(permutation)
bkts = [] bkts = []
bucket_value_sum = 0
for count, index in enumerate(permutation): for count, index in enumerate(permutation):
bucket = buckets[index] bkts.append(buckets[index])
bkts.append(bucket) if sufficient_funds(bkts):
bucket_value_sum += bucket.value
if sufficient_funds(bkts, bucket_value_sum=bucket_value_sum):
candidates.add(tuple(sorted(permutation[:count + 1]))) candidates.add(tuple(sorted(permutation[:count + 1])))
break break
else: else:
# note: this assumes that the effective value of any bkt is >= 0 # FIXME this assumes that the effective value of any bkt is >= 0
# we should make sure not to choose buckets with <= 0 eff. val.
raise NotEnoughFunds() raise NotEnoughFunds()
candidates = [[buckets[n] for n in c] for c in candidates] candidates = [[buckets[n] for n in c] for c in candidates]
return [strip_unneeded(c, sufficient_funds) for c in candidates] return [strip_unneeded(c, sufficient_funds) for c in candidates]
def bucket_candidates_prefer_confirmed(self, buckets: List[Bucket], def bucket_candidates_prefer_confirmed(self, buckets, sufficient_funds):
sufficient_funds) -> List[List[Bucket]]:
"""Returns a list of bucket sets preferring confirmed coins. """Returns a list of bucket sets preferring confirmed coins.
Any bucket can be: Any bucket can be:
@ -403,20 +315,16 @@ class CoinChooserRandom(CoinChooserBase):
bucket_sets = [conf_buckets, unconf_buckets, other_buckets] bucket_sets = [conf_buckets, unconf_buckets, other_buckets]
already_selected_buckets = [] already_selected_buckets = []
already_selected_buckets_value_sum = 0
for bkts_choose_from in bucket_sets: for bkts_choose_from in bucket_sets:
try: try:
def sfunds(bkts, *, bucket_value_sum): def sfunds(bkts):
bucket_value_sum += already_selected_buckets_value_sum return sufficient_funds(already_selected_buckets + bkts)
return sufficient_funds(already_selected_buckets + bkts,
bucket_value_sum=bucket_value_sum)
candidates = self.bucket_candidates_any(bkts_choose_from, sfunds) candidates = self.bucket_candidates_any(bkts_choose_from, sfunds)
break break
except NotEnoughFunds: except NotEnoughFunds:
already_selected_buckets += bkts_choose_from already_selected_buckets += bkts_choose_from
already_selected_buckets_value_sum += sum(bucket.value for bucket in bkts_choose_from)
else: else:
raise NotEnoughFunds() raise NotEnoughFunds()
@ -425,14 +333,12 @@ class CoinChooserRandom(CoinChooserBase):
def choose_buckets(self, buckets, sufficient_funds, penalty_func): def choose_buckets(self, buckets, sufficient_funds, penalty_func):
candidates = self.bucket_candidates_prefer_confirmed(buckets, sufficient_funds) candidates = self.bucket_candidates_prefer_confirmed(buckets, sufficient_funds)
scored_candidates = [penalty_func(cand) for cand in candidates] penalties = [penalty_func(cand) for cand in candidates]
winner = min(scored_candidates, key=lambda x: x.penalty) winner = candidates[penalties.index(min(penalties))]
self.logger.info(f"Total number of buckets: {len(buckets)}") self.print_error("Bucket sets:", len(buckets))
self.logger.info(f"Num candidates considered: {len(candidates)}. " self.print_error("Winning penalty:", min(penalties))
f"Winning penalty: {winner.penalty}")
return winner return winner
class CoinChooserPrivacy(CoinChooserRandom): class CoinChooserPrivacy(CoinChooserRandom):
"""Attempts to better preserve user privacy. """Attempts to better preserve user privacy.
First, if any coin is spent from a user address, all coins are. First, if any coin is spent from a user address, all coins are.
@ -445,30 +351,26 @@ class CoinChooserPrivacy(CoinChooserRandom):
""" """
def keys(self, coins): def keys(self, coins):
return [coin.scriptpubkey.hex() for coin in coins] return [coin['address'] for coin in coins]
def penalty_func(self, base_tx, *, tx_from_buckets): def penalty_func(self, tx):
min_change = min(o.value for o in base_tx.outputs()) * 0.75 min_change = min(o.value for o in tx.outputs()) * 0.75
max_change = max(o.value for o in base_tx.outputs()) * 1.33 max_change = max(o.value for o in tx.outputs()) * 1.33
spent_amount = sum(o.value for o in tx.outputs())
def penalty(buckets: List[Bucket]) -> ScoredCandidate: def penalty(buckets):
# Penalize using many buckets (~inputs)
badness = len(buckets) - 1 badness = len(buckets) - 1
tx, change_outputs = tx_from_buckets(buckets) total_input = sum(bucket.value for bucket in buckets)
change = sum(o.value for o in change_outputs) # FIXME "change" here also includes fees
change = float(total_input - spent_amount)
# Penalize change not roughly in output range # Penalize change not roughly in output range
if change == 0: if change < min_change:
pass # no change is great!
elif change < min_change:
badness += (min_change - change) / (min_change + 10000) badness += (min_change - change) / (min_change + 10000)
# Penalize really small change; under 1 mBTC ~= using 1 more input
if change < COIN / 1000:
badness += 1
elif change > max_change: elif change > max_change:
badness += (change - max_change) / (max_change + 10000) badness += (change - max_change) / (max_change + 10000)
# Penalize large change; 5 BTC excess ~= using 1 more input # Penalize large change; 5 BTC excess ~= using 1 more input
badness += change / (COIN * 5) badness += change / (COIN * 5)
return ScoredCandidate(badness, tx, buckets) return badness
return penalty return penalty
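A rough standalone sketch of the change-size component of the penalty above (the real penalty also counts buckets and rebuilds the candidate transaction via tx_from_buckets; the function name below is made up):
COIN = 100_000_000

def change_penalty(change: int, min_change: float, max_change: float) -> float:
    badness = 0.0
    if change == 0:
        pass                                    # no change is ideal
    elif change < min_change:
        badness += (min_change - change) / (min_change + 10000)
        if change < COIN / 1000:                # under 1 mBTC ~= one more input
            badness += 1
    elif change > max_change:
        badness += (change - max_change) / (max_change + 10000)
        badness += change / (COIN * 5)          # 5 BTC excess ~= one more input
    return badness

# change that already falls inside [min_change, max_change] is not penalized
assert change_penalty(2 * COIN, 1 * COIN, 3 * COIN) == 0.0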

File diff suppressed because it is too large

View file

@ -26,10 +26,6 @@
import os import os
import json import json
BLOCKS_PER_CHUNK = 96
from .util import inv_dict
from . import bitcoin
def read_json(filename, default): def read_json(filename, default):
path = os.path.join(os.path.dirname(__file__), filename) path = os.path.join(os.path.dirname(__file__), filename)
@ -41,35 +37,17 @@ def read_json(filename, default):
return r return r
GIT_REPO_URL = "https://github.com/spesmilo/electrum" class BitcoinMainnet:
GIT_REPO_ISSUES_URL = "https://github.com/spesmilo/electrum/issues"
class AbstractNet:
BLOCK_HEIGHT_FIRST_LIGHTNING_CHANNELS = 0
@classmethod
def max_checkpoint(cls) -> int:
return max(0, len(cls.CHECKPOINTS) * 2016 - 1)
@classmethod
def rev_genesis_bytes(cls) -> bytes:
return bytes.fromhex(bitcoin.rev_hex(cls.GENESIS))
class BitcoinMainnet(AbstractNet):
TESTNET = False TESTNET = False
WIF_PREFIX = 0x1c WIF_PREFIX = 0x80
ADDRTYPE_P2PKH = 0x55 ADDRTYPE_P2PKH = 0
ADDRTYPE_P2SH = 0x7A ADDRTYPE_P2SH = 5
SEGWIT_HRP = "lbc" SEGWIT_HRP = "bc"
GENESIS = "9c89283ba0f3227f6c03b70216b9f665f0118d5e0fa729cedf4fb34d6a34f463" GENESIS = "000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f"
DEFAULT_PORTS = {'t': '50001', 's': '50002'} DEFAULT_PORTS = {'t': '50001', 's': '50002'}
DEFAULT_SERVERS = read_json('servers.json', {}) DEFAULT_SERVERS = read_json('servers.json', {})
CHECKPOINTS = read_json('bullshit.json', []) CHECKPOINTS = read_json('checkpoints.json', [])
BLOCK_HEIGHT_FIRST_LIGHTNING_CHANNELS = 497000
XPRV_HEADERS = { XPRV_HEADERS = {
'standard': 0x0488ade4, # xprv 'standard': 0x0488ade4, # xprv
@ -78,7 +56,6 @@ class BitcoinMainnet(AbstractNet):
'p2wpkh': 0x04b2430c, # zprv 'p2wpkh': 0x04b2430c, # zprv
'p2wsh': 0x02aa7a99, # Zprv 'p2wsh': 0x02aa7a99, # Zprv
} }
XPRV_HEADERS_INV = inv_dict(XPRV_HEADERS)
XPUB_HEADERS = { XPUB_HEADERS = {
'standard': 0x0488b21e, # xpub 'standard': 0x0488b21e, # xpub
'p2wpkh-p2sh': 0x049d7cb2, # ypub 'p2wpkh-p2sh': 0x049d7cb2, # ypub
@ -86,22 +63,16 @@ class BitcoinMainnet(AbstractNet):
'p2wpkh': 0x04b24746, # zpub 'p2wpkh': 0x04b24746, # zpub
'p2wsh': 0x02aa7ed3, # Zpub 'p2wsh': 0x02aa7ed3, # Zpub
} }
XPUB_HEADERS_INV = inv_dict(XPUB_HEADERS) BIP44_COIN_TYPE = 0
BIP44_COIN_TYPE = 140
LN_REALM_BYTE = 0
LN_DNS_SEEDS = [
'nodes.lightning.directory.',
'lseed.bitcoinstats.com.',
]
class BitcoinTestnet(AbstractNet): class BitcoinTestnet:
TESTNET = True TESTNET = True
WIF_PREFIX = 0xef WIF_PREFIX = 0xef
ADDRTYPE_P2PKH = 111 ADDRTYPE_P2PKH = 111
ADDRTYPE_P2SH = 196 ADDRTYPE_P2SH = 196
SEGWIT_HRP = "tlbc" SEGWIT_HRP = "tb"
GENESIS = "000000000933ea01ad0ee984209779baaec3ced90fa3f408719526f8d77f4943" GENESIS = "000000000933ea01ad0ee984209779baaec3ced90fa3f408719526f8d77f4943"
DEFAULT_PORTS = {'t': '51001', 's': '51002'} DEFAULT_PORTS = {'t': '51001', 's': '51002'}
DEFAULT_SERVERS = read_json('servers_testnet.json', {}) DEFAULT_SERVERS = read_json('servers_testnet.json', {})
@ -114,7 +85,6 @@ class BitcoinTestnet(AbstractNet):
'p2wpkh': 0x045f18bc, # vprv 'p2wpkh': 0x045f18bc, # vprv
'p2wsh': 0x02575048, # Vprv 'p2wsh': 0x02575048, # Vprv
} }
XPRV_HEADERS_INV = inv_dict(XPRV_HEADERS)
XPUB_HEADERS = { XPUB_HEADERS = {
'standard': 0x043587cf, # tpub 'standard': 0x043587cf, # tpub
'p2wpkh-p2sh': 0x044a5262, # upub 'p2wpkh-p2sh': 0x044a5262, # upub
@ -122,34 +92,23 @@ class BitcoinTestnet(AbstractNet):
'p2wpkh': 0x045f1cf6, # vpub 'p2wpkh': 0x045f1cf6, # vpub
'p2wsh': 0x02575483, # Vpub 'p2wsh': 0x02575483, # Vpub
} }
XPUB_HEADERS_INV = inv_dict(XPUB_HEADERS)
BIP44_COIN_TYPE = 1 BIP44_COIN_TYPE = 1
LN_REALM_BYTE = 1
LN_DNS_SEEDS = [
'test.nodes.lightning.directory.',
'lseed.bitcoinstats.com.',
]
class BitcoinRegtest(BitcoinTestnet): class BitcoinRegtest(BitcoinTestnet):
SEGWIT_HRP = "blbc" SEGWIT_HRP = "bcrt"
GENESIS = "0f9188f13cb7b2c71f2a335e3a4fc328bf5beb436012afca590b1a11466e2206" GENESIS = "0f9188f13cb7b2c71f2a335e3a4fc328bf5beb436012afca590b1a11466e2206"
DEFAULT_SERVERS = read_json('servers_regtest.json', {}) DEFAULT_SERVERS = read_json('servers_regtest.json', {})
CHECKPOINTS = [] CHECKPOINTS = []
LN_DNS_SEEDS = []
class BitcoinSimnet(BitcoinTestnet): class BitcoinSimnet(BitcoinTestnet):
WIF_PREFIX = 0x64 SEGWIT_HRP = "sb"
ADDRTYPE_P2PKH = 0x3f
ADDRTYPE_P2SH = 0x7b
SEGWIT_HRP = "slbc"
GENESIS = "683e86bd5c6d110d91b94b97137ba6bfe02dbbdb8e3dff722a669b5d69d77af6" GENESIS = "683e86bd5c6d110d91b94b97137ba6bfe02dbbdb8e3dff722a669b5d69d77af6"
DEFAULT_SERVERS = read_json('servers_regtest.json', {}) DEFAULT_SERVERS = read_json('servers_regtest.json', {})
CHECKPOINTS = [] CHECKPOINTS = []
LN_DNS_SEEDS = []
# don't import net directly, import the module instead (so that net is singleton) # don't import net directly, import the module instead (so that net is singleton)
@ -163,6 +122,7 @@ def set_mainnet():
global net global net
net = BitcoinMainnet net = BitcoinMainnet
def set_testnet(): def set_testnet():
global net global net
net = BitcoinTestnet net = BitcoinTestnet
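
As a quick illustration of the two classmethods introduced on AbstractNet above, here is a minimal runnable sketch. The Demo subclass and its placeholder CHECKPOINTS/GENESIS values are made up, and rev_genesis_bytes is written with a plain byte reversal instead of bitcoin.rev_hex.

class AbstractNet:
    CHECKPOINTS = []      # one (hash, target) entry per 2016-block chunk
    GENESIS = "00" * 32   # placeholder genesis hash (hex, display byte order)

    @classmethod
    def max_checkpoint(cls) -> int:
        # highest block height still covered by the bundled checkpoints
        return max(0, len(cls.CHECKPOINTS) * 2016 - 1)

    @classmethod
    def rev_genesis_bytes(cls) -> bytes:
        # block headers store hashes in reversed (little-endian) byte order
        return bytes.fromhex(cls.GENESIS)[::-1]

class Demo(AbstractNet):
    CHECKPOINTS = [("dummy_hash", 0)] * 3

print(Demo.max_checkpoint())          # 6047
print(Demo.rev_genesis_bytes().hex()) # still all zeros for the placeholder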


@ -21,22 +21,22 @@
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE. # SOFTWARE.
import re import re
import dns import dns
from dns.exception import DNSException from dns.exception import DNSException
import json
import traceback
import sys
from . import bitcoin from . import bitcoin
from . import dnssec from . import dnssec
from .util import export_meta, import_meta, to_string from .util import export_meta, import_meta, print_error, to_string
from .logging import Logger
class Contacts(dict, Logger): class Contacts(dict):
def __init__(self, db): def __init__(self, storage):
Logger.__init__(self) self.storage = storage
self.db = db d = self.storage.get('contacts', {})
d = self.db.get('contacts', {})
try: try:
self.update(d) self.update(d)
except: except:
@ -49,7 +49,7 @@ class Contacts(dict, Logger):
self[n] = ('address', k) self[n] = ('address', k)
def save(self): def save(self):
self.db.put('contacts', dict(self)) self.storage.put('contacts', dict(self))
def import_file(self, path): def import_file(self, path):
import_meta(path, self._validate, self.load_meta) import_meta(path, self._validate, self.load_meta)
@ -67,9 +67,8 @@ class Contacts(dict, Logger):
def pop(self, key): def pop(self, key):
if key in self.keys(): if key in self.keys():
res = dict.pop(self, key) dict.pop(self, key)
self.save() self.save()
return res
def resolve(self, k): def resolve(self, k):
if bitcoin.is_address(k): if bitcoin.is_address(k):
@ -93,7 +92,7 @@ class Contacts(dict, Logger):
'type': 'openalias', 'type': 'openalias',
'validated': validated 'validated': validated
} }
raise Exception("Invalid LBRY Credits address or alias", k) raise Exception("Invalid Bitcoin address or alias", k)
def resolve_openalias(self, url): def resolve_openalias(self, url):
# support email-style addresses, per the OA standard # support email-style addresses, per the OA standard
@ -101,9 +100,9 @@ class Contacts(dict, Logger):
try: try:
records, validated = dnssec.query(url, dns.rdatatype.TXT) records, validated = dnssec.query(url, dns.rdatatype.TXT)
except DNSException as e: except DNSException as e:
self.logger.info(f'Error resolving openalias: {repr(e)}') print_error('Error resolving openalias: ', str(e))
return None return None
prefix = 'lbc' prefix = 'btc'
for record in records: for record in records:
string = to_string(record.strings[0], 'utf8') string = to_string(record.strings[0], 'utf8')
if string.startswith('oa1:' + prefix): if string.startswith('oa1:' + prefix):
@ -121,7 +120,7 @@ class Contacts(dict, Logger):
return regex.search(haystack).groups()[0] return regex.search(haystack).groups()[0]
except AttributeError: except AttributeError:
return None return None
def _validate(self, data): def _validate(self, data):
for k, v in list(data.items()): for k, v in list(data.items()):
if k == 'contacts': if k == 'contacts':
@ -133,3 +132,4 @@ class Contacts(dict, Logger):
if _type != 'address': if _type != 'address':
data.pop(k) data.pop(k)
return data return data
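
For context, the OpenAlias TXT records that resolve_openalias() consumes look roughly like the example below. This is a stripped-down sketch of the record parsing only: the regex helper and the sample record are invented, and the prefix is a parameter rather than the hard-coded 'lbc'/'btc' seen in the diff.

import re

def parse_openalias_txt(records, prefix='btc'):
    def find(regex, haystack):
        m = re.search(regex, haystack)
        return m.group(1) if m else None

    for string in records:
        if not string.startswith('oa1:' + prefix):
            continue
        address = find(r'recipient_address=([A-Za-z0-9]+)', string)
        name = find(r'recipient_name=([^;]+)', string)
        if address:
            return {'address': address, 'name': name or address, 'type': 'openalias'}
    return None

print(parse_openalias_txt(
    ['oa1:btc recipient_address=1ExampleAddr; recipient_name=Example Merchant;']))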


@ -27,12 +27,10 @@ import base64
import os import os
import hashlib import hashlib
import hmac import hmac
from typing import Union
import pyaes import pyaes
from .util import assert_bytes, InvalidPassword, to_bytes, to_string, WalletFileException from .util import assert_bytes, InvalidPassword, to_bytes, to_string
from .i18n import _
try: try:
@ -45,26 +43,26 @@ class InvalidPadding(Exception):
pass pass
def append_PKCS7_padding(data: bytes) -> bytes: def append_PKCS7_padding(data):
assert_bytes(data) assert_bytes(data)
padlen = 16 - (len(data) % 16) padlen = 16 - (len(data) % 16)
return data + bytes([padlen]) * padlen return data + bytes([padlen]) * padlen
def strip_PKCS7_padding(data: bytes) -> bytes: def strip_PKCS7_padding(data):
assert_bytes(data) assert_bytes(data)
if len(data) % 16 != 0 or len(data) == 0: if len(data) % 16 != 0 or len(data) == 0:
raise InvalidPadding("invalid length") raise InvalidPadding("invalid length")
padlen = data[-1] padlen = data[-1]
if not (0 < padlen <= 16): if padlen > 16:
raise InvalidPadding("invalid padding byte (out of range)") raise InvalidPadding("invalid padding byte (large)")
for i in data[-padlen:]: for i in data[-padlen:]:
if i != padlen: if i != padlen:
raise InvalidPadding("invalid padding byte (inconsistent)") raise InvalidPadding("invalid padding byte (inconsistent)")
return data[0:-padlen] return data[0:-padlen]
def aes_encrypt_with_iv(key: bytes, iv: bytes, data: bytes) -> bytes: def aes_encrypt_with_iv(key, iv, data):
assert_bytes(key, iv, data) assert_bytes(key, iv, data)
data = append_PKCS7_padding(data) data = append_PKCS7_padding(data)
if AES: if AES:
@ -76,7 +74,7 @@ def aes_encrypt_with_iv(key: bytes, iv: bytes, data: bytes) -> bytes:
return e return e
def aes_decrypt_with_iv(key: bytes, iv: bytes, data: bytes) -> bytes: def aes_decrypt_with_iv(key, iv, data):
assert_bytes(key, iv, data) assert_bytes(key, iv, data)
if AES: if AES:
cipher = AES.new(key, AES.MODE_CBC, iv) cipher = AES.new(key, AES.MODE_CBC, iv)
@ -91,125 +89,60 @@ def aes_decrypt_with_iv(key: bytes, iv: bytes, data: bytes) -> bytes:
raise InvalidPassword() raise InvalidPassword()
def EncodeAES_base64(secret: bytes, msg: bytes) -> bytes: def EncodeAES(secret, s):
"""Returns base64 encoded ciphertext.""" assert_bytes(s)
e = EncodeAES_bytes(secret, msg) iv = bytes(os.urandom(16))
ct = aes_encrypt_with_iv(secret, iv, s)
e = iv + ct
return base64.b64encode(e) return base64.b64encode(e)
def DecodeAES(secret, e):
def EncodeAES_bytes(secret: bytes, msg: bytes) -> bytes: e = bytes(base64.b64decode(e))
assert_bytes(msg) iv, e = e[:16], e[16:]
iv = bytes(os.urandom(16))
ct = aes_encrypt_with_iv(secret, iv, msg)
return iv + ct
def DecodeAES_base64(secret: bytes, ciphertext_b64: Union[bytes, str]) -> bytes:
ciphertext = bytes(base64.b64decode(ciphertext_b64))
return DecodeAES_bytes(secret, ciphertext)
def DecodeAES_bytes(secret: bytes, ciphertext: bytes) -> bytes:
assert_bytes(ciphertext)
iv, e = ciphertext[:16], ciphertext[16:]
s = aes_decrypt_with_iv(secret, iv, e) s = aes_decrypt_with_iv(secret, iv, e)
return s return s
def pw_encode(s, password):
PW_HASH_VERSION_LATEST = 1 if password:
KNOWN_PW_HASH_VERSIONS = (1, 2, ) secret = Hash(password)
SUPPORTED_PW_HASH_VERSIONS = (1, ) return EncodeAES(secret, to_bytes(s, "utf8")).decode('utf8')
assert PW_HASH_VERSION_LATEST in KNOWN_PW_HASH_VERSIONS
assert PW_HASH_VERSION_LATEST in SUPPORTED_PW_HASH_VERSIONS
class UnexpectedPasswordHashVersion(InvalidPassword, WalletFileException):
def __init__(self, version):
self.version = version
def __str__(self):
return "{unexpected}: {version}\n{instruction}".format(
unexpected=_("Unexpected password hash version"),
version=self.version,
instruction=_('You are most likely using an outdated version of Electrum. Please update.'))
class UnsupportedPasswordHashVersion(InvalidPassword, WalletFileException):
def __init__(self, version):
self.version = version
def __str__(self):
return "{unsupported}: {version}\n{instruction}".format(
unsupported=_("Unsupported password hash version"),
version=self.version,
instruction=f"To open this wallet, try 'git checkout password_v{self.version}'.\n"
"Alternatively, restore from seed.")
def _hash_password(password: Union[bytes, str], *, version: int) -> bytes:
pw = to_bytes(password, 'utf8')
if version not in SUPPORTED_PW_HASH_VERSIONS:
raise UnsupportedPasswordHashVersion(version)
if version == 1:
return sha256d(pw)
else: else:
assert version not in KNOWN_PW_HASH_VERSIONS return s
raise UnexpectedPasswordHashVersion(version)
def pw_decode(s, password):
if password is not None:
secret = Hash(password)
try:
d = to_string(DecodeAES(secret, s), "utf8")
except Exception:
raise InvalidPassword()
return d
else:
return s
def pw_encode(data: str, password: Union[bytes, str, None], *, version: int) -> str: def sha256(x: bytes) -> bytes:
if not password:
return data
if version not in KNOWN_PW_HASH_VERSIONS:
raise UnexpectedPasswordHashVersion(version)
# derive key from password
secret = _hash_password(password, version=version)
# encrypt given data
ciphertext = EncodeAES_bytes(secret, to_bytes(data, "utf8"))
ciphertext_b64 = base64.b64encode(ciphertext)
return ciphertext_b64.decode('utf8')
def pw_decode(data: str, password: Union[bytes, str, None], *, version: int) -> str:
if password is None:
return data
if version not in KNOWN_PW_HASH_VERSIONS:
raise UnexpectedPasswordHashVersion(version)
data_bytes = bytes(base64.b64decode(data))
# derive key from password
secret = _hash_password(password, version=version)
# decrypt given data
try:
d = to_string(DecodeAES_bytes(secret, data_bytes), "utf8")
except Exception as e:
raise InvalidPassword() from e
return d
def sha256(x: Union[bytes, str]) -> bytes:
x = to_bytes(x, 'utf8') x = to_bytes(x, 'utf8')
return bytes(hashlib.sha256(x).digest()) return bytes(hashlib.sha256(x).digest())
def sha256d(x: Union[bytes, str]) -> bytes: def Hash(x: bytes) -> bytes:
x = to_bytes(x, 'utf8') x = to_bytes(x, 'utf8')
out = bytes(sha256(sha256(x))) out = bytes(sha256(sha256(x)))
return out return out
def hash_160(x: bytes) -> bytes: def hash_160(x: bytes) -> bytes:
return ripemd(sha256(x))
def ripemd(x):
try: try:
md = hashlib.new('ripemd160') md = hashlib.new('ripemd160')
md.update(x) md.update(sha256(x))
return md.digest() return md.digest()
except BaseException: except BaseException:
from . import ripemd from . import ripemd
md = ripemd.new(x) md = ripemd.new(sha256(x))
return md.digest() return md.digest()
def hmac_oneshot(key: bytes, msg: bytes, digest) -> bytes: def hmac_oneshot(key: bytes, msg: bytes, digest) -> bytes:
if hasattr(hmac, 'digest'): if hasattr(hmac, 'digest'):
# requires python 3.7+; faster # requires python 3.7+; faster
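
One behavioural change in the diff above is easy to miss: strip_PKCS7_padding now rejects a padding byte of 0 as well as values above 16. A self-contained sketch of the stricter check (names slightly simplified, example input made up):

class InvalidPadding(Exception):
    pass

def strip_pkcs7_padding(data: bytes) -> bytes:
    if len(data) % 16 != 0 or len(data) == 0:
        raise InvalidPadding("invalid length")
    padlen = data[-1]
    if not (0 < padlen <= 16):
        # the old check only caught padlen > 16, letting a trailing 0x00 through
        raise InvalidPadding("invalid padding byte (out of range)")
    if any(b != padlen for b in data[-padlen:]):
        raise InvalidPadding("invalid padding byte (inconsistent)")
    return data[:-padlen]

print(strip_pkcs7_padding(b"hello world\x05\x05\x05\x05\x05"))  # b'hello world'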


@ -28,7 +28,6 @@
"BTC", "BTC",
"BTN", "BTN",
"BWP", "BWP",
"BYN",
"BZD", "BZD",
"CAD", "CAD",
"CDF", "CDF",
@ -47,7 +46,6 @@
"DZD", "DZD",
"EGP", "EGP",
"ETB", "ETB",
"ETH",
"EUR", "EUR",
"FJD", "FJD",
"FKP", "FKP",
@ -111,7 +109,6 @@
"NZD", "NZD",
"OMR", "OMR",
"PAB", "PAB",
"PAX",
"PEN", "PEN",
"PGK", "PGK",
"PHP", "PHP",
@ -152,7 +149,6 @@
"UYU", "UYU",
"UZS", "UZS",
"VEF", "VEF",
"VES",
"VND", "VND",
"VUV", "VUV",
"WST", "WST",
@ -162,27 +158,197 @@
"XCD", "XCD",
"XOF", "XOF",
"XPF", "XPF",
"XRP",
"YER", "YER",
"ZAR", "ZAR",
"ZMW", "ZMW",
"ZWL" "ZWL"
], ],
"BitStamp": [ "BitStamp": [
"USD", "USD"
"EUR"
], ],
"Bitbank": [ "Bitbank": [
"JPY" "JPY"
], ],
"BitcoinAverage": [
"AED",
"AFN",
"ALL",
"AMD",
"ANG",
"AOA",
"ARS",
"AUD",
"AWG",
"AZN",
"BAM",
"BBD",
"BDT",
"BGN",
"BHD",
"BIF",
"BMD",
"BND",
"BOB",
"BRL",
"BSD",
"BTN",
"BWP",
"BYN",
"BZD",
"CAD",
"CDF",
"CHF",
"CLF",
"CLP",
"CNH",
"CNY",
"COP",
"CRC",
"CUC",
"CUP",
"CVE",
"CZK",
"DJF",
"DKK",
"DOP",
"DZD",
"EGP",
"ERN",
"ETB",
"EUR",
"FJD",
"FKP",
"GBP",
"GEL",
"GGP",
"GHS",
"GIP",
"GMD",
"GNF",
"GTQ",
"GYD",
"HKD",
"HNL",
"HRK",
"HTG",
"HUF",
"IDR",
"ILS",
"IMP",
"INR",
"IQD",
"IRR",
"ISK",
"JEP",
"JMD",
"JOD",
"JPY",
"KES",
"KGS",
"KHR",
"KMF",
"KPW",
"KRW",
"KWD",
"KYD",
"KZT",
"LAK",
"LBP",
"LKR",
"LRD",
"LSL",
"LYD",
"MAD",
"MDL",
"MGA",
"MKD",
"MMK",
"MNT",
"MOP",
"MRO",
"MUR",
"MVR",
"MWK",
"MXN",
"MYR",
"MZN",
"NAD",
"NGN",
"NIO",
"NOK",
"NPR",
"NZD",
"OMR",
"PAB",
"PEN",
"PGK",
"PHP",
"PKR",
"PLN",
"PYG",
"QAR",
"RON",
"RSD",
"RUB",
"RWF",
"SAR",
"SBD",
"SCR",
"SDG",
"SEK",
"SGD",
"SHP",
"SLL",
"SOS",
"SRD",
"SSP",
"STD",
"SVC",
"SYP",
"SZL",
"THB",
"TJS",
"TMT",
"TND",
"TOP",
"TRY",
"TTD",
"TWD",
"TZS",
"UAH",
"UGX",
"USD",
"UYU",
"UZS",
"VEF",
"VND",
"VUV",
"WST",
"XAF",
"XAG",
"XAU",
"XCD",
"XDR",
"XOF",
"XPD",
"XPF",
"XPT",
"YER",
"ZAR",
"ZMW",
"ZWL"
],
"Bitcointoyou": [
"BRL"
],
"BitcoinVenezuela": [ "BitcoinVenezuela": [
"ARS", "ARS",
"ETH",
"EUR", "EUR",
"LTC",
"USD", "USD",
"VEF", "VEF"
"XMR" ],
"Bitmarket": [
"PLN"
], ],
"Bitso": [ "Bitso": [
"MXN" "MXN"
@ -214,12 +380,6 @@
"TWD", "TWD",
"USD" "USD"
], ],
"Bylls": [
"CAD"
],
"CoinCap": [
"USD"
],
"CoinDesk": [ "CoinDesk": [
"AED", "AED",
"AFN", "AFN",
@ -389,63 +549,6 @@
"ZMW", "ZMW",
"ZWL" "ZWL"
], ],
"CoinGecko": [
"AED",
"ARS",
"AUD",
"BCH",
"BDT",
"BHD",
"BMD",
"BNB",
"BRL",
"BTC",
"CAD",
"CHF",
"CLP",
"CNY",
"CZK",
"DKK",
"EOS",
"ETH",
"EUR",
"GBP",
"HKD",
"HUF",
"IDR",
"ILS",
"INR",
"JPY",
"KRW",
"KWD",
"LKR",
"LTC",
"MMK",
"MXN",
"MYR",
"NOK",
"NZD",
"PHP",
"PKR",
"PLN",
"RUB",
"SAR",
"SEK",
"SGD",
"THB",
"TRY",
"TWD",
"UAH",
"USD",
"VEF",
"VND",
"XAG",
"XAU",
"XDR",
"XLM",
"XRP",
"ZAR"
],
"Coinbase": [ "Coinbase": [
"AED", "AED",
"AFN", "AFN",
@ -458,7 +561,6 @@
"AWG", "AWG",
"AZN", "AZN",
"BAM", "BAM",
"BAT",
"BBD", "BBD",
"BCH", "BCH",
"BDT", "BDT",
@ -470,8 +572,6 @@
"BOB", "BOB",
"BRL", "BRL",
"BSD", "BSD",
"BSV",
"BTC",
"BTN", "BTN",
"BWP", "BWP",
"BYN", "BYN",
@ -489,17 +589,14 @@
"CUC", "CUC",
"CVE", "CVE",
"CZK", "CZK",
"DAI",
"DJF", "DJF",
"DKK", "DKK",
"DOP", "DOP",
"DZD", "DZD",
"EEK", "EEK",
"EGP", "EGP",
"EOS",
"ERN", "ERN",
"ETB", "ETB",
"ETC",
"ETH", "ETH",
"EUR", "EUR",
"FJD", "FJD",
@ -567,7 +664,6 @@
"NPR", "NPR",
"NZD", "NZD",
"OMR", "OMR",
"OXT",
"PAB", "PAB",
"PEN", "PEN",
"PGK", "PGK",
@ -576,12 +672,10 @@
"PLN", "PLN",
"PYG", "PYG",
"QAR", "QAR",
"REP",
"RON", "RON",
"RSD", "RSD",
"RUB", "RUB",
"RWF", "RWF",
"SAI",
"SAR", "SAR",
"SBD", "SBD",
"SCR", "SCR",
@ -610,7 +704,6 @@
"UYU", "UYU",
"UZS", "UZS",
"VEF", "VEF",
"VES",
"VND", "VND",
"VUV", "VUV",
"WST", "WST",
@ -619,21 +712,19 @@
"XAU", "XAU",
"XCD", "XCD",
"XDR", "XDR",
"XLM",
"XOF", "XOF",
"XPD", "XPD",
"XPF", "XPF",
"XPT", "XPT",
"XRP",
"XTZ",
"YER", "YER",
"ZAR", "ZAR",
"ZEC",
"ZMK", "ZMK",
"ZMW", "ZMW",
"ZRX",
"ZWL" "ZWL"
], ],
"Foxbit": [
"BRL"
],
"Kraken": [ "Kraken": [
"CAD", "CAD",
"EUR", "EUR",
@ -643,14 +734,13 @@
], ],
"LocalBitcoins": [ "LocalBitcoins": [
"AED", "AED",
"AMD",
"AOA",
"ARS", "ARS",
"AUD", "AUD",
"BDT", "BDT",
"BGN", "BHD",
"BOB", "BOB",
"BRL", "BRL",
"BWP",
"BYN", "BYN",
"CAD", "CAD",
"CHF", "CHF",
@ -667,7 +757,6 @@
"GBP", "GBP",
"GEL", "GEL",
"GHS", "GHS",
"GTQ",
"HKD", "HKD",
"HRK", "HRK",
"HUF", "HUF",
@ -676,18 +765,16 @@
"INR", "INR",
"IRR", "IRR",
"JOD", "JOD",
"JPY",
"KES", "KES",
"KRW", "KRW",
"KWD",
"KZT", "KZT",
"LKR",
"LTC", "LTC",
"MAD", "MAD",
"MDL",
"MUR",
"MXN", "MXN",
"MYR", "MYR",
"NGN", "NGN",
"NIO",
"NOK", "NOK",
"NZD", "NZD",
"PAB", "PAB",
@ -699,7 +786,6 @@
"RON", "RON",
"RSD", "RSD",
"RUB", "RUB",
"RWF",
"SAR", "SAR",
"SEK", "SEK",
"SGD", "SGD",
@ -712,18 +798,26 @@
"UGX", "UGX",
"USD", "USD",
"UYU", "UYU",
"VES", "VEF",
"VND", "VND",
"XAF", "XAF",
"XOF",
"XRP", "XRP",
"ZAR", "ZAR",
"ZMW" "ZMW"
], ],
"MercadoBitcoin": [
"BRL"
],
"NegocieCoins": [
"BRL"
],
"TheRockTrading": [ "TheRockTrading": [
"EUR" "EUR"
], ],
"Zaif": [ "WEX": [
"JPY" "EUR",
] "RUB",
} "USD"
],
"itBit": []
}


@ -22,45 +22,29 @@
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN # ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE. # SOFTWARE.
import asyncio
import ast import ast
import os import os
import time import time
import traceback import traceback
import sys import sys
import threading
from typing import Dict, Optional, Tuple, Iterable
from base64 import b64decode, b64encode
from collections import defaultdict
import aiohttp # from jsonrpc import JSONRPCResponseManager
from aiohttp import web, client_exceptions import jsonrpclib
import jsonrpcclient from .jsonrpc import VerifyingJSONRPCServer
import jsonrpcserver
from jsonrpcserver import response
from jsonrpcclient.clients.aiohttp_client import AiohttpClient
from aiorpcx import TaskGroup
from .version import ELECTRUM_VERSION
from .network import Network from .network import Network
from .util import (json_decode, to_bytes, to_string, profiler, standardize_path, constant_time_compare) from .util import json_decode, DaemonThread
from .util import PR_PAID, PR_EXPIRED, get_request_status from .util import print_error, to_string
from .util import log_exceptions, ignore_exceptions, randrange from .wallet import Wallet
from .wallet import Wallet, Abstract_Wallet
from .storage import WalletStorage from .storage import WalletStorage
from .wallet_db import WalletDB
from .commands import known_commands, Commands from .commands import known_commands, Commands
from .simple_config import SimpleConfig from .simple_config import SimpleConfig
from .exchange_rate import FxThread from .exchange_rate import FxThread
from .logging import get_logger, Logger from .plugin import run_hook
_logger = get_logger(__name__) def get_lockfile(config):
class DaemonNotRunning(Exception):
pass
def get_lockfile(config: SimpleConfig):
return os.path.join(config.path, 'daemon') return os.path.join(config.path, 'daemon')
@ -68,7 +52,7 @@ def remove_lockfile(lockfile):
os.unlink(lockfile) os.unlink(lockfile)
def get_file_descriptor(config: SimpleConfig): def get_fd_or_server(config):
'''Tries to create the lockfile, using O_EXCL to '''Tries to create the lockfile, using O_EXCL to
prevent races. If it succeeds it returns the FD. prevent races. If it succeeds it returns the FD.
Otherwise try and connect to the server specified in the lockfile. Otherwise try and connect to the server specified in the lockfile.
@ -77,447 +61,256 @@ def get_file_descriptor(config: SimpleConfig):
lockfile = get_lockfile(config) lockfile = get_lockfile(config)
while True: while True:
try: try:
return os.open(lockfile, os.O_CREAT | os.O_EXCL | os.O_WRONLY, 0o644) return os.open(lockfile, os.O_CREAT | os.O_EXCL | os.O_WRONLY, 0o644), None
except OSError: except OSError:
pass pass
try: server = get_server(config)
request(config, 'ping') if server is not None:
return None return None, server
except DaemonNotRunning: # Couldn't connect; remove lockfile and try again.
# Couldn't connect; remove lockfile and try again. remove_lockfile(lockfile)
remove_lockfile(lockfile)
def get_server(config):
def request(config: SimpleConfig, endpoint, args=(), timeout=60):
lockfile = get_lockfile(config) lockfile = get_lockfile(config)
while True: while True:
create_time = None create_time = None
try: try:
with open(lockfile) as f: with open(lockfile) as f:
(host, port), create_time = ast.literal_eval(f.read()) (host, port), create_time = ast.literal_eval(f.read())
except Exception: rpc_user, rpc_password = get_rpc_credentials(config)
raise DaemonNotRunning() if rpc_password == '':
rpc_user, rpc_password = get_rpc_credentials(config) # authentication disabled
server_url = 'http://%s:%d' % (host, port) server_url = 'http://%s:%d' % (host, port)
auth = aiohttp.BasicAuth(login=rpc_user, password=rpc_password) else:
loop = asyncio.get_event_loop() server_url = 'http://%s:%s@%s:%d' % (
async def request_coroutine(): rpc_user, rpc_password, host, port)
async with aiohttp.ClientSession(auth=auth) as session: server = jsonrpclib.Server(server_url)
server = AiohttpClient(session, server_url) # Test daemon is running
f = getattr(server, endpoint) server.ping()
response = await f(*args) return server
return response.data.result except Exception as e:
try: print_error("[get_server]", e)
fut = asyncio.run_coroutine_threadsafe(request_coroutine(), loop) if not create_time or create_time < time.time() - 1.0:
return fut.result(timeout=timeout) return None
except aiohttp.client_exceptions.ClientConnectorError as e:
_logger.info(f"failed to connect to JSON-RPC server {e}")
if not create_time or create_time < time.time() - 1.0:
raise DaemonNotRunning()
# Sleep a bit and try again; it might have just been started # Sleep a bit and try again; it might have just been started
time.sleep(1.0) time.sleep(1.0)
def get_rpc_credentials(config: SimpleConfig) -> Tuple[str, str]: def get_rpc_credentials(config):
rpc_user = config.get('rpcuser', None) rpc_user = config.get('rpcuser', None)
rpc_password = config.get('rpcpassword', None) rpc_password = config.get('rpcpassword', None)
if rpc_user is None or rpc_password is None: if rpc_user is None or rpc_password is None:
rpc_user = 'user' rpc_user = 'user'
import ecdsa, base64
bits = 128 bits = 128
nbytes = bits // 8 + (bits % 8 > 0) nbytes = bits // 8 + (bits % 8 > 0)
pw_int = randrange(pow(2, bits)) pw_int = ecdsa.util.randrange(pow(2, bits))
pw_b64 = b64encode( pw_b64 = base64.b64encode(
pw_int.to_bytes(nbytes, 'big'), b'-_') pw_int.to_bytes(nbytes, 'big'), b'-_')
rpc_password = to_string(pw_b64, 'ascii') rpc_password = to_string(pw_b64, 'ascii')
config.set_key('rpcuser', rpc_user) config.set_key('rpcuser', rpc_user)
config.set_key('rpcpassword', rpc_password, save=True) config.set_key('rpcpassword', rpc_password, save=True)
elif rpc_password == '': elif rpc_password == '':
_logger.warning('RPC authentication is disabled.') from .util import print_stderr
print_stderr('WARNING: RPC authentication is disabled.')
return rpc_user, rpc_password return rpc_user, rpc_password
class WatchTowerServer(Logger): class Daemon(DaemonThread):
def __init__(self, network): def __init__(self, config, fd, is_gui):
Logger.__init__(self) DaemonThread.__init__(self)
self.config = network.config
self.network = network
self.lnwatcher = network.local_watchtower
self.app = web.Application()
self.app.router.add_post("/", self.handle)
self.methods = jsonrpcserver.methods.Methods()
self.methods.add(self.get_ctn)
self.methods.add(self.add_sweep_tx)
async def handle(self, request):
request = await request.text()
self.logger.info(f'{request}')
response = await jsonrpcserver.async_dispatch(request, methods=self.methods)
if response.wanted:
return web.json_response(response.deserialized(), status=response.http_status)
else:
return web.Response()
async def run(self):
host = self.config.get('watchtower_host')
port = self.config.get('watchtower_port', 12345)
self.runner = web.AppRunner(self.app)
await self.runner.setup()
site = web.TCPSite(self.runner, host, port, ssl_context=self.config.get_ssl_context())
await site.start()
async def get_ctn(self, *args):
return await self.lnwatcher.sweepstore.get_ctn(*args)
async def add_sweep_tx(self, *args):
return await self.lnwatcher.sweepstore.add_sweep_tx(*args)
class PayServer(Logger):
def __init__(self, daemon: 'Daemon'):
Logger.__init__(self)
self.daemon = daemon
self.config = daemon.config
self.pending = defaultdict(asyncio.Event)
self.daemon.network.register_callback(self.on_payment, ['payment_received'])
async def on_payment(self, evt, wallet, key, status):
if status == PR_PAID:
await self.pending[key].set()
@ignore_exceptions
@log_exceptions
async def run(self):
host = self.config.get('payserver_host', 'localhost')
port = self.config.get('payserver_port')
root = self.config.get('payserver_root', '/r')
app = web.Application()
app.add_routes([web.post('/api/create_invoice', self.create_request)])
app.add_routes([web.get('/api/get_invoice', self.get_request)])
app.add_routes([web.get('/api/get_status', self.get_status)])
app.add_routes([web.get('/bip70/{key}.bip70', self.get_bip70_request)])
app.add_routes([web.static(root, 'electrum/www')])
runner = web.AppRunner(app)
await runner.setup()
site = web.TCPSite(runner, port=port, host=host, ssl_context=self.config.get_ssl_context())
await site.start()
async def create_request(self, request):
params = await request.post()
wallet = self.daemon.wallet
if 'amount_sat' not in params or not params['amount_sat'].isdigit():
raise web.HTTPUnsupportedMediaType()
amount = int(params['amount_sat'])
message = params['message'] or "donation"
payment_hash = await wallet.lnworker._add_invoice_coro(amount, message, 3600)
key = payment_hash.hex()
raise web.HTTPFound(self.root + '/pay?id=' + key)
async def get_request(self, r):
key = r.query_string
request = self.daemon.wallet.get_request(key)
return web.json_response(request)
async def get_bip70_request(self, r):
from .paymentrequest import make_request
key = r.match_info['key']
request = self.daemon.wallet.get_request(key)
if not request:
return web.HTTPNotFound()
pr = make_request(self.config, request)
return web.Response(body=pr.SerializeToString(), content_type='application/bitcoin-paymentrequest')
async def get_status(self, request):
ws = web.WebSocketResponse()
await ws.prepare(request)
key = request.query_string
info = self.daemon.wallet.get_request(key)
if not info:
await ws.send_str('unknown invoice')
await ws.close()
return ws
if info.get('status') == PR_PAID:
await ws.send_str(f'paid')
await ws.close()
return ws
if info.get('status') == PR_EXPIRED:
await ws.send_str(f'expired')
await ws.close()
return ws
while True:
try:
await asyncio.wait_for(self.pending[key].wait(), 1)
break
except asyncio.TimeoutError:
# send data on the websocket, to keep it alive
await ws.send_str('waiting')
await ws.send_str('paid')
await ws.close()
return ws
class AuthenticationError(Exception):
pass
class AuthenticationInvalidOrMissing(AuthenticationError):
pass
class AuthenticationCredentialsInvalid(AuthenticationError):
pass
class Daemon(Logger):
@profiler
def __init__(self, config: SimpleConfig, fd=None, *, listen_jsonrpc=True):
Logger.__init__(self)
self.auth_lock = asyncio.Lock()
self.running = False
self.running_lock = threading.Lock()
self.config = config self.config = config
if fd is None and listen_jsonrpc: if config.get('offline'):
fd = get_file_descriptor(config) self.network = None
if fd is None:
raise Exception('failed to lock daemon; already running?')
self.asyncio_loop = asyncio.get_event_loop()
self.network = None
if not config.get('offline'):
self.network = Network(config, daemon=self)
self.fx = FxThread(config, self.network)
self.gui_object = None
# path -> wallet; make sure path is standardized.
self._wallets = {} # type: Dict[str, Abstract_Wallet]
daemon_jobs = []
# Setup JSONRPC server
if listen_jsonrpc:
daemon_jobs.append(self.start_jsonrpc(config, fd))
# request server
self.pay_server = None
if not config.get('offline') and self.config.get('run_payserver'):
self.pay_server = PayServer(self)
daemon_jobs.append(self.pay_server.run())
# server-side watchtower
self.watchtower = None
if not config.get('offline') and self.config.get('run_watchtower'):
self.watchtower = WatchTowerServer(self.network)
daemon_jobs.append(self.watchtower.run)
if self.network:
self.network.start(jobs=[self.fx.run])
self.taskgroup = TaskGroup()
asyncio.run_coroutine_threadsafe(self._run(jobs=daemon_jobs), self.asyncio_loop)
@log_exceptions
async def _run(self, jobs: Iterable = None):
if jobs is None:
jobs = []
try:
async with self.taskgroup as group:
[await group.spawn(job) for job in jobs]
await group.spawn(asyncio.Event().wait) # run forever (until cancel)
except BaseException as e:
self.logger.exception('daemon.taskgroup died.')
finally:
self.logger.info("stopping daemon.taskgroup")
async def authenticate(self, headers):
if self.rpc_password == '':
# RPC authentication is disabled
return
auth_string = headers.get('Authorization', None)
if auth_string is None:
raise AuthenticationInvalidOrMissing('CredentialsMissing')
basic, _, encoded = auth_string.partition(' ')
if basic != 'Basic':
raise AuthenticationInvalidOrMissing('UnsupportedType')
encoded = to_bytes(encoded, 'utf8')
credentials = to_string(b64decode(encoded), 'utf8')
username, _, password = credentials.partition(':')
if not (constant_time_compare(username, self.rpc_user)
and constant_time_compare(password, self.rpc_password)):
await asyncio.sleep(0.050)
raise AuthenticationCredentialsInvalid('Invalid Credentials')
async def handle(self, request):
async with self.auth_lock:
try:
await self.authenticate(request.headers)
except AuthenticationInvalidOrMissing:
return web.Response(headers={"WWW-Authenticate": "Basic realm=Electrum"},
text='Unauthorized', status=401)
except AuthenticationCredentialsInvalid:
return web.Response(text='Forbidden', status=403)
request = await request.text()
response = await jsonrpcserver.async_dispatch(request, methods=self.methods)
if isinstance(response, jsonrpcserver.response.ExceptionResponse):
self.logger.error(f"error handling request: {request}", exc_info=response.exc)
# this exposes the error message to the client
response.message = str(response.exc)
if response.wanted:
return web.json_response(response.deserialized(), status=response.http_status)
else: else:
return web.Response() self.network = Network(config)
self.network.start()
self.fx = FxThread(config, self.network)
if self.network:
self.network.add_jobs([self.fx])
self.gui = None
self.wallets = {}
# Setup JSONRPC server
self.init_server(config, fd, is_gui)
async def start_jsonrpc(self, config: SimpleConfig, fd): def init_server(self, config, fd, is_gui):
self.app = web.Application() host = config.get('rpchost', '127.0.0.1')
self.app.router.add_post("/", self.handle) port = config.get('rpcport', 0)
self.rpc_user, self.rpc_password = get_rpc_credentials(config)
self.methods = jsonrpcserver.methods.Methods() rpc_user, rpc_password = get_rpc_credentials(config)
self.methods.add(self.ping) try:
self.methods.add(self.gui) server = VerifyingJSONRPCServer((host, port), logRequests=False,
self.cmd_runner = Commands(config=self.config, network=self.network, daemon=self) rpc_user=rpc_user, rpc_password=rpc_password)
for cmdname in known_commands: except Exception as e:
self.methods.add(getattr(self.cmd_runner, cmdname)) self.print_error('Warning: cannot initialize RPC server on host', host, e)
self.methods.add(self.run_cmdline) self.server = None
self.host = config.get('rpchost', '127.0.0.1') os.close(fd)
self.port = config.get('rpcport', 0) return
self.runner = web.AppRunner(self.app) os.write(fd, bytes(repr((server.socket.getsockname(), time.time())), 'utf8'))
await self.runner.setup()
site = web.TCPSite(self.runner, self.host, self.port)
await site.start()
socket = site._server.sockets[0]
os.write(fd, bytes(repr((socket.getsockname(), time.time())), 'utf8'))
os.close(fd) os.close(fd)
self.server = server
server.timeout = 0.1
server.register_function(self.ping, 'ping')
if is_gui:
server.register_function(self.run_gui, 'gui')
else:
server.register_function(self.run_daemon, 'daemon')
self.cmd_runner = Commands(self.config, None, self.network)
for cmdname in known_commands:
server.register_function(getattr(self.cmd_runner, cmdname), cmdname)
server.register_function(self.run_cmdline, 'run_cmdline')
async def ping(self): def ping(self):
return True return True
async def gui(self, config_options): def run_daemon(self, config_options):
if self.gui_object: config = SimpleConfig(config_options)
if hasattr(self.gui_object, 'new_window'): sub = config.get('subcommand')
path = self.config.get_wallet_path(use_gui_last_wallet=True) assert sub in [None, 'start', 'stop', 'status', 'load_wallet', 'close_wallet']
self.gui_object.new_window(path, config_options.get('url')) if sub in [None, 'start']:
response = "ok" response = "Daemon already running"
elif sub == 'load_wallet':
path = config.get_wallet_path()
wallet = self.load_wallet(path, config.get('password'))
if wallet is not None:
self.cmd_runner.wallet = wallet
run_hook('load_wallet', wallet, None)
response = wallet is not None
elif sub == 'close_wallet':
path = config.get_wallet_path()
if path in self.wallets:
self.stop_wallet(path)
response = True
else: else:
response = "error: current GUI does not support multiple windows" response = False
elif sub == 'status':
if self.network:
p = self.network.get_parameters()
current_wallet = self.cmd_runner.wallet
current_wallet_path = current_wallet.storage.path \
if current_wallet else None
response = {
'path': self.network.config.path,
'server': p[0],
'blockchain_height': self.network.get_local_height(),
'server_height': self.network.get_server_height(),
'spv_nodes': len(self.network.get_interfaces()),
'connected': self.network.is_connected(),
'auto_connect': p[4],
'version': ELECTRUM_VERSION,
'wallets': {k: w.is_up_to_date()
for k, w in self.wallets.items()},
'current_wallet': current_wallet_path,
'fee_per_kb': self.config.fee_per_kb(),
}
else:
response = "Daemon offline"
elif sub == 'stop':
self.stop()
response = "Daemon stopped"
return response
def run_gui(self, config_options):
config = SimpleConfig(config_options)
if self.gui:
#if hasattr(self.gui, 'new_window'):
# path = config.get_wallet_path()
# self.gui.new_window(path, config.get('url'))
# response = "ok"
#else:
# response = "error: current GUI does not support multiple windows"
response = "error: Electrum GUI already running"
else: else:
response = "Error: Electrum is running in daemon mode. Please stop the daemon first." response = "Error: Electrum is running in daemon mode. Please stop the daemon first."
return response return response
def load_wallet(self, path, password, *, manual_upgrades=True) -> Optional[Abstract_Wallet]: def load_wallet(self, path, password):
path = standardize_path(path)
# wizard will be launched if we return # wizard will be launched if we return
if path in self._wallets: if path in self.wallets:
wallet = self._wallets[path] wallet = self.wallets[path]
return wallet return wallet
storage = WalletStorage(path) storage = WalletStorage(path, manual_upgrades=True)
if not storage.file_exists(): if not storage.file_exists():
return return
if storage.is_encrypted(): if storage.is_encrypted():
if not password: if not password:
return return
storage.decrypt(password) storage.decrypt(password)
# read data, pass it to db if storage.requires_split():
db = WalletDB(storage.read(), manual_upgrades=manual_upgrades)
if db.requires_split():
return return
if db.requires_upgrade(): if storage.get_action():
return return
if db.get_action(): wallet = Wallet(storage)
return wallet.start_threads(self.network)
wallet = Wallet(db, storage, config=self.config) self.wallets[path] = wallet
wallet.start_network(self.network)
self._wallets[path] = wallet
self.wallet = wallet
return wallet return wallet
def add_wallet(self, wallet: Abstract_Wallet) -> None: def add_wallet(self, wallet):
path = wallet.storage.path path = wallet.storage.path
path = standardize_path(path) self.wallets[path] = wallet
self._wallets[path] = wallet
def get_wallet(self, path: str) -> Abstract_Wallet: def get_wallet(self, path):
path = standardize_path(path) return self.wallets.get(path)
return self._wallets.get(path)
def get_wallets(self) -> Dict[str, Abstract_Wallet]: def stop_wallet(self, path):
return dict(self._wallets) # copy wallet = self.wallets.pop(path)
def delete_wallet(self, path: str) -> bool:
self.stop_wallet(path)
if os.path.exists(path):
os.unlink(path)
return True
return False
def stop_wallet(self, path: str) -> bool:
"""Returns True iff a wallet was found."""
path = standardize_path(path)
wallet = self._wallets.pop(path, None)
if not wallet:
return False
wallet.stop_threads() wallet.stop_threads()
return True
async def run_cmdline(self, config_options): def run_cmdline(self, config_options):
cmdname = config_options['cmd'] password = config_options.get('password')
new_password = config_options.get('new_password')
config = SimpleConfig(config_options)
# FIXME this is ugly...
config.fee_estimates = self.network.config.fee_estimates.copy()
config.mempool_fees = self.network.config.mempool_fees.copy()
cmdname = config.get('cmd')
cmd = known_commands[cmdname] cmd = known_commands[cmdname]
if cmd.requires_wallet:
path = config.get_wallet_path()
wallet = self.wallets.get(path)
if wallet is None:
return {'error': 'Wallet "%s" is not loaded. Use "electrum daemon load_wallet"'%os.path.basename(path) }
else:
wallet = None
# arguments passed to function # arguments passed to function
args = [config_options.get(x) for x in cmd.params] args = map(lambda x: config.get(x), cmd.params)
# decode json arguments # decode json arguments
args = [json_decode(i) for i in args] args = [json_decode(i) for i in args]
# options # options
kwargs = {} kwargs = {}
for x in cmd.options: for x in cmd.options:
kwargs[x] = config_options.get(x) kwargs[x] = (config_options.get(x) if x in ['password', 'new_password'] else config.get(x))
if cmd.requires_wallet: cmd_runner = Commands(config, wallet, self.network)
kwargs['wallet_path'] = config_options.get('wallet_path') func = getattr(cmd_runner, cmd.name)
func = getattr(self.cmd_runner, cmd.name) result = func(*args, **kwargs)
# fixme: not sure how to retrieve message in jsonrpcclient
try:
result = await func(*args, **kwargs)
except Exception as e:
result = {'error':str(e)}
return result return result
def run_daemon(self): def run(self):
self.running = True while self.is_running():
try: self.server.handle_request() if self.server else time.sleep(0.1)
while self.is_running(): for k, wallet in self.wallets.items():
time.sleep(0.1)
except KeyboardInterrupt:
self.running = False
self.on_stop()
def is_running(self):
with self.running_lock:
return self.running and not self.taskgroup.closed()
def stop(self):
with self.running_lock:
self.running = False
def on_stop(self):
if self.gui_object:
self.gui_object.stop()
# stop network/wallets
for k, wallet in self._wallets.items():
wallet.stop_threads() wallet.stop_threads()
if self.network: if self.network:
self.logger.info("shutting down network") self.print_error("shutting down network")
self.network.stop() self.network.stop()
self.logger.info("stopping taskgroup") self.network.join()
fut = asyncio.run_coroutine_threadsafe(self.taskgroup.cancel_remaining(), self.asyncio_loop) self.on_stop()
try:
fut.result(timeout=2)
except (asyncio.TimeoutError, asyncio.CancelledError):
pass
self.logger.info("removing lockfile")
remove_lockfile(get_lockfile(self.config))
self.logger.info("stopped")
def run_gui(self, config, plugins): def stop(self):
threading.current_thread().setName('GUI') self.print_error("stopping, removing lockfile")
remove_lockfile(get_lockfile(self.config))
DaemonThread.stop(self)
def init_gui(self, config, plugins):
gui_name = config.get('gui', 'qt') gui_name = config.get('gui', 'qt')
if gui_name in ['lite', 'classic']: if gui_name in ['lite', 'classic']:
gui_name = 'qt' gui_name = 'qt'
self.logger.info(f'launching GUI: {gui_name}')
gui = __import__('electrum.gui.' + gui_name, fromlist=['electrum']) gui = __import__('electrum.gui.' + gui_name, fromlist=['electrum'])
self.gui_object = gui.ElectrumGui(config, self, plugins) self.gui = gui.ElectrumGui(config, self, plugins)
try: try:
self.gui_object.main() self.gui.main()
except BaseException as e: except BaseException as e:
self.logger.exception('') traceback.print_exc(file=sys.stdout)
# app will exit now # app will exit now
self.on_stop()
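
The lockfile handling that get_file_descriptor() introduces above can be sketched roughly as follows. ping_daemon() stands in for the JSON-RPC 'ping' request and the lockfile path is only an example; the real code reads the host/port out of the lockfile before pinging.

import os

def ping_daemon() -> bool:
    """Stand-in for the JSON-RPC 'ping' request; True means a daemon answered."""
    return False

def get_file_descriptor(lockfile: str):
    """Return an fd for a freshly created lockfile, or None if a daemon is running."""
    while True:
        try:
            # O_EXCL makes creation atomic: only one process can win the race
            return os.open(lockfile, os.O_CREAT | os.O_EXCL | os.O_WRONLY, 0o644)
        except OSError:
            pass
        if ping_daemon():
            return None        # a live daemon already holds the lock
        os.unlink(lockfile)    # stale lockfile left behind: remove it and retry

lockfile = '/tmp/electrum-demo-daemon.lock'   # example path only
fd = get_file_descriptor(lockfile)
print('acquired lock' if fd is not None else 'daemon already running')
if fd is not None:
    os.close(fd)
    os.unlink(lockfile)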


@ -1,100 +0,0 @@
# Copyright (C) 2020 The Electrum developers
# Distributed under the MIT software license, see the accompanying
# file LICENCE or http://www.opensource.org/licenses/mit-license.php
import sys
import socket
import concurrent
from concurrent import futures
import ipaddress
from typing import Optional
import dns
import dns.resolver
from .logging import get_logger
_logger = get_logger(__name__)
_dns_threads_executor = None # type: Optional[concurrent.futures.Executor]
def configure_dns_depending_on_proxy(is_proxy: bool) -> None:
# Store this somewhere so we can un-monkey-patch:
if not hasattr(socket, "_getaddrinfo"):
socket._getaddrinfo = socket.getaddrinfo
if is_proxy:
# prevent dns leaks, see http://stackoverflow.com/questions/13184205/dns-over-proxy
socket.getaddrinfo = lambda *args: [(socket.AF_INET, socket.SOCK_STREAM, 6, '', (args[0], args[1]))]
else:
if sys.platform == 'win32':
# On Windows, socket.getaddrinfo takes a mutex, and might hold it for up to 10 seconds
# when dns-resolving. To speed it up drastically, we resolve dns ourselves, outside that lock.
# See https://github.com/spesmilo/electrum/issues/4421
_prepare_windows_dns_hack()
socket.getaddrinfo = _fast_getaddrinfo
else:
socket.getaddrinfo = socket._getaddrinfo
def _prepare_windows_dns_hack():
# enable dns cache
resolver = dns.resolver.get_default_resolver()
if resolver.cache is None:
resolver.cache = dns.resolver.Cache()
# prepare threads
global _dns_threads_executor
if _dns_threads_executor is None:
_dns_threads_executor = concurrent.futures.ThreadPoolExecutor(max_workers=20,
thread_name_prefix='dns_resolver')
def _fast_getaddrinfo(host, *args, **kwargs):
def needs_dns_resolving(host):
try:
ipaddress.ip_address(host)
return False # already valid IP
except ValueError:
pass # not an IP
if str(host) in ('localhost', 'localhost.',):
return False
return True
def resolve_with_dnspython(host):
addrs = []
expected_errors = (dns.resolver.NXDOMAIN, dns.resolver.NoAnswer,
concurrent.futures.CancelledError, concurrent.futures.TimeoutError)
ipv6_fut = _dns_threads_executor.submit(dns.resolver.query, host, dns.rdatatype.AAAA)
ipv4_fut = _dns_threads_executor.submit(dns.resolver.query, host, dns.rdatatype.A)
# try IPv6
try:
answers = ipv6_fut.result()
addrs += [str(answer) for answer in answers]
except expected_errors as e:
pass
except BaseException as e:
_logger.info(f'dnspython failed to resolve dns (AAAA) for {repr(host)} with error: {repr(e)}')
# try IPv4
try:
answers = ipv4_fut.result()
addrs += [str(answer) for answer in answers]
except expected_errors as e:
# dns failed for some reason, e.g. dns.resolver.NXDOMAIN this is normal.
# Simply report back failure; except if we already have some results.
if not addrs:
raise socket.gaierror(11001, 'getaddrinfo failed') from e
except BaseException as e:
# Possibly internal error in dnspython :( see #4483 and #5638
_logger.info(f'dnspython failed to resolve dns (A) for {repr(host)} with error: {repr(e)}')
if addrs:
return addrs
# Fall back to original socket.getaddrinfo to resolve dns.
return [host]
addrs = [host]
if needs_dns_resolving(host):
addrs = resolve_with_dnspython(host)
list_of_list_of_socketinfos = [socket._getaddrinfo(addr, *args, **kwargs) for addr in addrs]
list_of_socketinfos = [item for lst in list_of_list_of_socketinfos for item in lst]
return list_of_socketinfos
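
The proxy branch of configure_dns_depending_on_proxy() boils down to the monkey-patch below. This sketch keeps only that part (the Windows getaddrinfo speed-up via dnspython is omitted), and enable_proxy_dns is an invented wrapper name.

import socket

_original_getaddrinfo = socket.getaddrinfo

def enable_proxy_dns(enabled: bool) -> None:
    if enabled:
        # never resolve locally; hand the hostname through so the proxy resolves it
        socket.getaddrinfo = lambda *args: [
            (socket.AF_INET, socket.SOCK_STREAM, 6, '', (args[0], args[1]))
        ]
    else:
        socket.getaddrinfo = _original_getaddrinfo

enable_proxy_dns(True)
print(socket.getaddrinfo('example.invalid', 80))  # no DNS query is made
enable_proxy_dns(False)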


@ -101,8 +101,8 @@ def python_validate_rrsig(rrset, rrsig, keys, origin=None, now=None):
keyptr = keyptr[2:] keyptr = keyptr[2:]
rsa_e = keyptr[0:bytes] rsa_e = keyptr[0:bytes]
rsa_n = keyptr[bytes:] rsa_n = keyptr[bytes:]
n = int.from_bytes(rsa_n, byteorder='big', signed=False) n = ecdsa.util.string_to_number(rsa_n)
e = int.from_bytes(rsa_e, byteorder='big', signed=False) e = ecdsa.util.string_to_number(rsa_e)
pubkey = rsakey.RSAKey(n, e) pubkey = rsakey.RSAKey(n, e)
sig = rrsig.signature sig = rrsig.signature
@ -110,22 +110,24 @@ def python_validate_rrsig(rrset, rrsig, keys, origin=None, now=None):
if rrsig.algorithm == ECDSAP256SHA256: if rrsig.algorithm == ECDSAP256SHA256:
curve = ecdsa.curves.NIST256p curve = ecdsa.curves.NIST256p
key_len = 32 key_len = 32
digest_len = 32
elif rrsig.algorithm == ECDSAP384SHA384: elif rrsig.algorithm == ECDSAP384SHA384:
curve = ecdsa.curves.NIST384p curve = ecdsa.curves.NIST384p
key_len = 48 key_len = 48
digest_len = 48
else: else:
# shouldn't happen # shouldn't happen
raise ValidationFailure('unknown ECDSA curve') raise ValidationFailure('unknown ECDSA curve')
keyptr = candidate_key.key keyptr = candidate_key.key
x = int.from_bytes(keyptr[0:key_len], byteorder='big', signed=False) x = ecdsa.util.string_to_number(keyptr[0:key_len])
y = int.from_bytes(keyptr[key_len:key_len * 2], byteorder='big', signed=False) y = ecdsa.util.string_to_number(keyptr[key_len:key_len * 2])
assert ecdsa.ecdsa.point_is_valid(curve.generator, x, y) assert ecdsa.ecdsa.point_is_valid(curve.generator, x, y)
point = ecdsa.ellipticcurve.Point(curve.curve, x, y, curve.order) point = ecdsa.ellipticcurve.Point(curve.curve, x, y, curve.order)
verifying_key = ecdsa.keys.VerifyingKey.from_public_point(point, curve) verifying_key = ecdsa.keys.VerifyingKey.from_public_point(point, curve)
r = rrsig.signature[:key_len] r = rrsig.signature[:key_len]
s = rrsig.signature[key_len:] s = rrsig.signature[key_len:]
sig = ecdsa.ecdsa.Signature(int.from_bytes(r, byteorder='big', signed=False), sig = ecdsa.ecdsa.Signature(ecdsa.util.string_to_number(r),
int.from_bytes(s, byteorder='big', signed=False)) ecdsa.util.string_to_number(s))
else: else:
raise ValidationFailure('unknown algorithm %u' % rrsig.algorithm) raise ValidationFailure('unknown algorithm %u' % rrsig.algorithm)
@ -139,7 +141,7 @@ def python_validate_rrsig(rrset, rrsig, keys, origin=None, now=None):
rrnamebuf = rrname.to_digestable(origin) rrnamebuf = rrname.to_digestable(origin)
rrfixed = struct.pack('!HHI', rdataset.rdtype, rdataset.rdclass, rrfixed = struct.pack('!HHI', rdataset.rdtype, rdataset.rdclass,
rrsig.original_ttl) rrsig.original_ttl)
rrlist = sorted(rdataset) rrlist = sorted(rdataset);
for rr in rrlist: for rr in rrlist:
hash.update(rrnamebuf) hash.update(rrnamebuf)
hash.update(rrfixed) hash.update(rrfixed)
@ -156,7 +158,7 @@ def python_validate_rrsig(rrset, rrsig, keys, origin=None, now=None):
return return
elif _is_ecdsa(rrsig.algorithm): elif _is_ecdsa(rrsig.algorithm):
diglong = int.from_bytes(digest, byteorder='big', signed=False) diglong = ecdsa.util.string_to_number(digest)
if verifying_key.pubkey.verifies(diglong, sig): if verifying_key.pubkey.verifies(diglong, sig):
return return
@ -173,10 +175,7 @@ dns.dnssec.validate = dns.dnssec._validate
from .logging import get_logger from .util import print_error
_logger = get_logger(__name__)
# hard-coded trust anchors (root KSKs) # hard-coded trust anchors (root KSKs)
@ -265,7 +264,8 @@ def query(url, rtype):
out = get_and_validate(ns, url, rtype) out = get_and_validate(ns, url, rtype)
validated = True validated = True
except BaseException as e: except BaseException as e:
_logger.info(f"DNSSEC error: {repr(e)}") #traceback.print_exc(file=sys.stderr)
print_error("DNSSEC error:", str(e))
resolver = dns.resolver.get_default_resolver() resolver = dns.resolver.get_default_resolver()
out = resolver.query(url, rtype) out = resolver.query(url, rtype)
validated = False validated = False
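
The mechanical part of the dnssec change is replacing ecdsa.util.string_to_number with int.from_bytes. The two agree for unsigned big-endian byte strings, which the sketch below checks using a plain-Python reimplementation (so it runs without the ecdsa package).

def string_to_number(b: bytes) -> int:
    # what ecdsa.util.string_to_number computes, written out by hand
    n = 0
    for byte in b:
        n = (n << 8) | byte
    return n

raw = bytes.fromhex('00ff10abcdef')
assert string_to_number(raw) == int.from_bytes(raw, byteorder='big', signed=False)
print(int.from_bytes(raw, 'big'))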


@ -24,153 +24,186 @@
# SOFTWARE. # SOFTWARE.
import base64 import base64
import hmac
import hashlib import hashlib
import functools from typing import Union
from typing import Union, Tuple, Optional
from ctypes import (
byref, c_byte, c_int, c_uint, c_char_p, c_size_t, c_void_p, create_string_buffer,
CFUNCTYPE, POINTER, cast
)
from .util import bfh, bh2u, assert_bytes, to_bytes, InvalidPassword, profiler, randrange
from .crypto import (sha256d, aes_encrypt_with_iv, aes_decrypt_with_iv, hmac_oneshot)
from . import constants
from .logging import get_logger
from .ecc_fast import _libsecp256k1, SECP256K1_EC_UNCOMPRESSED
_logger = get_logger(__name__)
def string_to_number(b: bytes) -> int: import ecdsa
return int.from_bytes(b, byteorder='big', signed=False) from ecdsa.ecdsa import curve_secp256k1, generator_secp256k1
from ecdsa.curves import SECP256k1
from ecdsa.ellipticcurve import Point
from ecdsa.util import string_to_number, number_to_string
from .util import bfh, bh2u, assert_bytes, print_error, to_bytes, InvalidPassword, profiler
from .crypto import (Hash, aes_encrypt_with_iv, aes_decrypt_with_iv, hmac_oneshot)
from .ecc_fast import do_monkey_patching_of_python_ecdsa_internals_with_libsecp256k1
def sig_string_from_der_sig(der_sig: bytes) -> bytes: do_monkey_patching_of_python_ecdsa_internals_with_libsecp256k1()
r, s = get_r_and_s_from_der_sig(der_sig)
return sig_string_from_r_and_s(r, s) CURVE_ORDER = SECP256k1.order
def der_sig_from_sig_string(sig_string: bytes) -> bytes: def generator():
r, s = get_r_and_s_from_sig_string(sig_string) return ECPubkey.from_point(generator_secp256k1)
return der_sig_from_r_and_s(r, s)
def der_sig_from_r_and_s(r: int, s: int) -> bytes: def point_at_infinity():
sig_string = (int.to_bytes(r, length=32, byteorder="big") + return ECPubkey(None)
int.to_bytes(s, length=32, byteorder="big"))
sig = create_string_buffer(64)
ret = _libsecp256k1.secp256k1_ecdsa_signature_parse_compact(_libsecp256k1.ctx, sig, sig_string)
if not ret:
raise Exception("Bad signature")
ret = _libsecp256k1.secp256k1_ecdsa_signature_normalize(_libsecp256k1.ctx, sig, sig)
der_sig = create_string_buffer(80) # this much space should be enough
der_sig_size = c_size_t(len(der_sig))
ret = _libsecp256k1.secp256k1_ecdsa_signature_serialize_der(_libsecp256k1.ctx, der_sig, byref(der_sig_size), sig)
if not ret:
raise Exception("failed to serialize DER sig")
der_sig_size = der_sig_size.value
return bytes(der_sig)[:der_sig_size]
def get_r_and_s_from_der_sig(der_sig: bytes) -> Tuple[int, int]: def sig_string_from_der_sig(der_sig, order=CURVE_ORDER):
assert isinstance(der_sig, bytes) r, s = ecdsa.util.sigdecode_der(der_sig, order)
sig = create_string_buffer(64) return ecdsa.util.sigencode_string(r, s, order)
ret = _libsecp256k1.secp256k1_ecdsa_signature_parse_der(_libsecp256k1.ctx, sig, der_sig, len(der_sig))
if not ret:
raise Exception("Bad signature") def der_sig_from_sig_string(sig_string, order=CURVE_ORDER):
ret = _libsecp256k1.secp256k1_ecdsa_signature_normalize(_libsecp256k1.ctx, sig, sig) r, s = ecdsa.util.sigdecode_string(sig_string, order)
compact_signature = create_string_buffer(64) return ecdsa.util.sigencode_der_canonize(r, s, order)
_libsecp256k1.secp256k1_ecdsa_signature_serialize_compact(_libsecp256k1.ctx, compact_signature, sig)
r = int.from_bytes(compact_signature[:32], byteorder="big")
s = int.from_bytes(compact_signature[32:], byteorder="big") def der_sig_from_r_and_s(r, s, order=CURVE_ORDER):
return ecdsa.util.sigencode_der_canonize(r, s, order)
def get_r_and_s_from_der_sig(der_sig, order=CURVE_ORDER):
r, s = ecdsa.util.sigdecode_der(der_sig, order)
return r, s return r, s
def get_r_and_s_from_sig_string(sig_string: bytes) -> Tuple[int, int]: def get_r_and_s_from_sig_string(sig_string, order=CURVE_ORDER):
if not (isinstance(sig_string, bytes) and len(sig_string) == 64): r, s = ecdsa.util.sigdecode_string(sig_string, order)
raise Exception("sig_string must be bytes, and 64 bytes exactly")
sig = create_string_buffer(64)
ret = _libsecp256k1.secp256k1_ecdsa_signature_parse_compact(_libsecp256k1.ctx, sig, sig_string)
if not ret:
raise Exception("Bad signature")
ret = _libsecp256k1.secp256k1_ecdsa_signature_normalize(_libsecp256k1.ctx, sig, sig)
compact_signature = create_string_buffer(64)
_libsecp256k1.secp256k1_ecdsa_signature_serialize_compact(_libsecp256k1.ctx, compact_signature, sig)
r = int.from_bytes(compact_signature[:32], byteorder="big")
s = int.from_bytes(compact_signature[32:], byteorder="big")
return r, s return r, s
def sig_string_from_r_and_s(r: int, s: int) -> bytes: def sig_string_from_r_and_s(r, s, order=CURVE_ORDER):
sig_string = (int.to_bytes(r, length=32, byteorder="big") + return ecdsa.util.sigencode_string_canonize(r, s, order)
int.to_bytes(s, length=32, byteorder="big"))
sig = create_string_buffer(64)
ret = _libsecp256k1.secp256k1_ecdsa_signature_parse_compact(_libsecp256k1.ctx, sig, sig_string)
if not ret:
raise Exception("Bad signature")
ret = _libsecp256k1.secp256k1_ecdsa_signature_normalize(_libsecp256k1.ctx, sig, sig)
compact_signature = create_string_buffer(64)
_libsecp256k1.secp256k1_ecdsa_signature_serialize_compact(_libsecp256k1.ctx, compact_signature, sig)
return bytes(compact_signature)
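
A brief aside on what these conversion helpers move between: a 64-byte "compact" signature is r and s as fixed-width big-endian integers, while DER wraps the same pair in ASN.1 SEQUENCE/INTEGER framing. The toy encoders below only illustrate the two formats; the real code above delegates parsing, normalization and serialization to libsecp256k1.

def sig_string_from_r_and_s(r: int, s: int) -> bytes:
    return r.to_bytes(32, 'big') + s.to_bytes(32, 'big')

def der_int(x: int) -> bytes:
    # minimal big-endian encoding, padded with 0x00 if the top bit is set
    b = x.to_bytes((x.bit_length() + 8) // 8 or 1, 'big')
    return b'\x02' + bytes([len(b)]) + b

def der_sig_from_r_and_s(r: int, s: int) -> bytes:
    body = der_int(r) + der_int(s)
    return b'\x30' + bytes([len(body)]) + body

r, s = 0x11, 0x22
print(sig_string_from_r_and_s(r, s).hex())
print(der_sig_from_r_and_s(r, s).hex())   # 3006020111020122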
def _x_and_y_from_pubkey_bytes(pubkey: bytes) -> Tuple[int, int]: def point_to_ser(P, compressed=True) -> bytes:
pubkey_ptr = create_string_buffer(64) if isinstance(P, tuple):
assert isinstance(pubkey, bytes), f'pubkey must be bytes, not {type(pubkey)}' assert len(P) == 2, 'unexpected point: %s' % P
ret = _libsecp256k1.secp256k1_ec_pubkey_parse( x, y = P
_libsecp256k1.ctx, pubkey_ptr, pubkey, len(pubkey)) else:
if not ret: x, y = P.x(), P.y()
raise InvalidECPointException('public key could not be parsed or is invalid') if x is None or y is None: # infinity
return None
if compressed:
return bfh(('%02x' % (2+(y&1))) + ('%064x' % x))
return bfh('04'+('%064x' % x)+('%064x' % y))
pubkey_serialized = create_string_buffer(65)
pubkey_size = c_size_t(65) def get_y_coord_from_x(x, odd=True):
_libsecp256k1.secp256k1_ec_pubkey_serialize( curve = curve_secp256k1
_libsecp256k1.ctx, pubkey_serialized, byref(pubkey_size), pubkey_ptr, SECP256K1_EC_UNCOMPRESSED) _p = curve.p()
pubkey_serialized = bytes(pubkey_serialized) _a = curve.a()
assert pubkey_serialized[0] == 0x04, pubkey_serialized _b = curve.b()
x = int.from_bytes(pubkey_serialized[1:33], byteorder='big', signed=False) for offset in range(128):
y = int.from_bytes(pubkey_serialized[33:65], byteorder='big', signed=False) Mx = x + offset
return x, y My2 = pow(Mx, 3, _p) + _a * pow(Mx, 2, _p) + _b % _p
My = pow(My2, (_p + 1) // 4, _p)
if curve.contains_point(Mx, My):
if odd == bool(My & 1):
return My
return _p - My
raise Exception('ECC_YfromX: No Y found')
def ser_to_point(ser: bytes) -> (int, int):
if ser[0] not in (0x02, 0x03, 0x04):
raise ValueError('Unexpected first byte: {}'.format(ser[0]))
if ser[0] == 0x04:
return string_to_number(ser[1:33]), string_to_number(ser[33:])
x = string_to_number(ser[1:])
return x, get_y_coord_from_x(x, ser[0] == 0x03)
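ser_to_point relies on get_y_coord_from_x to decompress a 02/03-prefixed key: on secp256k1, y^2 = x^3 + 7 (mod p), and because p ≡ 3 (mod 4) a square root is pow(y_sq, (p + 1) // 4, p). A standalone sketch of that decompression, with the standard curve constants written out for illustration:

P = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F  # field prime
B = 7                                                                     # curve constant in y^2 = x^3 + B

def decompress_pubkey(ser: bytes) -> (int, int):
    assert len(ser) == 33 and ser[0] in (0x02, 0x03)
    x = int.from_bytes(ser[1:], 'big')
    y_sq = (pow(x, 3, P) + B) % P
    y = pow(y_sq, (P + 1) // 4, P)        # valid square root because P % 4 == 3
    if pow(y, 2, P) != y_sq:
        raise ValueError('x is not the x-coordinate of a curve point')
    if bool(y & 1) != (ser[0] == 0x03):   # pick the root matching the requested parity
        y = P - y
    return x, y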
def _ser_to_python_ecdsa_point(ser: bytes) -> ecdsa.ellipticcurve.Point:
x, y = ser_to_point(ser)
try:
return Point(curve_secp256k1, x, y, CURVE_ORDER)
except:
raise InvalidECPointException()
class InvalidECPointException(Exception): class InvalidECPointException(Exception):
"""e.g. not on curve, or infinity""" """e.g. not on curve, or infinity"""
@functools.total_ordering class _MyVerifyingKey(ecdsa.VerifyingKey):
@classmethod
def from_signature(klass, sig, recid, h, curve): # TODO use libsecp??
""" See http://www.secg.org/download/aid-780/sec1-v2.pdf, chapter 4.1.6 """
from ecdsa import util, numbertheory
from . import msqr
curveFp = curve.curve
G = curve.generator
order = G.order()
# extract r,s from signature
r, s = util.sigdecode_string(sig, order)
# 1.1
x = r + (recid//2) * order
# 1.3
alpha = ( x * x * x + curveFp.a() * x + curveFp.b() ) % curveFp.p()
beta = msqr.modular_sqrt(alpha, curveFp.p())
y = beta if (beta - recid) % 2 == 0 else curveFp.p() - beta
# 1.4 the constructor checks that nR is at infinity
try:
R = Point(curveFp, x, y, order)
except:
raise InvalidECPointException()
# 1.5 compute e from message:
e = string_to_number(h)
minus_e = -e % order
# 1.6 compute Q = r^-1 (sR - eG)
inv_r = numbertheory.inverse_mod(r,order)
try:
Q = inv_r * ( s * R + minus_e * G )
except:
raise InvalidECPointException()
return klass.from_public_point( Q, curve )
class _MySigningKey(ecdsa.SigningKey):
"""Enforce low S values in signatures"""
def sign_number(self, number, entropy=None, k=None):
r, s = ecdsa.SigningKey.sign_number(self, number, entropy, k)
if s > CURVE_ORDER//2:
s = CURVE_ORDER - s
return r, s
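Both signing paths canonicalize to "low S": when s falls in the upper half of the group order it is replaced by CURVE_ORDER - s, which is still a valid signature for the same key and message but gives a unique encoding. The rule in isolation:

CURVE_ORDER = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141

def to_low_s(r: int, s: int) -> (int, int):
    # (r, s) and (r, n - s) both verify; keeping s <= n // 2 makes the encoding canonical
    if s > CURVE_ORDER // 2:
        s = CURVE_ORDER - s
    return r, s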
class _PubkeyForPointAtInfinity:
point = ecdsa.ellipticcurve.INFINITY
class ECPubkey(object): class ECPubkey(object):
def __init__(self, b: Optional[bytes]): def __init__(self, b: bytes):
if b is not None: if b is not None:
assert isinstance(b, (bytes, bytearray)), f'pubkey must be bytes-like, not {type(b)}' assert_bytes(b)
if isinstance(b, bytearray): point = _ser_to_python_ecdsa_point(b)
b = bytes(b) self._pubkey = ecdsa.ecdsa.Public_key(generator_secp256k1, point)
self._x, self._y = _x_and_y_from_pubkey_bytes(b)
else: else:
self._x, self._y = None, None self._pubkey = _PubkeyForPointAtInfinity()
@classmethod @classmethod
def from_sig_string(cls, sig_string: bytes, recid: int, msg_hash: bytes) -> 'ECPubkey': def from_sig_string(cls, sig_string: bytes, recid: int, msg_hash: bytes):
assert_bytes(sig_string) assert_bytes(sig_string)
if len(sig_string) != 64: if len(sig_string) != 64:
raise Exception(f'wrong encoding used for signature? len={len(sig_string)} (should be 64)') raise Exception('Wrong encoding')
if recid < 0 or recid > 3: if recid < 0 or recid > 3:
raise ValueError('recid is {}, but should be 0 <= recid <= 3'.format(recid)) raise ValueError('recid is {}, but should be 0 <= recid <= 3'.format(recid))
sig65 = create_string_buffer(65) ecdsa_verifying_key = _MyVerifyingKey.from_signature(sig_string, recid, msg_hash, curve=SECP256k1)
ret = _libsecp256k1.secp256k1_ecdsa_recoverable_signature_parse_compact( ecdsa_point = ecdsa_verifying_key.pubkey.point
_libsecp256k1.ctx, sig65, sig_string, recid) return ECPubkey.from_point(ecdsa_point)
if not ret:
raise Exception('failed to parse signature')
pubkey = create_string_buffer(64)
ret = _libsecp256k1.secp256k1_ecdsa_recover(_libsecp256k1.ctx, pubkey, sig65, msg_hash)
if not ret:
raise InvalidECPointException('failed to recover public key')
return ECPubkey._from_libsecp256k1_pubkey_ptr(pubkey)
@classmethod @classmethod
def from_signature65(cls, sig: bytes, msg_hash: bytes) -> Tuple['ECPubkey', bool]: def from_signature65(cls, sig: bytes, msg_hash: bytes):
if len(sig) != 65: if len(sig) != 65:
raise Exception(f'wrong encoding used for signature? len={len(sig)} (should be 65)') raise Exception("Wrong encoding")
nV = sig[0] nV = sig[0]
if nV < 27 or nV >= 35: if nV < 27 or nV >= 35:
raise Exception("Bad encoding") raise Exception("Bad encoding")
@@ -183,70 +216,25 @@ class ECPubkey(object):
return cls.from_sig_string(sig[1:], recid, msg_hash), compressed return cls.from_sig_string(sig[1:], recid, msg_hash), compressed
@classmethod @classmethod
def from_x_and_y(cls, x: int, y: int) -> 'ECPubkey': def from_point(cls, point):
_bytes = (b'\x04' _bytes = point_to_ser(point, compressed=False) # faster than compressed
+ int.to_bytes(x, length=32, byteorder='big', signed=False)
+ int.to_bytes(y, length=32, byteorder='big', signed=False))
return ECPubkey(_bytes) return ECPubkey(_bytes)
def get_public_key_bytes(self, compressed=True): def get_public_key_bytes(self, compressed=True):
if self.is_at_infinity(): raise Exception('point is at infinity') if self.is_at_infinity(): raise Exception('point is at infinity')
x = int.to_bytes(self.x(), length=32, byteorder='big', signed=False) return point_to_ser(self.point(), compressed)
y = int.to_bytes(self.y(), length=32, byteorder='big', signed=False)
if compressed:
header = b'\x03' if self.y() & 1 else b'\x02'
return header + x
else:
header = b'\x04'
return header + x + y
def get_public_key_hex(self, compressed=True): def get_public_key_hex(self, compressed=True):
return bh2u(self.get_public_key_bytes(compressed)) return bh2u(self.get_public_key_bytes(compressed))
def point(self) -> Tuple[int, int]: def point(self) -> (int, int):
return self.x(), self.y() return self._pubkey.point.x(), self._pubkey.point.y()
def x(self) -> int:
return self._x
def y(self) -> int:
return self._y
def _to_libsecp256k1_pubkey_ptr(self):
pubkey = create_string_buffer(64)
public_pair_bytes = self.get_public_key_bytes(compressed=False)
ret = _libsecp256k1.secp256k1_ec_pubkey_parse(
_libsecp256k1.ctx, pubkey, public_pair_bytes, len(public_pair_bytes))
if not ret:
raise Exception('public key could not be parsed or is invalid')
return pubkey
@classmethod
def _from_libsecp256k1_pubkey_ptr(cls, pubkey) -> 'ECPubkey':
pubkey_serialized = create_string_buffer(65)
pubkey_size = c_size_t(65)
_libsecp256k1.secp256k1_ec_pubkey_serialize(
_libsecp256k1.ctx, pubkey_serialized, byref(pubkey_size), pubkey, SECP256K1_EC_UNCOMPRESSED)
return ECPubkey(bytes(pubkey_serialized))
def __repr__(self):
if self.is_at_infinity():
return f"<ECPubkey infinity>"
return f"<ECPubkey {self.get_public_key_hex()}>"
def __mul__(self, other: int): def __mul__(self, other: int):
if not isinstance(other, int): if not isinstance(other, int):
raise TypeError('multiplication not defined for ECPubkey and {}'.format(type(other))) raise TypeError('multiplication not defined for ECPubkey and {}'.format(type(other)))
ecdsa_point = self._pubkey.point * other
other %= CURVE_ORDER return self.from_point(ecdsa_point)
if self.is_at_infinity() or other == 0:
return POINT_AT_INFINITY
pubkey = self._to_libsecp256k1_pubkey_ptr()
ret = _libsecp256k1.secp256k1_ec_pubkey_tweak_mul(_libsecp256k1.ctx, pubkey, other.to_bytes(32, byteorder="big"))
if not ret:
return POINT_AT_INFINITY
return ECPubkey._from_libsecp256k1_pubkey_ptr(pubkey)
def __rmul__(self, other: int): def __rmul__(self, other: int):
return self * other return self * other
@@ -254,40 +242,19 @@ class ECPubkey(object):
def __add__(self, other): def __add__(self, other):
if not isinstance(other, ECPubkey): if not isinstance(other, ECPubkey):
raise TypeError('addition not defined for ECPubkey and {}'.format(type(other))) raise TypeError('addition not defined for ECPubkey and {}'.format(type(other)))
if self.is_at_infinity(): return other ecdsa_point = self._pubkey.point + other._pubkey.point
if other.is_at_infinity(): return self return self.from_point(ecdsa_point)
pubkey1 = self._to_libsecp256k1_pubkey_ptr() def __eq__(self, other):
pubkey2 = other._to_libsecp256k1_pubkey_ptr() return self._pubkey.point.x() == other._pubkey.point.x() \
pubkey_sum = create_string_buffer(64) and self._pubkey.point.y() == other._pubkey.point.y()
pubkey1 = cast(pubkey1, c_char_p)
pubkey2 = cast(pubkey2, c_char_p)
array_of_pubkey_ptrs = (c_char_p * 2)(pubkey1, pubkey2)
ret = _libsecp256k1.secp256k1_ec_pubkey_combine(_libsecp256k1.ctx, pubkey_sum, array_of_pubkey_ptrs, 2)
if not ret:
return POINT_AT_INFINITY
return ECPubkey._from_libsecp256k1_pubkey_ptr(pubkey_sum)
def __eq__(self, other) -> bool:
if not isinstance(other, ECPubkey):
return False
return self.point() == other.point()
def __ne__(self, other): def __ne__(self, other):
return not (self == other) return not (self == other)
def __hash__(self): def verify_message_for_address(self, sig65: bytes, message: bytes) -> None:
return hash(self.point())
def __lt__(self, other):
if not isinstance(other, ECPubkey):
raise TypeError('comparison not defined for ECPubkey and {}'.format(type(other)))
return (self.x() or 0) < (other.x() or 0)
def verify_message_for_address(self, sig65: bytes, message: bytes, algo=lambda x: sha256d(msg_magic(x))) -> None:
assert_bytes(message) assert_bytes(message)
h = algo(message) h = Hash(msg_magic(message))
public_key, compressed = self.from_signature65(sig65, h) public_key, compressed = self.from_signature65(sig65, h)
# check public key # check public key
if public_key != self: if public_key != self:
@@ -295,31 +262,23 @@ class ECPubkey(object):
# check message # check message
self.verify_message_hash(sig65[1:], h) self.verify_message_hash(sig65[1:], h)
# TODO return bool instead of raising
def verify_message_hash(self, sig_string: bytes, msg_hash: bytes) -> None: def verify_message_hash(self, sig_string: bytes, msg_hash: bytes) -> None:
assert_bytes(sig_string) assert_bytes(sig_string)
if len(sig_string) != 64: if len(sig_string) != 64:
raise Exception(f'wrong encoding used for signature? len={len(sig_string)} (should be 64)') raise Exception('Wrong encoding')
if not (isinstance(msg_hash, bytes) and len(msg_hash) == 32): ecdsa_point = self._pubkey.point
raise Exception("msg_hash must be bytes, and 32 bytes exactly") verifying_key = _MyVerifyingKey.from_public_point(ecdsa_point, curve=SECP256k1)
verifying_key.verify_digest(sig_string, msg_hash, sigdecode=ecdsa.util.sigdecode_string)
sig = create_string_buffer(64) def encrypt_message(self, message: bytes, magic: bytes = b'BIE1'):
ret = _libsecp256k1.secp256k1_ecdsa_signature_parse_compact(_libsecp256k1.ctx, sig, sig_string)
if not ret:
raise Exception("Bad signature")
ret = _libsecp256k1.secp256k1_ecdsa_signature_normalize(_libsecp256k1.ctx, sig, sig)
pubkey = self._to_libsecp256k1_pubkey_ptr()
if 1 != _libsecp256k1.secp256k1_ecdsa_verify(_libsecp256k1.ctx, sig, msg_hash, pubkey):
raise Exception("Bad signature")
def encrypt_message(self, message: bytes, magic: bytes = b'BIE1') -> bytes:
""" """
ECIES encryption/decryption methods; AES-128-CBC with PKCS7 is used as the cipher; hmac-sha256 is used as the mac ECIES encryption/decryption methods; AES-128-CBC with PKCS7 is used as the cipher; hmac-sha256 is used as the mac
""" """
assert_bytes(message) assert_bytes(message)
ephemeral = ECPrivkey.generate_random_key() randint = ecdsa.util.randrange(CURVE_ORDER)
ephemeral_exponent = number_to_string(randint, CURVE_ORDER)
ephemeral = ECPrivkey(ephemeral_exponent)
ecdh_key = (self * ephemeral.secret_scalar).get_public_key_bytes(compressed=True) ecdh_key = (self * ephemeral.secret_scalar).get_public_key_bytes(compressed=True)
key = hashlib.sha512(ecdh_key).digest() key = hashlib.sha512(ecdh_key).digest()
iv, key_e, key_m = key[0:16], key[16:32], key[32:] iv, key_e, key_m = key[0:16], key[16:32], key[32:]
@@ -335,47 +294,25 @@ class ECPubkey(object):
return CURVE_ORDER return CURVE_ORDER
def is_at_infinity(self): def is_at_infinity(self):
return self == POINT_AT_INFINITY return self == point_at_infinity()
@classmethod
def is_pubkey_bytes(cls, b: bytes):
try:
ECPubkey(b)
return True
except:
return False
GENERATOR = ECPubkey(bytes.fromhex('0479be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798'
'483ada7726a3c4655da4fbfc0e1108a8fd17b448a68554199c47d08ffb10d4b8'))
CURVE_ORDER = 0xFFFFFFFF_FFFFFFFF_FFFFFFFF_FFFFFFFE_BAAEDCE6_AF48A03B_BFD25E8C_D0364141
POINT_AT_INFINITY = ECPubkey(None)
def msg_magic(message: bytes) -> bytes: def msg_magic(message: bytes) -> bytes:
from .bitcoin import var_int from .bitcoin import var_int
length = bfh(var_int(len(message))) length = bfh(var_int(len(message)))
return b"\x18LBRYcrd Signed Message:\n" + length + message return b"\x18Bitcoin Signed Message:\n" + length + message
def verify_signature(pubkey: bytes, sig: bytes, h: bytes) -> bool: def verify_message_with_address(address: str, sig65: bytes, message: bytes):
try:
ECPubkey(pubkey).verify_message_hash(sig, h)
except:
return False
return True
def verify_message_with_address(address: str, sig65: bytes, message: bytes, *, net=None):
from .bitcoin import pubkey_to_address from .bitcoin import pubkey_to_address
assert_bytes(sig65, message) assert_bytes(sig65, message)
if net is None: net = constants.net
try: try:
h = sha256d(msg_magic(message)) h = Hash(msg_magic(message))
public_key, compressed = ECPubkey.from_signature65(sig65, h) public_key, compressed = ECPubkey.from_signature65(sig65, h)
# check public key using the address # check public key using the address
pubkey_hex = public_key.get_public_key_hex(compressed) pubkey_hex = public_key.get_public_key_hex(compressed)
for txin_type in ['p2pkh','p2wpkh','p2wpkh-p2sh']: for txin_type in ['p2pkh','p2wpkh','p2wpkh-p2sh']:
addr = pubkey_to_address(txin_type, pubkey_hex, net=net) addr = pubkey_to_address(txin_type, pubkey_hex)
if address == addr: if address == addr:
break break
else: else:
@@ -384,7 +321,7 @@ def verify_message_with_address(address: str, sig65: bytes, message: bytes, *, net=None):
public_key.verify_message_hash(sig65[1:], h) public_key.verify_message_hash(sig65[1:], h)
return True return True
except Exception as e: except Exception as e:
_logger.info(f"Verification error: {repr(e)}") print_error("Verification error: {0}".format(e))
return False return False
@@ -405,12 +342,13 @@ class ECPrivkey(ECPubkey):
raise InvalidECPointException('Invalid secret scalar (not within curve order)') raise InvalidECPointException('Invalid secret scalar (not within curve order)')
self.secret_scalar = secret self.secret_scalar = secret
pubkey = GENERATOR * secret point = generator_secp256k1 * secret
super().__init__(pubkey.get_public_key_bytes(compressed=False)) super().__init__(point_to_ser(point))
self._privkey = ecdsa.ecdsa.Private_key(self._pubkey, secret)
@classmethod @classmethod
def from_secret_scalar(cls, secret_scalar: int): def from_secret_scalar(cls, secret_scalar: int):
secret_bytes = int.to_bytes(secret_scalar, length=32, byteorder='big', signed=False) secret_bytes = number_to_string(secret_scalar, CURVE_ORDER)
return ECPrivkey(secret_bytes) return ECPrivkey(secret_bytes)
@classmethod @classmethod
@@ -426,64 +364,32 @@ class ECPrivkey(ECPubkey):
scalar = string_to_number(privkey_bytes) % CURVE_ORDER scalar = string_to_number(privkey_bytes) % CURVE_ORDER
if scalar == 0: if scalar == 0:
raise Exception('invalid EC private key scalar: zero') raise Exception('invalid EC private key scalar: zero')
privkey_32bytes = int.to_bytes(scalar, length=32, byteorder='big', signed=False) privkey_32bytes = number_to_string(scalar, CURVE_ORDER)
return privkey_32bytes return privkey_32bytes
def __repr__(self): def sign(self, data: bytes, sigencode=None, sigdecode=None) -> bytes:
return f"<ECPrivkey {self.get_public_key_hex()}>"
@classmethod
def generate_random_key(cls):
randint = randrange(CURVE_ORDER)
ephemeral_exponent = int.to_bytes(randint, length=32, byteorder='big', signed=False)
return ECPrivkey(ephemeral_exponent)
def get_secret_bytes(self) -> bytes:
return int.to_bytes(self.secret_scalar, length=32, byteorder='big', signed=False)
def sign(self, msg_hash: bytes, sigencode=None) -> bytes:
if not (isinstance(msg_hash, bytes) and len(msg_hash) == 32):
raise Exception("msg_hash to be signed must be bytes, and 32 bytes exactly")
if sigencode is None: if sigencode is None:
sigencode = sig_string_from_r_and_s sigencode = sig_string_from_r_and_s
if sigdecode is None:
privkey_bytes = self.secret_scalar.to_bytes(32, byteorder="big") sigdecode = get_r_and_s_from_sig_string
nonce_function = None private_key = _MySigningKey.from_secret_exponent(self.secret_scalar, curve=SECP256k1)
sig = create_string_buffer(64) sig = private_key.sign_digest_deterministic(data, hashfunc=hashlib.sha256, sigencode=sigencode)
def sign_with_extra_entropy(extra_entropy): public_key = private_key.get_verifying_key()
ret = _libsecp256k1.secp256k1_ecdsa_sign( if not public_key.verify_digest(sig, data, sigdecode=sigdecode):
_libsecp256k1.ctx, sig, msg_hash, privkey_bytes, raise Exception('Sanity check verifying our own signature failed.')
nonce_function, extra_entropy)
if not ret:
raise Exception('the nonce generation function failed, or the private key was invalid')
compact_signature = create_string_buffer(64)
_libsecp256k1.secp256k1_ecdsa_signature_serialize_compact(_libsecp256k1.ctx, compact_signature, sig)
r = int.from_bytes(compact_signature[:32], byteorder="big")
s = int.from_bytes(compact_signature[32:], byteorder="big")
return r, s
r, s = sign_with_extra_entropy(extra_entropy=None)
counter = 0
while r >= 2**255: # grind for low R value https://github.com/bitcoin/bitcoin/pull/13666
counter += 1
extra_entropy = counter.to_bytes(32, byteorder="little")
r, s = sign_with_extra_entropy(extra_entropy=extra_entropy)
sig_string = sig_string_from_r_and_s(r, s)
self.verify_message_hash(sig_string, msg_hash)
sig = sigencode(r, s)
return sig return sig
def sign_transaction(self, hashed_preimage: bytes) -> bytes: def sign_transaction(self, hashed_preimage: bytes) -> bytes:
return self.sign(hashed_preimage, sigencode=der_sig_from_r_and_s) return self.sign(hashed_preimage,
sigencode=der_sig_from_r_and_s,
sigdecode=get_r_and_s_from_der_sig)
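At the lowest level, ECPrivkey.sign takes a 32-byte digest and returns the compact 64-byte encoding (or DER via sign_transaction), and verify_message_hash checks it against the corresponding public key. A minimal sketch; the module paths for ecc and the sha256d helper are assumptions:

from electrum.ecc import ECPrivkey     # module path assumed
from electrum.crypto import sha256d    # assumed location of the double-SHA256 helper

priv = ECPrivkey.generate_random_key()
digest = sha256d(b'some payload')
sig64 = priv.sign(digest)                 # r||s, 64 bytes, low-S (and low-R grinding on the master side)
priv.verify_message_hash(sig64, digest)   # raises if the signature does not verify
der_sig = priv.sign_transaction(digest)   # same signature, DER-encoded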
def sign_message(self, message: bytes, is_compressed: bool, algo=lambda x: sha256d(msg_magic(x))) -> bytes: def sign_message(self, message: bytes, is_compressed: bool) -> bytes:
def bruteforce_recid(sig_string): def bruteforce_recid(sig_string):
for recid in range(4): for recid in range(4):
sig65 = construct_sig65(sig_string, recid, is_compressed) sig65 = construct_sig65(sig_string, recid, is_compressed)
try: try:
self.verify_message_for_address(sig65, message, algo) self.verify_message_for_address(sig65, message)
return sig65, recid return sig65, recid
except Exception as e: except Exception as e:
continue continue
@@ -491,13 +397,15 @@ class ECPrivkey(ECPubkey):
raise Exception("error: cannot sign message. no recid fits..") raise Exception("error: cannot sign message. no recid fits..")
message = to_bytes(message, 'utf8') message = to_bytes(message, 'utf8')
msg_hash = algo(message) msg_hash = Hash(msg_magic(message))
sig_string = self.sign(msg_hash, sigencode=sig_string_from_r_and_s) sig_string = self.sign(msg_hash,
sigencode=sig_string_from_r_and_s,
sigdecode=get_r_and_s_from_sig_string)
sig65, recid = bruteforce_recid(sig_string) sig65, recid = bruteforce_recid(sig_string)
return sig65 return sig65
def decrypt_message(self, encrypted: Union[str, bytes], magic: bytes=b'BIE1') -> bytes: def decrypt_message(self, encrypted, magic=b'BIE1'):
encrypted = base64.b64decode(encrypted) # type: bytes encrypted = base64.b64decode(encrypted)
if len(encrypted) < 85: if len(encrypted) < 85:
raise Exception('invalid ciphertext: length') raise Exception('invalid ciphertext: length')
magic_found = encrypted[:4] magic_found = encrypted[:4]
@@ -507,9 +415,12 @@ class ECPrivkey(ECPubkey):
if magic_found != magic: if magic_found != magic:
raise Exception('invalid ciphertext: invalid magic bytes') raise Exception('invalid ciphertext: invalid magic bytes')
try: try:
ephemeral_pubkey = ECPubkey(ephemeral_pubkey_bytes) ecdsa_point = _ser_to_python_ecdsa_point(ephemeral_pubkey_bytes)
except InvalidECPointException as e: except AssertionError as e:
raise Exception('invalid ciphertext: invalid ephemeral pubkey') from e raise Exception('invalid ciphertext: invalid ephemeral pubkey') from e
if not ecdsa.ecdsa.point_is_valid(generator_secp256k1, ecdsa_point.x(), ecdsa_point.y()):
raise Exception('invalid ciphertext: invalid ephemeral pubkey')
ephemeral_pubkey = ECPubkey.from_point(ecdsa_point)
ecdh_key = (ephemeral_pubkey * self.secret_scalar).get_public_key_bytes(compressed=True) ecdh_key = (ephemeral_pubkey * self.secret_scalar).get_public_key_bytes(compressed=True)
key = hashlib.sha512(ecdh_key).digest() key = hashlib.sha512(ecdh_key).digest()
iv, key_e, key_m = key[0:16], key[16:32], key[32:] iv, key_e, key_m = key[0:16], key[16:32], key[32:]
@@ -518,6 +429,6 @@ class ECPrivkey(ECPubkey):
return aes_decrypt_with_iv(key_e, iv, ciphertext) return aes_decrypt_with_iv(key_e, iv, ciphertext)
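Together, encrypt_message and decrypt_message implement the "BIE1" ECIES construction: an ephemeral key is multiplied with the recipient key, and SHA-512 of the shared point yields the AES-128-CBC IV and key plus the HMAC-SHA256 key. A round-trip sketch (module path assumed; generate_random_key as defined on the master side):

from electrum.ecc import ECPrivkey     # module path assumed

recipient = ECPrivkey.generate_random_key()
ciphertext = recipient.encrypt_message(b'attack at dawn')   # base64 blob carrying the magic bytes
assert recipient.decrypt_message(ciphertext) == b'attack at dawn'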
def construct_sig65(sig_string: bytes, recid: int, is_compressed: bool) -> bytes: def construct_sig65(sig_string, recid, is_compressed):
comp = 4 if is_compressed else 0 comp = 4 if is_compressed else 0
return bytes([27 + recid + comp]) + sig_string return bytes([27 + recid + comp]) + sig_string
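construct_sig65 produces the classic 65-byte recoverable message signature: one header byte equal to 27 + recid, plus 4 when the signing key is compressed, followed by the 64-byte r||s. A sketch of decoding that header, mirroring from_signature65 above:

def split_sig65(sig65: bytes):
    # header 27..30 means an uncompressed key, 31..34 a compressed key
    if len(sig65) != 65:
        raise ValueError('expected 65 bytes')
    header = sig65[0]
    if not (27 <= header < 35):
        raise ValueError('bad header byte')
    compressed = header >= 31
    recid = (header - 27) & 3
    return recid, compressed, sig65[1:]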

View file

@@ -5,15 +5,14 @@ import os
import sys import sys
import traceback import traceback
import ctypes import ctypes
from ctypes.util import find_library
from ctypes import ( from ctypes import (
byref, c_byte, c_int, c_uint, c_char_p, c_size_t, c_void_p, create_string_buffer, byref, c_byte, c_int, c_uint, c_char_p, c_size_t, c_void_p, create_string_buffer, CFUNCTYPE, POINTER
CFUNCTYPE, POINTER, cast
) )
from .logging import get_logger import ecdsa
from .util import print_stderr, print_error
_logger = get_logger(__name__)
SECP256K1_FLAGS_TYPE_MASK = ((1 << 8) - 1) SECP256K1_FLAGS_TYPE_MASK = ((1 << 8) - 1)
@@ -33,32 +32,19 @@ SECP256K1_EC_COMPRESSED = (SECP256K1_FLAGS_TYPE_COMPRESSION | SECP256K1_FLAGS_BIT_COMPRESSION)
SECP256K1_EC_UNCOMPRESSED = (SECP256K1_FLAGS_TYPE_COMPRESSION) SECP256K1_EC_UNCOMPRESSED = (SECP256K1_FLAGS_TYPE_COMPRESSION)
class LibModuleMissing(Exception): pass
def load_library(): def load_library():
if sys.platform == 'darwin': if sys.platform == 'darwin':
library_paths = (os.path.join(os.path.dirname(__file__), 'libsecp256k1.0.dylib'), library_path = 'libsecp256k1.0.dylib'
'libsecp256k1.0.dylib')
elif sys.platform in ('windows', 'win32'): elif sys.platform in ('windows', 'win32'):
library_paths = (os.path.join(os.path.dirname(__file__), 'libsecp256k1-0.dll'), library_path = 'libsecp256k1.dll'
'libsecp256k1-0.dll')
elif 'ANDROID_DATA' in os.environ: elif 'ANDROID_DATA' in os.environ:
library_paths = ('libsecp256k1.so',) library_path = 'libsecp256k1.so'
else: # desktop Linux and similar else:
library_paths = (os.path.join(os.path.dirname(__file__), 'libsecp256k1.so.0'), library_path = 'libsecp256k1.so.0'
'libsecp256k1.so.0')
secp256k1 = None secp256k1 = ctypes.cdll.LoadLibrary(library_path)
for libpath in library_paths:
try:
secp256k1 = ctypes.cdll.LoadLibrary(libpath)
except:
pass
else:
break
if not secp256k1: if not secp256k1:
_logger.error('libsecp256k1 library failed to load') print_stderr('[ecc] warning: libsecp256k1 library failed to load')
return None return None
try: try:
@@ -92,48 +78,146 @@ def load_library():
secp256k1.secp256k1_ecdsa_signature_serialize_compact.argtypes = [c_void_p, c_char_p, c_char_p] secp256k1.secp256k1_ecdsa_signature_serialize_compact.argtypes = [c_void_p, c_char_p, c_char_p]
secp256k1.secp256k1_ecdsa_signature_serialize_compact.restype = c_int secp256k1.secp256k1_ecdsa_signature_serialize_compact.restype = c_int
secp256k1.secp256k1_ecdsa_signature_parse_der.argtypes = [c_void_p, c_char_p, c_char_p, c_size_t]
secp256k1.secp256k1_ecdsa_signature_parse_der.restype = c_int
secp256k1.secp256k1_ecdsa_signature_serialize_der.argtypes = [c_void_p, c_char_p, c_void_p, c_char_p]
secp256k1.secp256k1_ecdsa_signature_serialize_der.restype = c_int
secp256k1.secp256k1_ec_pubkey_tweak_mul.argtypes = [c_void_p, c_char_p, c_char_p] secp256k1.secp256k1_ec_pubkey_tweak_mul.argtypes = [c_void_p, c_char_p, c_char_p]
secp256k1.secp256k1_ec_pubkey_tweak_mul.restype = c_int secp256k1.secp256k1_ec_pubkey_tweak_mul.restype = c_int
secp256k1.secp256k1_ec_pubkey_combine.argtypes = [c_void_p, c_char_p, c_void_p, c_size_t]
secp256k1.secp256k1_ec_pubkey_combine.restype = c_int
# --enable-module-recovery
try:
secp256k1.secp256k1_ecdsa_recover.argtypes = [c_void_p, c_char_p, c_char_p, c_char_p]
secp256k1.secp256k1_ecdsa_recover.restype = c_int
secp256k1.secp256k1_ecdsa_recoverable_signature_parse_compact.argtypes = [c_void_p, c_char_p, c_char_p, c_int]
secp256k1.secp256k1_ecdsa_recoverable_signature_parse_compact.restype = c_int
except (OSError, AttributeError):
raise LibModuleMissing('libsecp256k1 library found but it was built '
'without required module (--enable-module-recovery)')
secp256k1.ctx = secp256k1.secp256k1_context_create(SECP256K1_CONTEXT_SIGN | SECP256K1_CONTEXT_VERIFY) secp256k1.ctx = secp256k1.secp256k1_context_create(SECP256K1_CONTEXT_SIGN | SECP256K1_CONTEXT_VERIFY)
ret = secp256k1.secp256k1_context_randomize(secp256k1.ctx, os.urandom(32)) r = secp256k1.secp256k1_context_randomize(secp256k1.ctx, os.urandom(32))
if not ret: if r:
_logger.error('secp256k1_context_randomize failed') return secp256k1
else:
print_stderr('[ecc] warning: secp256k1_context_randomize failed')
return None return None
except (OSError, AttributeError):
return secp256k1 #traceback.print_exc(file=sys.stderr)
except (OSError, AttributeError) as e: print_stderr('[ecc] warning: libsecp256k1 library was found and loaded but there was an error when using it')
_logger.error(f'libsecp256k1 library was found and loaded but there was an error when using it: {repr(e)}')
return None return None
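load_library resolves the shared library per platform, declares argtypes/restypes for the calls it needs, then creates and randomizes a context. A stripped-down sketch of the same ctypes pattern; the library filename varies per platform, error handling is omitted, and the flag values are the ones upstream libsecp256k1 defines:

import ctypes, os

# Upstream flag layout: type bit 0, verify bit 8, sign bit 9.
SECP256K1_CONTEXT_SIGN   = (1 << 0) | (1 << 9)
SECP256K1_CONTEXT_VERIFY = (1 << 0) | (1 << 8)

lib = ctypes.cdll.LoadLibrary('libsecp256k1.so.0')   # platform-specific name, as above
lib.secp256k1_context_create.argtypes = [ctypes.c_uint]
lib.secp256k1_context_create.restype = ctypes.c_void_p
lib.secp256k1_context_randomize.argtypes = [ctypes.c_void_p, ctypes.c_char_p]
lib.secp256k1_context_randomize.restype = ctypes.c_int

ctx = lib.secp256k1_context_create(SECP256K1_CONTEXT_SIGN | SECP256K1_CONTEXT_VERIFY)
if not lib.secp256k1_context_randomize(ctx, os.urandom(32)):
    raise RuntimeError('secp256k1_context_randomize failed')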
_libsecp256k1 = None class _patched_functions:
prepared_to_patch = False
monkey_patching_active = False
def _prepare_monkey_patching_of_python_ecdsa_internals_with_libsecp256k1():
if not _libsecp256k1:
return
# save original functions so that we can undo patching (needed for tests)
_patched_functions.orig_sign = staticmethod(ecdsa.ecdsa.Private_key.sign)
_patched_functions.orig_verify = staticmethod(ecdsa.ecdsa.Public_key.verifies)
_patched_functions.orig_mul = staticmethod(ecdsa.ellipticcurve.Point.__mul__)
curve_secp256k1 = ecdsa.ecdsa.curve_secp256k1
curve_order = ecdsa.curves.SECP256k1.order
point_at_infinity = ecdsa.ellipticcurve.INFINITY
def mul(self: ecdsa.ellipticcurve.Point, other: int):
if self.curve() != curve_secp256k1:
# this operation is not on the secp256k1 curve; use original implementation
return _patched_functions.orig_mul(self, other)
other %= curve_order
if self == point_at_infinity or other == 0:
return point_at_infinity
pubkey = create_string_buffer(64)
public_pair_bytes = b'\4' + self.x().to_bytes(32, byteorder="big") + self.y().to_bytes(32, byteorder="big")
r = _libsecp256k1.secp256k1_ec_pubkey_parse(
_libsecp256k1.ctx, pubkey, public_pair_bytes, len(public_pair_bytes))
if not r:
return False
r = _libsecp256k1.secp256k1_ec_pubkey_tweak_mul(_libsecp256k1.ctx, pubkey, other.to_bytes(32, byteorder="big"))
if not r:
return point_at_infinity
pubkey_serialized = create_string_buffer(65)
pubkey_size = c_size_t(65)
_libsecp256k1.secp256k1_ec_pubkey_serialize(
_libsecp256k1.ctx, pubkey_serialized, byref(pubkey_size), pubkey, SECP256K1_EC_UNCOMPRESSED)
x = int.from_bytes(pubkey_serialized[1:33], byteorder="big")
y = int.from_bytes(pubkey_serialized[33:], byteorder="big")
return ecdsa.ellipticcurve.Point(curve_secp256k1, x, y, curve_order)
def sign(self: ecdsa.ecdsa.Private_key, hash: int, random_k: int):
# note: random_k is ignored
if self.public_key.curve != curve_secp256k1:
# this operation is not on the secp256k1 curve; use original implementation
return _patched_functions.orig_sign(self, hash, random_k)
secret_exponent = self.secret_multiplier
nonce_function = None
sig = create_string_buffer(64)
sig_hash_bytes = hash.to_bytes(32, byteorder="big")
_libsecp256k1.secp256k1_ecdsa_sign(
_libsecp256k1.ctx, sig, sig_hash_bytes, secret_exponent.to_bytes(32, byteorder="big"), nonce_function, None)
compact_signature = create_string_buffer(64)
_libsecp256k1.secp256k1_ecdsa_signature_serialize_compact(_libsecp256k1.ctx, compact_signature, sig)
r = int.from_bytes(compact_signature[:32], byteorder="big")
s = int.from_bytes(compact_signature[32:], byteorder="big")
return ecdsa.ecdsa.Signature(r, s)
def verify(self: ecdsa.ecdsa.Public_key, hash: int, signature: ecdsa.ecdsa.Signature):
if self.curve != curve_secp256k1:
# this operation is not on the secp256k1 curve; use original implementation
return _patched_functions.orig_verify(self, hash, signature)
sig = create_string_buffer(64)
input64 = signature.r.to_bytes(32, byteorder="big") + signature.s.to_bytes(32, byteorder="big")
r = _libsecp256k1.secp256k1_ecdsa_signature_parse_compact(_libsecp256k1.ctx, sig, input64)
if not r:
return False
r = _libsecp256k1.secp256k1_ecdsa_signature_normalize(_libsecp256k1.ctx, sig, sig)
public_pair_bytes = b'\4' + self.point.x().to_bytes(32, byteorder="big") + self.point.y().to_bytes(32, byteorder="big")
pubkey = create_string_buffer(64)
r = _libsecp256k1.secp256k1_ec_pubkey_parse(
_libsecp256k1.ctx, pubkey, public_pair_bytes, len(public_pair_bytes))
if not r:
return False
return 1 == _libsecp256k1.secp256k1_ecdsa_verify(_libsecp256k1.ctx, sig, hash.to_bytes(32, byteorder="big"), pubkey)
# save new functions so that we can (re-)do patching
_patched_functions.fast_sign = sign
_patched_functions.fast_verify = verify
_patched_functions.fast_mul = mul
_patched_functions.prepared_to_patch = True
def do_monkey_patching_of_python_ecdsa_internals_with_libsecp256k1():
if not _libsecp256k1:
# FIXME print_error will always print as 'verbosity' is not yet initialised
print_error('[ecc] info: libsecp256k1 library not available, falling back to python-ecdsa. '
'This means signing operations will be slower.')
return
if not _patched_functions.prepared_to_patch:
raise Exception("can't patch python-ecdsa without preparations")
ecdsa.ecdsa.Private_key.sign = _patched_functions.fast_sign
ecdsa.ecdsa.Public_key.verifies = _patched_functions.fast_verify
ecdsa.ellipticcurve.Point.__mul__ = _patched_functions.fast_mul
# ecdsa.ellipticcurve.Point.__add__ = ... # TODO??
_patched_functions.monkey_patching_active = True
def undo_monkey_patching_of_python_ecdsa_internals_with_libsecp256k1():
if not _libsecp256k1:
return
if not _patched_functions.prepared_to_patch:
raise Exception("can't patch python-ecdsa without preparations")
ecdsa.ecdsa.Private_key.sign = _patched_functions.orig_sign
ecdsa.ecdsa.Public_key.verifies = _patched_functions.orig_verify
ecdsa.ellipticcurve.Point.__mul__ = _patched_functions.orig_mul
_patched_functions.monkey_patching_active = False
def is_using_fast_ecc():
return _patched_functions.monkey_patching_active
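The patching on this side of the diff is deliberately reversible so that tests can compare the pure-python and libsecp256k1 code paths. A usage sketch, assuming the module is importable as electrum.ecc_fast:

from electrum import ecc_fast   # module path assumed

ecc_fast.do_monkey_patching_of_python_ecdsa_internals_with_libsecp256k1()
assert ecc_fast.is_using_fast_ecc()
# ... run signing/verification code or benchmarks ...
ecc_fast.undo_monkey_patching_of_python_ecdsa_internals_with_libsecp256k1()
assert not ecc_fast.is_using_fast_ecc()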
try: try:
_libsecp256k1 = load_library() _libsecp256k1 = load_library()
except BaseException as e: except:
_logger.error(f'failed to load libsecp256k1: {repr(e)}') _libsecp256k1 = None
#traceback.print_exc(file=sys.stderr)
_prepare_monkey_patching_of_python_ecdsa_internals_with_libsecp256k1()
if _libsecp256k1 is None:
# hard fail:
sys.exit(f"Error: Failed to load libsecp256k1.")

View file

@@ -1,29 +1,18 @@
import asyncio
from datetime import datetime from datetime import datetime
import inspect import inspect
import requests
import sys import sys
import os import os
import json import json
from threading import Thread
import time import time
import csv import csv
import decimal import decimal
from decimal import Decimal from decimal import Decimal
from typing import Sequence, Optional
from aiorpcx.curio import timeout_after, TaskTimeout, TaskGroup
from .bitcoin import COIN from .bitcoin import COIN
from .i18n import _ from .i18n import _
from .util import (ThreadJob, make_dir, log_exceptions, from .util import PrintError, ThreadJob, make_dir
make_aiohttp_session, resource_path)
from .network import Network
from .simple_config import SimpleConfig
from .logging import Logger
DEFAULT_ENABLED = False
DEFAULT_CURRENCY = "EUR"
DEFAULT_EXCHANGE = "CoinGecko" # default exchange should ideally provide historical rates
# See https://en.wikipedia.org/wiki/ISO_4217 # See https://en.wikipedia.org/wiki/ISO_4217
@@ -35,82 +24,67 @@ CCY_PRECISIONS = {'BHD': 3, 'BIF': 0, 'BYR': 0, 'CLF': 4, 'CLP': 0,
'VUV': 0, 'XAF': 0, 'XAU': 4, 'XOF': 0, 'XPF': 0} 'VUV': 0, 'XAF': 0, 'XAU': 4, 'XOF': 0, 'XPF': 0}
class ExchangeBase(Logger): class ExchangeBase(PrintError):
def __init__(self, on_quotes, on_history): def __init__(self, on_quotes, on_history):
Logger.__init__(self)
self.history = {} self.history = {}
self.quotes = {} self.quotes = {}
self.on_quotes = on_quotes self.on_quotes = on_quotes
self.on_history = on_history self.on_history = on_history
async def get_raw(self, site, get_string): def get_json(self, site, get_string):
# APIs must have https # APIs must have https
url = ''.join(['https://', site, get_string]) url = ''.join(['https://', site, get_string])
network = Network.get_instance() response = requests.request('GET', url, headers={'User-Agent' : 'Electrum'}, timeout=10)
proxy = network.proxy if network else None return response.json()
async with make_aiohttp_session(proxy) as session:
async with session.get(url) as response:
response.raise_for_status()
return await response.text()
async def get_json(self, site, get_string): def get_csv(self, site, get_string):
# APIs must have https
url = ''.join(['https://', site, get_string]) url = ''.join(['https://', site, get_string])
network = Network.get_instance() response = requests.request('GET', url, headers={'User-Agent' : 'Electrum'})
proxy = network.proxy if network else None reader = csv.DictReader(response.content.decode().split('\n'))
async with make_aiohttp_session(proxy) as session:
async with session.get(url) as response:
response.raise_for_status()
# set content_type to None to disable checking MIME type
return await response.json(content_type=None)
async def get_csv(self, site, get_string):
raw = await self.get_raw(site, get_string)
reader = csv.DictReader(raw.split('\n'))
return list(reader) return list(reader)
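On the asynchronous (master) side, get_json issues an HTTPS GET through an aiohttp session, routed through the configured proxy when there is one, and decodes JSON regardless of the advertised MIME type. A reduced sketch without the proxy plumbing, using aiohttp directly:

import asyncio
import aiohttp

async def fetch_json(site: str, get_string: str) -> dict:
    url = 'https://' + site + get_string
    async with aiohttp.ClientSession(headers={'User-Agent': 'Electrum'}) as session:
        async with session.get(url) as resp:
            resp.raise_for_status()
            # content_type=None: some rate APIs serve JSON under a text/* MIME type
            return await resp.json(content_type=None)

# example: asyncio.run(fetch_json('api.coindesk.com', '/v1/bpi/currentprice/EUR.json'))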
def name(self): def name(self):
return self.__class__.__name__ return self.__class__.__name__
async def update_safe(self, ccy): def update_safe(self, ccy):
try: try:
self.logger.info(f"getting fx quotes for {ccy}") self.print_error("getting fx quotes for", ccy)
self.quotes = await self.get_rates(ccy) self.quotes = self.get_rates(ccy)
self.logger.info("received fx quotes") self.print_error("received fx quotes")
except asyncio.CancelledError:
# CancelledError must be passed-through for cancellation to work
raise
except BaseException as e: except BaseException as e:
self.logger.info(f"failed fx quotes: {repr(e)}") self.print_error("failed fx quotes:", e)
self.quotes = {}
self.on_quotes() self.on_quotes()
def read_historical_rates(self, ccy, cache_dir) -> Optional[dict]: def update(self, ccy):
t = Thread(target=self.update_safe, args=(ccy,))
t.setDaemon(True)
t.start()
def read_historical_rates(self, ccy, cache_dir):
filename = os.path.join(cache_dir, self.name() + '_'+ ccy) filename = os.path.join(cache_dir, self.name() + '_'+ ccy)
if not os.path.exists(filename): if os.path.exists(filename):
return None timestamp = os.stat(filename).st_mtime
timestamp = os.stat(filename).st_mtime try:
try: with open(filename, 'r', encoding='utf-8') as f:
with open(filename, 'r', encoding='utf-8') as f: h = json.loads(f.read())
h = json.loads(f.read()) h['timestamp'] = timestamp
except: except:
return None h = None
if not h: # e.g. empty dict else:
return None h = None
h['timestamp'] = timestamp if h:
self.history[ccy] = h self.history[ccy] = h
self.on_history() self.on_history()
return h return h
@log_exceptions def get_historical_rates_safe(self, ccy, cache_dir):
async def get_historical_rates_safe(self, ccy, cache_dir):
try: try:
self.logger.info(f"requesting fx history for {ccy}") self.print_error("requesting fx history for", ccy)
h = await self.request_history(ccy) h = self.request_history(ccy)
self.logger.info(f"received fx history for {ccy}") self.print_error("received fx history for", ccy)
except BaseException as e: except BaseException as e:
self.logger.info(f"failed fx history: {repr(e)}") self.print_error("failed fx history:", e)
return return
filename = os.path.join(cache_dir, self.name() + '_' + ccy) filename = os.path.join(cache_dir, self.name() + '_' + ccy)
with open(filename, 'w', encoding='utf-8') as f: with open(filename, 'w', encoding='utf-8') as f:
@@ -126,7 +100,9 @@ class ExchangeBase(Logger):
if h is None: if h is None:
h = self.read_historical_rates(ccy, cache_dir) h = self.read_historical_rates(ccy, cache_dir)
if h is None or h['timestamp'] < time.time() - 24*3600: if h is None or h['timestamp'] < time.time() - 24*3600:
asyncio.get_event_loop().create_task(self.get_historical_rates_safe(ccy, cache_dir)) t = Thread(target=self.get_historical_rates_safe, args=(ccy, cache_dir))
t.setDaemon(True)
t.start()
def history_ccys(self): def history_ccys(self):
return [] return []
@@ -134,38 +110,43 @@ class ExchangeBase(Logger):
def historical_rate(self, ccy, d_t): def historical_rate(self, ccy, d_t):
return self.history.get(ccy, {}).get(d_t.strftime('%Y-%m-%d'), 'NaN') return self.history.get(ccy, {}).get(d_t.strftime('%Y-%m-%d'), 'NaN')
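Historical rates are cached on disk as one JSON file per exchange and currency (named ExchangeName_CCY under cache_dir), mapping 'YYYY-MM-DD' strings to prices, plus a 'timestamp' key used for the 24-hour refresh check. A lookup sketch; the path and file contents are illustrative:

import json, os
from datetime import datetime

cache_dir = '/path/to/cache'                            # placeholder
with open(os.path.join(cache_dir, 'CoinDesk_EUR'), encoding='utf-8') as f:
    history = json.load(f)                              # e.g. {"2018-01-01": 11234.6, ...}

d_t = datetime(2018, 1, 1)
rate = history.get(d_t.strftime('%Y-%m-%d'), 'NaN')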
async def request_history(self, ccy): def get_currencies(self):
raise NotImplementedError() # implemented by subclasses rates = self.get_rates('')
async def get_rates(self, ccy):
raise NotImplementedError() # implemented by subclasses
async def get_currencies(self):
rates = await self.get_rates('')
return sorted([str(a) for (a, b) in rates.items() if b is not None and len(a)==3]) return sorted([str(a) for (a, b) in rates.items() if b is not None and len(a)==3])
class BitcoinAverage(ExchangeBase): class BitcoinAverage(ExchangeBase):
# note: historical rates used to be freely available
# but this is no longer the case. see #5188
async def get_rates(self, ccy): def get_rates(self, ccy):
json = await self.get_json('apiv2.bitcoinaverage.com', '/indices/global/ticker/short') json = self.get_json('apiv2.bitcoinaverage.com', '/indices/global/ticker/short')
return dict([(r.replace("BTC", ""), Decimal(json[r]['last'])) return dict([(r.replace("BTC", ""), Decimal(json[r]['last']))
for r in json if r != 'timestamp']) for r in json if r != 'timestamp'])
def history_ccys(self):
return ['AUD', 'BRL', 'CAD', 'CHF', 'CNY', 'EUR', 'GBP', 'IDR', 'ILS',
'MXN', 'NOK', 'NZD', 'PLN', 'RON', 'RUB', 'SEK', 'SGD', 'USD',
'ZAR']
def request_history(self, ccy):
history = self.get_csv('apiv2.bitcoinaverage.com',
"/indices/global/history/BTC%s?period=alltime&format=csv" % ccy)
return dict([(h['DateTime'][:10], h['Average'])
for h in history])
class Bitcointoyou(ExchangeBase): class Bitcointoyou(ExchangeBase):
async def get_rates(self, ccy): def get_rates(self, ccy):
json = await self.get_json('bitcointoyou.com', "/API/ticker.aspx") json = self.get_json('bitcointoyou.com', "/API/ticker.aspx")
return {'BRL': Decimal(json['ticker']['last'])} return {'BRL': Decimal(json['ticker']['last'])}
def history_ccys(self):
return ['BRL']
class BitcoinVenezuela(ExchangeBase): class BitcoinVenezuela(ExchangeBase):
async def get_rates(self, ccy): def get_rates(self, ccy):
json = await self.get_json('api.bitcoinvenezuela.com', '/') json = self.get_json('api.bitcoinvenezuela.com', '/')
rates = [(r, json['BTC'][r]) for r in json['BTC'] rates = [(r, json['BTC'][r]) for r in json['BTC']
if json['BTC'][r] is not None] # Giving NULL for LTC if json['BTC'][r] is not None] # Giving NULL for LTC
return dict(rates) return dict(rates)
@@ -173,108 +154,99 @@ class BitcoinVenezuela(ExchangeBase):
def history_ccys(self): def history_ccys(self):
return ['ARS', 'EUR', 'USD', 'VEF'] return ['ARS', 'EUR', 'USD', 'VEF']
async def request_history(self, ccy): def request_history(self, ccy):
json = await self.get_json('api.bitcoinvenezuela.com', return self.get_json('api.bitcoinvenezuela.com',
"/historical/index.php?coin=BTC") "/historical/index.php?coin=BTC")[ccy +'_BTC']
return json[ccy +'_BTC']
class Bitbank(ExchangeBase): class Bitbank(ExchangeBase):
async def get_rates(self, ccy): def get_rates(self, ccy):
json = await self.get_json('public.bitbank.cc', '/btc_jpy/ticker') json = self.get_json('public.bitbank.cc', '/btc_jpy/ticker')
return {'JPY': Decimal(json['data']['last'])} return {'JPY': Decimal(json['data']['last'])}
class BitFlyer(ExchangeBase): class BitFlyer(ExchangeBase):
async def get_rates(self, ccy): def get_rates(self, ccy):
json = await self.get_json('bitflyer.jp', '/api/echo/price') json = self.get_json('bitflyer.jp', '/api/echo/price')
return {'JPY': Decimal(json['mid'])} return {'JPY': Decimal(json['mid'])}
class Bitmarket(ExchangeBase):
def get_rates(self, ccy):
json = self.get_json('www.bitmarket.pl', '/json/BTCPLN/ticker.json')
return {'PLN': Decimal(json['last'])}
class BitPay(ExchangeBase): class BitPay(ExchangeBase):
async def get_rates(self, ccy): def get_rates(self, ccy):
json = await self.get_json('bitpay.com', '/api/rates') json = self.get_json('bitpay.com', '/api/rates')
return dict([(r['code'], Decimal(r['rate'])) for r in json]) return dict([(r['code'], Decimal(r['rate'])) for r in json])
class Bitso(ExchangeBase): class Bitso(ExchangeBase):
async def get_rates(self, ccy): def get_rates(self, ccy):
json = await self.get_json('api.bitso.com', '/v2/ticker') json = self.get_json('api.bitso.com', '/v2/ticker')
return {'MXN': Decimal(json['last'])} return {'MXN': Decimal(json['last'])}
class BitStamp(ExchangeBase): class BitStamp(ExchangeBase):
async def get_currencies(self): def get_rates(self, ccy):
return ['USD', 'EUR'] json = self.get_json('www.bitstamp.net', '/api/ticker/')
return {'USD': Decimal(json['last'])}
async def get_rates(self, ccy):
if ccy in CURRENCIES[self.name()]:
json = await self.get_json('www.bitstamp.net', f'/api/v2/ticker/btc{ccy.lower()}/')
return {ccy: Decimal(json['last'])}
return {}
class Bitvalor(ExchangeBase): class Bitvalor(ExchangeBase):
async def get_rates(self,ccy): def get_rates(self,ccy):
json = await self.get_json('api.bitvalor.com', '/v1/ticker.json') json = self.get_json('api.bitvalor.com', '/v1/ticker.json')
return {'BRL': Decimal(json['ticker_1h']['total']['last'])} return {'BRL': Decimal(json['ticker_1h']['total']['last'])}
class BlockchainInfo(ExchangeBase): class BlockchainInfo(ExchangeBase):
async def get_rates(self, ccy): def get_rates(self, ccy):
json = await self.get_json('blockchain.info', '/ticker') json = self.get_json('blockchain.info', '/ticker')
return dict([(r, Decimal(json[r]['15m'])) for r in json]) return dict([(r, Decimal(json[r]['15m'])) for r in json])
class Bylls(ExchangeBase): class BTCChina(ExchangeBase):
async def get_rates(self, ccy): def get_rates(self, ccy):
json = await self.get_json('bylls.com', '/api/price?from_currency=BTC&to_currency=CAD') json = self.get_json('data.btcchina.com', '/data/ticker')
return {'CAD': Decimal(json['public_price']['to_price'])} return {'CNY': Decimal(json['ticker']['last'])}
class BTCParalelo(ExchangeBase):
def get_rates(self, ccy):
json = self.get_json('btcparalelo.com', '/api/price')
return {'VEF': Decimal(json['price'])}
class Coinbase(ExchangeBase): class Coinbase(ExchangeBase):
async def get_rates(self, ccy): def get_rates(self, ccy):
json = await self.get_json('api.coinbase.com', json = self.get_json('coinbase.com',
'/v2/exchange-rates?currency=BTC') '/api/v1/currencies/exchange_rates')
return {ccy: Decimal(rate) for (ccy, rate) in json["data"]["rates"].items()} return dict([(r[7:].upper(), Decimal(json[r]))
for r in json if r.startswith('btc_to_')])
class CoinCap(ExchangeBase):
async def get_rates(self, ccy):
json = await self.get_json('api.coincap.io', '/v2/rates/bitcoin/')
return {'USD': Decimal(json['data']['rateUsd'])}
def history_ccys(self):
return ['USD']
async def request_history(self, ccy):
# Currently 2000 days is the maximum in 1 API call
# (and history starts on 2017-03-23)
history = await self.get_json('api.coincap.io',
'/v2/assets/bitcoin/history?interval=d1&limit=2000')
return dict([(datetime.utcfromtimestamp(h['time']/1000).strftime('%Y-%m-%d'), h['priceUsd'])
for h in history['data']])
class CoinDesk(ExchangeBase): class CoinDesk(ExchangeBase):
async def get_currencies(self): def get_currencies(self):
dicts = await self.get_json('api.coindesk.com', dicts = self.get_json('api.coindesk.com',
'/v1/bpi/supported-currencies.json') '/v1/bpi/supported-currencies.json')
return [d['currency'] for d in dicts] return [d['currency'] for d in dicts]
async def get_rates(self, ccy): def get_rates(self, ccy):
json = await self.get_json('api.coindesk.com', json = self.get_json('api.coindesk.com',
'/v1/bpi/currentprice/%s.json' % ccy) '/v1/bpi/currentprice/%s.json' % ccy)
result = {ccy: Decimal(json['bpi'][ccy]['rate_float'])} result = {ccy: Decimal(json['bpi'][ccy]['rate_float'])}
return result return result
@@ -285,40 +257,35 @@ class CoinDesk(ExchangeBase):
def history_ccys(self): def history_ccys(self):
return self.history_starts().keys() return self.history_starts().keys()
async def request_history(self, ccy): def request_history(self, ccy):
start = self.history_starts()[ccy] start = self.history_starts()[ccy]
end = datetime.today().strftime('%Y-%m-%d') end = datetime.today().strftime('%Y-%m-%d')
# Note ?currency and ?index don't work as documented. Sigh. # Note ?currency and ?index don't work as documented. Sigh.
query = ('/v1/bpi/historical/close.json?start=%s&end=%s' query = ('/v1/bpi/historical/close.json?start=%s&end=%s'
% (start, end)) % (start, end))
json = await self.get_json('api.coindesk.com', query) json = self.get_json('api.coindesk.com', query)
return json['bpi'] return json['bpi']
class CoinGecko(ExchangeBase): class Coinsecure(ExchangeBase):
async def get_rates(self, ccy): def get_rates(self, ccy):
json = await self.get_json('api.coingecko.com', '/api/v3/exchange_rates') json = self.get_json('api.coinsecure.in', '/v0/noauth/newticker')
return dict([(ccy.upper(), Decimal(d['value'])) return {'INR': Decimal(json['lastprice'] / 100.0 )}
for ccy, d in json['rates'].items()])
def history_ccys(self):
# CoinGecko seems to have historical data for all ccys it supports
return CURRENCIES[self.name()]
async def request_history(self, ccy): class Foxbit(ExchangeBase):
history = await self.get_json('api.coingecko.com',
'/api/v3/coins/bitcoin/market_chart?vs_currency=%s&days=max' % ccy)
return dict([(datetime.utcfromtimestamp(h[0]/1000).strftime('%Y-%m-%d'), h[1]) def get_rates(self,ccy):
for h in history['prices']]) json = self.get_json('api.bitvalor.com', '/v1/ticker.json')
return {'BRL': Decimal(json['ticker_1h']['exchanges']['FOX']['last'])}
class itBit(ExchangeBase): class itBit(ExchangeBase):
async def get_rates(self, ccy): def get_rates(self, ccy):
ccys = ['USD', 'EUR', 'SGD'] ccys = ['USD', 'EUR', 'SGD']
json = await self.get_json('api.itbit.com', '/v1/markets/XBT%s/ticker' % ccy) json = self.get_json('api.itbit.com', '/v1/markets/XBT%s/ticker' % ccy)
result = dict.fromkeys(ccys) result = dict.fromkeys(ccys)
if ccy in ccys: if ccy in ccys:
result[ccy] = Decimal(json['lastPrice']) result[ccy] = Decimal(json['lastPrice'])
@@ -327,10 +294,10 @@ class itBit(ExchangeBase):
class Kraken(ExchangeBase): class Kraken(ExchangeBase):
async def get_rates(self, ccy): def get_rates(self, ccy):
ccys = ['EUR', 'USD', 'CAD', 'GBP', 'JPY'] ccys = ['EUR', 'USD', 'CAD', 'GBP', 'JPY']
pairs = ['XBT%s' % c for c in ccys] pairs = ['XBT%s' % c for c in ccys]
json = await self.get_json('api.kraken.com', json = self.get_json('api.kraken.com',
'/0/public/Ticker?pair=%s' % ','.join(pairs)) '/0/public/Ticker?pair=%s' % ','.join(pairs))
return dict((k[-3:], Decimal(float(v['c'][0]))) return dict((k[-3:], Decimal(float(v['c'][0])))
for k, v in json['result'].items()) for k, v in json['result'].items())
@@ -338,44 +305,61 @@ class Kraken(ExchangeBase):
class LocalBitcoins(ExchangeBase): class LocalBitcoins(ExchangeBase):
async def get_rates(self, ccy): def get_rates(self, ccy):
json = await self.get_json('localbitcoins.com', json = self.get_json('localbitcoins.com',
'/bitcoinaverage/ticker-all-currencies/') '/bitcoinaverage/ticker-all-currencies/')
return dict([(r, Decimal(json[r]['rates']['last'])) for r in json]) return dict([(r, Decimal(json[r]['rates']['last'])) for r in json])
class MercadoBitcoin(ExchangeBase): class MercadoBitcoin(ExchangeBase):
async def get_rates(self, ccy): def get_rates(self, ccy):
json = await self.get_json('api.bitvalor.com', '/v1/ticker.json') json = self.get_json('api.bitvalor.com', '/v1/ticker.json')
return {'BRL': Decimal(json['ticker_1h']['exchanges']['MBT']['last'])} return {'BRL': Decimal(json['ticker_1h']['exchanges']['MBT']['last'])}
class NegocieCoins(ExchangeBase): class NegocieCoins(ExchangeBase):
async def get_rates(self,ccy): def get_rates(self,ccy):
json = await self.get_json('api.bitvalor.com', '/v1/ticker.json') json = self.get_json('api.bitvalor.com', '/v1/ticker.json')
return {'BRL': Decimal(json['ticker_1h']['exchanges']['NEG']['last'])} return {'BRL': Decimal(json['ticker_1h']['exchanges']['NEG']['last'])}
class TheRockTrading(ExchangeBase): class TheRockTrading(ExchangeBase):
async def get_rates(self, ccy): def get_rates(self, ccy):
json = await self.get_json('api.therocktrading.com', json = self.get_json('api.therocktrading.com',
'/v1/funds/BTCEUR/ticker') '/v1/funds/BTCEUR/ticker')
return {'EUR': Decimal(json['last'])} return {'EUR': Decimal(json['last'])}
class Unocoin(ExchangeBase):
def get_rates(self, ccy):
json = self.get_json('www.unocoin.com', 'trade?buy')
return {'INR': Decimal(json)}
class WEX(ExchangeBase):
def get_rates(self, ccy):
json_eur = self.get_json('wex.nz', '/api/3/ticker/btc_eur')
json_rub = self.get_json('wex.nz', '/api/3/ticker/btc_rur')
json_usd = self.get_json('wex.nz', '/api/3/ticker/btc_usd')
return {'EUR': Decimal(json_eur['btc_eur']['last']),
'RUB': Decimal(json_rub['btc_rur']['last']),
'USD': Decimal(json_usd['btc_usd']['last'])}
class Winkdex(ExchangeBase): class Winkdex(ExchangeBase):
async def get_rates(self, ccy): def get_rates(self, ccy):
json = await self.get_json('winkdex.com', '/api/v0/price') json = self.get_json('winkdex.com', '/api/v0/price')
return {'USD': Decimal(json['price'] / 100.0)} return {'USD': Decimal(json['price'] / 100.0)}
def history_ccys(self): def history_ccys(self):
return ['USD'] return ['USD']
async def request_history(self, ccy): def request_history(self, ccy):
json = await self.get_json('winkdex.com', json = self.get_json('winkdex.com',
"/api/v0/series?start_time=1342915200") "/api/v0/series?start_time=1342915200")
history = json['series'][0]['results'] history = json['series'][0]['results']
return dict([(h['timestamp'][:10], h['price'] / 100.0) return dict([(h['timestamp'][:10], h['price'] / 100.0)
@@ -383,8 +367,8 @@ class Winkdex(ExchangeBase):
class Zaif(ExchangeBase): class Zaif(ExchangeBase):
async def get_rates(self, ccy): def get_rates(self, ccy):
json = await self.get_json('api.zaif.jp', '/api/1/last_price/btc_jpy') json = self.get_json('api.zaif.jp', '/api/1/last_price/btc_jpy')
return {'JPY': Decimal(json['last_price'])} return {'JPY': Decimal(json['last_price'])}
@@ -397,39 +381,26 @@ def dictinvert(d):
return inv return inv
def get_exchanges_and_currencies(): def get_exchanges_and_currencies():
# load currencies.json from disk import os, json
path = resource_path('currencies.json') path = os.path.join(os.path.dirname(__file__), 'currencies.json')
try: try:
with open(path, 'r', encoding='utf-8') as f: with open(path, 'r', encoding='utf-8') as f:
return json.loads(f.read()) return json.loads(f.read())
except: except:
pass pass
# or if not present, generate it now.
print("cannot find currencies.json. will regenerate it now.")
d = {} d = {}
is_exchange = lambda obj: (inspect.isclass(obj) is_exchange = lambda obj: (inspect.isclass(obj)
and issubclass(obj, ExchangeBase) and issubclass(obj, ExchangeBase)
and obj != ExchangeBase) and obj != ExchangeBase)
exchanges = dict(inspect.getmembers(sys.modules[__name__], is_exchange)) exchanges = dict(inspect.getmembers(sys.modules[__name__], is_exchange))
for name, klass in exchanges.items():
async def get_currencies_safe(name, exchange): exchange = klass(None, None)
try: try:
d[name] = await exchange.get_currencies() d[name] = exchange.get_currencies()
print(name, "ok") print(name, "ok")
except: except:
print(name, "error") print(name, "error")
continue
async def query_all_exchanges_for_their_ccys_over_network():
async with timeout_after(10):
async with TaskGroup() as group:
for name, klass in exchanges.items():
exchange = klass(None, None)
await group.spawn(get_currencies_safe(name, exchange))
loop = asyncio.get_event_loop()
try:
loop.run_until_complete(query_all_exchanges_for_their_ccys_over_network())
except Exception as e:
pass
with open(path, 'w', encoding='utf-8') as f: with open(path, 'w', encoding='utf-8') as f:
f.write(json.dumps(d, indent=4, sort_keys=True)) f.write(json.dumps(d, indent=4, sort_keys=True))
return d return d
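The generated currencies.json is a plain mapping from exchange class name to the list of 3-letter codes it quotes; get_exchanges_by_ccy then inverts that mapping. Illustrative shape only (entries are examples, not real data):

currencies = {
    'CoinDesk': ['EUR', 'GBP', 'USD'],
    'Kraken':   ['CAD', 'EUR', 'GBP', 'JPY', 'USD'],
}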
@@ -452,72 +423,51 @@ def get_exchanges_by_ccy(history=True):
class FxThread(ThreadJob): class FxThread(ThreadJob):
def __init__(self, config: SimpleConfig, network: Network): def __init__(self, config, network):
ThreadJob.__init__(self)
self.config = config self.config = config
self.network = network self.network = network
if self.network:
self.network.register_callback(self.set_proxy, ['proxy_set'])
self.ccy = self.get_currency() self.ccy = self.get_currency()
self.history_used_spot = False self.history_used_spot = False
self.ccy_combo = None self.ccy_combo = None
self.hist_checkbox = None self.hist_checkbox = None
self.cache_dir = os.path.join(config.path, 'cache') self.cache_dir = os.path.join(config.path, 'cache')
self._trigger = asyncio.Event()
self._trigger.set()
self.set_exchange(self.config_exchange()) self.set_exchange(self.config_exchange())
make_dir(self.cache_dir) make_dir(self.cache_dir)
def set_proxy(self, trigger_name, *args): def get_currencies(self, h):
self._trigger.set() d = get_exchanges_by_ccy(h)
@staticmethod
def get_currencies(history: bool) -> Sequence[str]:
d = get_exchanges_by_ccy(history)
return sorted(d.keys()) return sorted(d.keys())
@staticmethod def get_exchanges_by_ccy(self, ccy, h):
def get_exchanges_by_ccy(ccy: str, history: bool) -> Sequence[str]: d = get_exchanges_by_ccy(h)
d = get_exchanges_by_ccy(history)
return d.get(ccy, []) return d.get(ccy, [])
@staticmethod
def remove_thousands_separator(text):
return text.replace(',', '') # FIXME use THOUSAND_SEPARATOR in util
def ccy_amount_str(self, amount, commas): def ccy_amount_str(self, amount, commas):
prec = CCY_PRECISIONS.get(self.ccy, 2) prec = CCY_PRECISIONS.get(self.ccy, 2)
fmt_str = "{:%s.%df}" % ("," if commas else "", max(0, prec)) # FIXME use util.THOUSAND_SEPARATOR and util.DECIMAL_POINT fmt_str = "{:%s.%df}" % ("," if commas else "", max(0, prec))
try: try:
rounded_amount = round(amount, prec) rounded_amount = round(amount, prec)
except decimal.InvalidOperation: except decimal.InvalidOperation:
rounded_amount = amount rounded_amount = amount
return fmt_str.format(rounded_amount) return fmt_str.format(rounded_amount)
async def run(self): def run(self):
while True: # This runs from the plugins thread which catches exceptions
# approx. every 2.5 minutes, refresh spot price if self.is_enabled():
try: if self.timeout ==0 and self.show_history():
async with timeout_after(150): self.exchange.get_historical_rates(self.ccy, self.cache_dir)
await self._trigger.wait() if self.timeout <= time.time():
self._trigger.clear() self.timeout = time.time() + 150
# we were manually triggered, so get historical rates self.exchange.update(self.ccy)
if self.is_enabled() and self.show_history():
self.exchange.get_historical_rates(self.ccy, self.cache_dir)
except TaskTimeout:
pass
if self.is_enabled():
await self.exchange.update_safe(self.ccy)
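The master-side run() coroutine sleeps on an asyncio.Event with a roughly 150-second timeout: a timeout means refresh the spot price, while an explicit trigger (currency, exchange or proxy change) additionally refetches historical rates. The same pattern with plain asyncio, as a sketch:

import asyncio

async def refresh_loop(trigger: asyncio.Event, refresh_spot, refresh_history):
    while True:
        try:
            await asyncio.wait_for(trigger.wait(), timeout=150)
        except asyncio.TimeoutError:
            pass                        # periodic wake-up: spot price only
        else:
            trigger.clear()
            await refresh_history()     # manual trigger: also refetch history
        await refresh_spot()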
def is_enabled(self): def is_enabled(self):
return bool(self.config.get('use_exchange_rate', DEFAULT_ENABLED)) return bool(self.config.get('use_exchange_rate'))
def set_enabled(self, b): def set_enabled(self, b):
self.config.set_key('use_exchange_rate', bool(b)) return self.config.set_key('use_exchange_rate', bool(b))
self.trigger_update()
def get_history_config(self, *, default=False): def get_history_config(self):
return bool(self.config.get('history_rates', default)) return bool(self.config.get('history_rates'))
def set_history_config(self, b): def set_history_config(self, b):
self.config.set_key('history_rates', bool(b)) self.config.set_key('history_rates', bool(b))
@@ -536,10 +486,10 @@ class FxThread(ThreadJob):
def get_currency(self): def get_currency(self):
'''Use when dynamic fetching is needed''' '''Use when dynamic fetching is needed'''
return self.config.get("currency", DEFAULT_CURRENCY) return self.config.get("currency", "EUR")
def config_exchange(self): def config_exchange(self):
return self.config.get('use_exchange', DEFAULT_EXCHANGE) return self.config.get('use_exchange', 'BitcoinAverage')
def show_history(self): def show_history(self):
return self.is_enabled() and self.get_history_config() and self.ccy in self.exchange.history_ccys() return self.is_enabled() and self.get_history_config() and self.ccy in self.exchange.history_ccys()
@ -547,23 +497,18 @@ class FxThread(ThreadJob):
def set_currency(self, ccy): def set_currency(self, ccy):
self.ccy = ccy self.ccy = ccy
self.config.set_key('currency', ccy, True) self.config.set_key('currency', ccy, True)
self.trigger_update() self.timeout = 0 # Because self.ccy changes
self.on_quotes() self.on_quotes()
def trigger_update(self):
if self.network:
self.network.asyncio_loop.call_soon_threadsafe(self._trigger.set)
def set_exchange(self, name): def set_exchange(self, name):
class_ = globals().get(name) or globals().get(DEFAULT_EXCHANGE) class_ = globals().get(name, BitcoinAverage)
self.logger.info(f"using exchange {name}") self.print_error("using exchange", name)
if self.config_exchange() != name: if self.config_exchange() != name:
self.config.set_key('use_exchange', name, True) self.config.set_key('use_exchange', name, True)
assert issubclass(class_, ExchangeBase), f"unexpected type {class_} for {name}" self.exchange = class_(self.on_quotes, self.on_history)
self.exchange = class_(self.on_quotes, self.on_history) # type: ExchangeBase
# A new exchange means new fx quotes, initially empty. Force # A new exchange means new fx quotes, initially empty. Force
# a quote refresh # a quote refresh
self.trigger_update() self.timeout = 0
self.exchange.read_historical_rates(self.ccy, self.cache_dir) self.exchange.read_historical_rates(self.ccy, self.cache_dir)
def on_quotes(self): def on_quotes(self):
@ -574,8 +519,8 @@ class FxThread(ThreadJob):
if self.network: if self.network:
self.network.trigger_callback('on_history') self.network.trigger_callback('on_history')
def exchange_rate(self) -> Decimal: def exchange_rate(self):
"""Returns the exchange rate as a Decimal""" '''Returns None, or the exchange rate as a Decimal'''
rate = self.exchange.quotes.get(self.ccy) rate = self.exchange.quotes.get(self.ccy)
if rate is None: if rate is None:
return Decimal('NaN') return Decimal('NaN')
@ -611,11 +556,9 @@ class FxThread(ThreadJob):
rate = self.exchange.historical_rate(self.ccy, d_t) rate = self.exchange.historical_rate(self.ccy, d_t)
# Frequently there is no rate for today, until tomorrow :) # Frequently there is no rate for today, until tomorrow :)
# Use spot quotes in that case # Use spot quotes in that case
if rate in ('NaN', None) and (datetime.today().date() - d_t.date()).days <= 2: if rate == 'NaN' and (datetime.today().date() - d_t.date()).days <= 2:
rate = self.exchange.quotes.get(self.ccy, 'NaN') rate = self.exchange.quotes.get(self.ccy, 'NaN')
self.history_used_spot = True self.history_used_spot = True
if rate is None:
rate = 'NaN'
return Decimal(rate) return Decimal(rate)
def historical_value_str(self, satoshis, d_t): def historical_value_str(self, satoshis, d_t):
@ -628,6 +571,3 @@ class FxThread(ThreadJob):
from .util import timestamp_to_datetime from .util import timestamp_to_datetime
date = timestamp_to_datetime(timestamp) date = timestamp_to_datetime(timestamp)
return self.history_rate(date) return self.history_rate(date)
assert globals().get(DEFAULT_EXCHANGE), f"default exchange {DEFAULT_EXCHANGE} does not exist"
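
The most substantial change in this file is the refresh loop: on the master side (the "-" lines), FxThread.run() is an asyncio coroutine that waits on an asyncio.Event with a roughly 150-second timeout, so spot prices refresh either when trigger_update() or set_proxy() sets the event or about every 2.5 minutes; on the 3.2.3 side (the "+" lines), run() is polled from the plugins thread and rate-limits itself with a self.timeout timestamp. The sketch below is a minimal, self-contained illustration of that trigger-or-timeout pattern, using plain asyncio.wait_for in place of Electrum's timeout_after/TaskTimeout helpers; fx_loop(), refresh() and REFRESH_INTERVAL are illustrative names, not part of the Electrum API.

import asyncio

REFRESH_INTERVAL = 150  # seconds, i.e. roughly every 2.5 minutes

async def refresh():
    # Placeholder for what the real loop does (update spot/historical rates).
    print("refreshing spot price")

async def fx_loop(trigger: asyncio.Event):
    while True:
        try:
            # Wake up early if trigger.set() is called (currency change,
            # proxy change, ...); otherwise fall through after the timeout.
            await asyncio.wait_for(trigger.wait(), timeout=REFRESH_INTERVAL)
            trigger.clear()
        except asyncio.TimeoutError:
            pass
        await refresh()

async def main():
    trigger = asyncio.Event()
    trigger.set()   # force an immediate first refresh
    task = asyncio.create_task(fx_loop(trigger))
    await asyncio.sleep(1)
    trigger.set()   # simulate a manual trigger, like FxThread.trigger_update()
    await asyncio.sleep(1)
    task.cancel()

if __name__ == '__main__':
    asyncio.run(main())

In the diff itself the wake-up is made thread-safe by scheduling self._trigger.set() onto the network's event loop with call_soon_threadsafe, which is what allows trigger_update() to be called from GUI code.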

View file

@@ -1,5 +1,5 @@
 # To create a new GUI, please add its code to this directory.
 # Three objects are passed to the ElectrumGui: config, daemon and plugins
-# The Wallet object is instantiated by the GUI
+# The Wallet object is instanciated by the GUI
 # Notifications about network events are sent to the GUI by using network.register_callback()
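
These comments are effectively the whole contract for adding a GUI: the package must expose an ElectrumGui that is handed config, daemon and plugins, it creates Wallet objects itself, and it learns about network events via network.register_callback(). A minimal sketch of what such an entry point could look like follows; the main() method and the 'network_updated' event name are assumptions made for illustration, not taken from this diff.

# Hypothetical skeleton of a custom GUI package, based only on the comments
# above; names other than __init__ and register_callback() are illustrative.

class ElectrumGui:
    def __init__(self, config, daemon, plugins):
        # The three objects every GUI receives from the daemon.
        self.config = config
        self.daemon = daemon
        self.plugins = plugins
        self.network = daemon.network  # assumed attribute; adjust to the actual daemon API

    def on_network_event(self, event, *args):
        print("network event:", event, args)

    def main(self):
        # Subscribe to network notifications, as described above.
        if self.network:
            self.network.register_callback(self.on_network_event,
                                           ['network_updated'])
        # The GUI, not the daemon, is responsible for instantiating
        # Wallet objects (e.g. from a user-chosen wallet path).
        ...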

Binary file not shown. (Before: 8.7 KiB)
Binary file not shown. (Before: 687 B)
Binary file not shown. (Before: 1.3 KiB)
Binary file not shown. (Before: 7.4 KiB)
Binary file not shown. (Before: 8.3 KiB)

Some files were not shown because too many files have changed in this diff.