commit e6bec51a7653c03854a727c7f261431165b6a5e7
parent e9b4b3720276783029054ed9d93a5057a227fa08
Author: chris-belcher <chris-belcher@users.noreply.github.com>
Date: Tue, 2 Oct 2018 00:53:12 +0100
Merge pull request #44 from suvayu/master
Make EPS installable with pip
Diffstat:
39 files changed, 2891 insertions(+), 2760 deletions(-)
diff --git a/MANIFEST.in b/MANIFEST.in
@@ -0,0 +1,4 @@
+include README.md
+include doc/*.md
+include electrumpersonalserver/certs/*
+include config.cfg_sample
diff --git a/README.md b/README.md
@@ -60,11 +60,16 @@ and `[watch-only-addresses]` sections. Master public keys for an Electrum wallet
(which start with xpub/ypub/zpub) can be found in the Electrum client menu
`Wallet` -> `Information`.
-* Run `./server.py` on Linux or double-click `run-server.bat` on Windows.
-The first time the server is run it will import all configured addresses as
-watch-only into the Bitcoin node, and then exit. If the wallets contain
-historical transactions you can use the rescan script (`./rescan-script.py` or
-`rescan-script.bat`) to make them appear.
+* Install Electrum Personal Server in your home directory with
+ `pip3 install --user .`. On Linux the scripts
+ (`electrum-personal-server` and `electrum-personal-server-rescan`) will be
+ installed in `~/.local/bin`.
+
+* Run `electrum-personal-server -c /path/to/config.cfg` to start Electrum
+ Personal Server. The first time the server is run it will import all
+ configured addresses as watch-only into the Bitcoin node, and then exit.
+ If the wallets contain historical transactions you can use the rescan script
+ (`electrum-personal-server-rescan -c /path/to/config.cfg`) to make them appear.
* Run the server again which will start Electrum Personal Server. Tell Electrum
wallet to connect to it in `Tools` -> `Server`. By default the server details
@@ -155,6 +160,18 @@ I can be contacted on freenode IRC on the `#bitcoin` and `#electrum` channels, b
My PGP key fingerprint is: `0A8B 038F 5E10 CC27 89BF CFFF EF73 4EA6 77F3 1129`.
+### Notes for developers
+
+To seamlessly work on the codebase while using `pip`, you need to
+install in the `develop`/`editable` mode. You can do that with:
+
+ $ pip3 install --user -e /path/to/repo
+
+`/path/to/repo` can also be a relative path, so if you are in the
+source directory, just use `.`. This installs the scripts in the
+usual places, but imports the package from the source directory. This
+way, any changes you make are immediately visible.
+
#### Testing
Electrum Personal Server also works on [testnet](https://en.bitcoin.it/wiki/Testnet)
@@ -162,7 +179,7 @@ and [regtest](https://bitcoin.org/en/glossary/regression-test-mode). The
Electrum wallet can be started in testnet mode with the command line flag
`--testnet` or `--regtest`.
-pytest is used for automated testing. On Debian-like systems install with
+pytest is used for automated testing. On Debian-like systems install with
`pip3 install pytest pytest-cov`
Run the tests with:
@@ -174,6 +191,11 @@ Create the coverage report with:
$ PYTHONPATH=.:$PYTHONPATH py.test-3 --cov-report=html --cov
$ open htmlcov/index.html
+If you have installed Electrum Personal Server with pip, there is no
+need to set `PYTHONPATH`. You could also run the tests with:
+
+ $ python3 setup.py test
+
## Media Coverage
* https://bitcoinmagazine.com/articles/electrum-personal-server-will-give-users-full-node-security-they-need/
diff --git a/bitcoin/__init__.py b/bitcoin/__init__.py
@@ -1,13 +0,0 @@
-from bitcoin.py2specials import *
-from bitcoin.py3specials import *
-secp_present = False
-try:
- import secp256k1
- secp_present = True
- from bitcoin.secp256k1_main import *
- from bitcoin.secp256k1_transaction import *
- from bitcoin.secp256k1_deterministic import *
-except ImportError as e:
- from bitcoin.main import *
- from bitcoin.deterministic import *
- from bitcoin.transaction import *
diff --git a/bitcoin/deterministic.py b/bitcoin/deterministic.py
@@ -1,183 +0,0 @@
-from bitcoin.main import *
-import hmac
-import hashlib
-from binascii import hexlify
-
-# Below code ASSUMES binary inputs and compressed pubkeys
-MAINNET_PRIVATE = b'\x04\x88\xAD\xE4'
-#MAINNET_PUBLIC = b'\x04\x88\xB2\x1E'
-TESTNET_PRIVATE = b'\x04\x35\x83\x94'
-#TESTNET_PUBLIC = b'\x04\x35\x87\xCF'
-PRIVATE = [MAINNET_PRIVATE, TESTNET_PRIVATE]
-#PUBLIC = [MAINNET_PUBLIC, TESTNET_PUBLIC]
-
-#updated for electrum's bip32 version bytes
-#only public keys because electrum personal server only needs them
-#https://github.com/spesmilo/electrum-docs/blob/master/xpub_version_bytes.rst
-PUBLIC = [ b'\x04\x88\xb2\x1e', #mainnet p2pkh or p2sh xpub
- b'\x04\x9d\x7c\xb2', #mainnet p2wpkh-p2sh ypub
- b'\x02\x95\xb4\x3f', #mainnet p2wsh-p2sh Ypub
- b'\x04\xb2\x47\x46', #mainnet p2wpkh zpub
- b'\x02\xaa\x7e\xd3', #mainnet p2wsh Zpub
- b'\x04\x35\x87\xcf', #testnet p2pkh or p2sh tpub
- b'\x04\x4a\x52\x62', #testnet p2wpkh-p2sh upub
- b'\x02\x42\x89\xef', #testnet p2wsh-p2sh Upub
- b'\x04\x5f\x1c\xf6', #testnet p2wpkh vpub
- b'\x02\x57\x54\x83' #testnet p2wsh Vpub
- ]
-
-# BIP32 child key derivation
-
-def raw_bip32_ckd(rawtuple, i):
- vbytes, depth, fingerprint, oldi, chaincode, key = rawtuple
- i = int(i)
-
- if vbytes in PRIVATE:
- priv = key
- pub = privtopub(key)
- else:
- pub = key
-
- if i >= 2**31:
- if vbytes in PUBLIC:
- raise Exception("Can't do private derivation on public key!")
- I = hmac.new(chaincode, b'\x00' + priv[:32] + encode(i, 256, 4),
- hashlib.sha512).digest()
- else:
- I = hmac.new(chaincode, pub + encode(i, 256, 4),
- hashlib.sha512).digest()
-
- if vbytes in PRIVATE:
- newkey = add_privkeys(I[:32] + B'\x01', priv)
- fingerprint = bin_hash160(privtopub(key))[:4]
- if vbytes in PUBLIC:
- newkey = add_pubkeys(compress(privtopub(I[:32])), key)
- fingerprint = bin_hash160(key)[:4]
-
- return (vbytes, depth + 1, fingerprint, i, I[32:], newkey)
-
-
-def bip32_serialize(rawtuple):
- vbytes, depth, fingerprint, i, chaincode, key = rawtuple
- i = encode(i, 256, 4)
- chaincode = encode(hash_to_int(chaincode), 256, 32)
- keydata = b'\x00' + key[:-1] if vbytes in PRIVATE else key
- bindata = vbytes + from_int_to_byte(
- depth % 256) + fingerprint + i + chaincode + keydata
- return changebase(bindata + bin_dbl_sha256(bindata)[:4], 256, 58)
-
-
-def bip32_deserialize(data):
- dbin = changebase(data, 58, 256)
- if bin_dbl_sha256(dbin[:-4])[:4] != dbin[-4:]:
- raise Exception("Invalid checksum")
- vbytes = dbin[0:4]
- depth = from_byte_to_int(dbin[4])
- fingerprint = dbin[5:9]
- i = decode(dbin[9:13], 256)
- chaincode = dbin[13:45]
- key = dbin[46:78] + b'\x01' if vbytes in PRIVATE else dbin[45:78]
- return (vbytes, depth, fingerprint, i, chaincode, key)
-
-
-def raw_bip32_privtopub(rawtuple):
- vbytes, depth, fingerprint, i, chaincode, key = rawtuple
- newvbytes = MAINNET_PUBLIC if vbytes == MAINNET_PRIVATE else TESTNET_PUBLIC
- return (newvbytes, depth, fingerprint, i, chaincode, privtopub(key))
-
-
-def bip32_privtopub(data):
- return bip32_serialize(raw_bip32_privtopub(bip32_deserialize(data)))
-
-
-def bip32_ckd(data, i):
- return bip32_serialize(raw_bip32_ckd(bip32_deserialize(data), i))
-
-
-def bip32_master_key(seed, vbytes=MAINNET_PRIVATE):
- I = hmac.new(
- from_string_to_bytes("Bitcoin seed"), seed, hashlib.sha512).digest()
- return bip32_serialize((vbytes, 0, b'\x00' * 4, 0, I[32:], I[:32] + b'\x01'
- ))
-
-
-def bip32_bin_extract_key(data):
- return bip32_deserialize(data)[-1]
-
-
-def bip32_extract_key(data):
- return safe_hexlify(bip32_deserialize(data)[-1])
-
-# Exploits the same vulnerability as above in Electrum wallets
-# Takes a BIP32 pubkey and one of the child privkeys of its corresponding
-# privkey and returns the BIP32 privkey associated with that pubkey
-
-def raw_crack_bip32_privkey(parent_pub, priv):
- vbytes, depth, fingerprint, i, chaincode, key = priv
- pvbytes, pdepth, pfingerprint, pi, pchaincode, pkey = parent_pub
- i = int(i)
-
- if i >= 2**31:
- raise Exception("Can't crack private derivation!")
-
- I = hmac.new(pchaincode, pkey + encode(i, 256, 4), hashlib.sha512).digest()
-
- pprivkey = subtract_privkeys(key, I[:32] + b'\x01')
-
- newvbytes = MAINNET_PRIVATE if vbytes == MAINNET_PUBLIC else TESTNET_PRIVATE
- return (newvbytes, pdepth, pfingerprint, pi, pchaincode, pprivkey)
-
-
-def crack_bip32_privkey(parent_pub, priv):
- dsppub = bip32_deserialize(parent_pub)
- dspriv = bip32_deserialize(priv)
- return bip32_serialize(raw_crack_bip32_privkey(dsppub, dspriv))
-
-def bip32_descend(*args):
- if len(args) == 2:
- key, path = args
- else:
- key, path = args[0], map(int, args[1:])
- for p in path:
- key = bip32_ckd(key, p)
- return bip32_extract_key(key)
-
-# electrum
-def electrum_stretch(seed):
- return slowsha(seed)
-
-# Accepts seed or stretched seed, returns master public key
-
-def electrum_mpk(seed):
- if len(seed) == 32:
- seed = electrum_stretch(seed)
- return privkey_to_pubkey(seed)[2:]
-
-# Accepts (seed or stretched seed), index and secondary index
-# (conventionally 0 for ordinary addresses, 1 for change) , returns privkey
-
-
-def electrum_privkey(seed, n, for_change=0):
- if len(seed) == 32:
- seed = electrum_stretch(seed)
- mpk = electrum_mpk(seed)
- offset = dbl_sha256(from_int_representation_to_bytes(n)+b':'+
- from_int_representation_to_bytes(for_change)+b':'+
- binascii.unhexlify(mpk))
- return add_privkeys(seed, offset)
-
-# Accepts (seed or stretched seed or master pubkey), index and secondary index
-# (conventionally 0 for ordinary addresses, 1 for change) , returns pubkey
-
-def electrum_pubkey(masterkey, n, for_change=0):
- if len(masterkey) == 32:
- mpk = electrum_mpk(electrum_stretch(masterkey))
- elif len(masterkey) == 64:
- mpk = electrum_mpk(masterkey)
- else:
- mpk = masterkey
- bin_mpk = encode_pubkey(mpk, 'bin_electrum')
- offset = bin_dbl_sha256(from_int_representation_to_bytes(n)+b':'+
- from_int_representation_to_bytes(for_change)+b':'+bin_mpk)
- return add_pubkeys('04'+mpk, privtopub(offset))
-
diff --git a/bitcoin/secp256k1_deterministic.py b/bitcoin/secp256k1_deterministic.py
@@ -1,92 +0,0 @@
-from bitcoin.secp256k1_main import *
-import hmac
-import hashlib
-from binascii import hexlify
-
-# Below code ASSUMES binary inputs and compressed pubkeys
-MAINNET_PRIVATE = b'\x04\x88\xAD\xE4'
-MAINNET_PUBLIC = b'\x04\x88\xB2\x1E'
-TESTNET_PRIVATE = b'\x04\x35\x83\x94'
-TESTNET_PUBLIC = b'\x04\x35\x87\xCF'
-PRIVATE = [MAINNET_PRIVATE, TESTNET_PRIVATE]
-PUBLIC = [MAINNET_PUBLIC, TESTNET_PUBLIC]
-
-# BIP32 child key derivation
-
-def raw_bip32_ckd(rawtuple, i):
- vbytes, depth, fingerprint, oldi, chaincode, key = rawtuple
- i = int(i)
-
- if vbytes in PRIVATE:
- priv = key
- pub = privtopub(key, False)
- else:
- pub = key
-
- if i >= 2**31:
- if vbytes in PUBLIC:
- raise Exception("Can't do private derivation on public key!")
- I = hmac.new(chaincode, b'\x00' + priv[:32] + encode(i, 256, 4),
- hashlib.sha512).digest()
- else:
- I = hmac.new(chaincode, pub + encode(i, 256, 4),
- hashlib.sha512).digest()
-
- if vbytes in PRIVATE:
- newkey = add_privkeys(I[:32] + B'\x01', priv, False)
- fingerprint = bin_hash160(privtopub(key, False))[:4]
- if vbytes in PUBLIC:
- newkey = add_pubkeys([privtopub(I[:32] + '\x01', False), key], False)
- fingerprint = bin_hash160(key)[:4]
-
- return (vbytes, depth + 1, fingerprint, i, I[32:], newkey)
-
-def bip32_serialize(rawtuple):
- vbytes, depth, fingerprint, i, chaincode, key = rawtuple
- i = encode(i, 256, 4)
- chaincode = encode(hash_to_int(chaincode), 256, 32)
- keydata = b'\x00' + key[:-1] if vbytes in PRIVATE else key
- bindata = vbytes + from_int_to_byte(
- depth % 256) + fingerprint + i + chaincode + keydata
- return changebase(bindata + bin_dbl_sha256(bindata)[:4], 256, 58)
-
-def bip32_deserialize(data):
- dbin = changebase(data, 58, 256)
- if bin_dbl_sha256(dbin[:-4])[:4] != dbin[-4:]:
- raise Exception("Invalid checksum")
- vbytes = dbin[0:4]
- depth = from_byte_to_int(dbin[4])
- fingerprint = dbin[5:9]
- i = decode(dbin[9:13], 256)
- chaincode = dbin[13:45]
- key = dbin[46:78] + b'\x01' if vbytes in PRIVATE else dbin[45:78]
- return (vbytes, depth, fingerprint, i, chaincode, key)
-
-def raw_bip32_privtopub(rawtuple):
- vbytes, depth, fingerprint, i, chaincode, key = rawtuple
- newvbytes = MAINNET_PUBLIC if vbytes == MAINNET_PRIVATE else TESTNET_PUBLIC
- return (newvbytes, depth, fingerprint, i, chaincode, privtopub(key, False))
-
-def bip32_privtopub(data):
- return bip32_serialize(raw_bip32_privtopub(bip32_deserialize(data)))
-
-def bip32_ckd(data, i):
- return bip32_serialize(raw_bip32_ckd(bip32_deserialize(data), i))
-
-def bip32_master_key(seed, vbytes=MAINNET_PRIVATE):
- I = hmac.new(
- from_string_to_bytes("Bitcoin seed"), seed, hashlib.sha512).digest()
- return bip32_serialize((vbytes, 0, b'\x00' * 4, 0, I[32:], I[:32] + b'\x01'
- ))
-
-def bip32_extract_key(data):
- return safe_hexlify(bip32_deserialize(data)[-1])
-
-def bip32_descend(*args):
- if len(args) == 2:
- key, path = args
- else:
- key, path = args[0], map(int, args[1:])
- for p in path:
- key = bip32_ckd(key, p)
- return bip32_extract_key(key)
diff --git a/bitcoin/secp256k1_transaction.py b/bitcoin/secp256k1_transaction.py
@@ -1,452 +0,0 @@
-#!/usr/bin/python
-import binascii, re, json, copy, sys
-from bitcoin.secp256k1_main import *
-from _functools import reduce
-import os
-
-is_python2 = sys.version_info.major == 2
-
-### Hex to bin converter and vice versa for objects
-def json_is_base(obj, base):
- if not is_python2 and isinstance(obj, bytes):
- return False
-
- alpha = get_code_string(base)
- if isinstance(obj, string_types):
- for i in range(len(obj)):
- if alpha.find(obj[i]) == -1:
- return False
- return True
- elif isinstance(obj, int_types) or obj is None:
- return True
- elif isinstance(obj, list):
- for i in range(len(obj)):
- if not json_is_base(obj[i], base):
- return False
- return True
- else:
- for x in obj:
- if not json_is_base(obj[x], base):
- return False
- return True
-
-
-def json_changebase(obj, changer):
- if isinstance(obj, string_or_bytes_types):
- return changer(obj)
- elif isinstance(obj, int_types) or obj is None:
- return obj
- elif isinstance(obj, list):
- return [json_changebase(x, changer) for x in obj]
- return dict((x, json_changebase(obj[x], changer)) for x in obj)
-
-# Transaction serialization and deserialization
-
-
-def deserialize(tx):
- if isinstance(tx, str) and re.match('^[0-9a-fA-F]*$', tx):
- #tx = bytes(bytearray.fromhex(tx))
- return json_changebase(
- deserialize(binascii.unhexlify(tx)), lambda x: safe_hexlify(x))
- # http://stackoverflow.com/questions/4851463/python-closure-write-to-variable-in-parent-scope
- # Python's scoping rules are demented, requiring me to make pos an object
- # so that it is call-by-reference
- pos = [0]
-
- def read_as_int(bytez):
- pos[0] += bytez
- return decode(tx[pos[0] - bytez:pos[0]][::-1], 256)
-
- def read_var_int():
- pos[0] += 1
-
- val = from_byte_to_int(tx[pos[0] - 1])
- if val < 253:
- return val
- return read_as_int(pow(2, val - 252))
-
- def read_bytes(bytez):
- pos[0] += bytez
- return tx[pos[0] - bytez:pos[0]]
-
- def read_var_string():
- size = read_var_int()
- return read_bytes(size)
-
- obj = {"ins": [], "outs": []}
- obj["version"] = read_as_int(4)
- ins = read_var_int()
- for i in range(ins):
- obj["ins"].append({
- "outpoint": {
- "hash": read_bytes(32)[::-1],
- "index": read_as_int(4)
- },
- "script": read_var_string(),
- "sequence": read_as_int(4)
- })
- outs = read_var_int()
- for i in range(outs):
- obj["outs"].append({
- "value": read_as_int(8),
- "script": read_var_string()
- })
- obj["locktime"] = read_as_int(4)
- return obj
-
-
-def serialize(txobj):
- #if isinstance(txobj, bytes):
- # txobj = bytes_to_hex_string(txobj)
- o = []
- if json_is_base(txobj, 16):
- json_changedbase = json_changebase(txobj,
- lambda x: binascii.unhexlify(x))
- hexlified = safe_hexlify(serialize(json_changedbase))
- return hexlified
- o.append(encode(txobj["version"], 256, 4)[::-1])
- o.append(num_to_var_int(len(txobj["ins"])))
- for inp in txobj["ins"]:
- o.append(inp["outpoint"]["hash"][::-1])
- o.append(encode(inp["outpoint"]["index"], 256, 4)[::-1])
- o.append(num_to_var_int(len(inp["script"])) + (inp["script"] if inp[
- "script"] or is_python2 else bytes()))
- o.append(encode(inp["sequence"], 256, 4)[::-1])
- o.append(num_to_var_int(len(txobj["outs"])))
- for out in txobj["outs"]:
- o.append(encode(out["value"], 256, 8)[::-1])
- o.append(num_to_var_int(len(out["script"])) + out["script"])
- o.append(encode(txobj["locktime"], 256, 4)[::-1])
-
- return ''.join(o) if is_python2 else reduce(lambda x, y: x + y, o, bytes())
-
-# Hashing transactions for signing
-
-SIGHASH_ALL = 1
-SIGHASH_NONE = 2
-SIGHASH_SINGLE = 3
-SIGHASH_ANYONECANPAY = 0x80
-
-def signature_form(tx, i, script, hashcode=SIGHASH_ALL):
- i, hashcode = int(i), int(hashcode)
- if isinstance(tx, string_or_bytes_types):
- return serialize(signature_form(deserialize(tx), i, script, hashcode))
- newtx = copy.deepcopy(tx)
- for inp in newtx["ins"]:
- inp["script"] = ""
- newtx["ins"][i]["script"] = script
- if hashcode & 0x1f == SIGHASH_NONE:
- newtx["outs"] = []
- for j, inp in enumerate(newtx["ins"]):
- if j != i:
- inp["sequence"] = 0
- elif hashcode & 0x1f == SIGHASH_SINGLE:
- if len(newtx["ins"]) > len(newtx["outs"]):
- raise Exception(
- "Transactions with sighash single should have len in <= len out")
- newtx["outs"] = newtx["outs"][:i+1]
- for out in newtx["outs"][:i]:
- out['value'] = 2**64 - 1
- out['script'] = ""
- for j, inp in enumerate(newtx["ins"]):
- if j != i:
- inp["sequence"] = 0
- if hashcode & SIGHASH_ANYONECANPAY:
- newtx["ins"] = [newtx["ins"][i]]
- else:
- pass
- return newtx
-
-def txhash(tx, hashcode=None):
- if isinstance(tx, str) and re.match('^[0-9a-fA-F]*$', tx):
- tx = changebase(tx, 16, 256)
- if hashcode:
- return dbl_sha256(from_string_to_bytes(tx) + encode(
- int(hashcode), 256, 4)[::-1])
- else:
- return safe_hexlify(bin_dbl_sha256(tx)[::-1])
-
-
-def bin_txhash(tx, hashcode=None):
- return binascii.unhexlify(txhash(tx, hashcode))
-
-
-def ecdsa_tx_sign(tx, priv, hashcode=SIGHASH_ALL, usenonce=None):
- sig = ecdsa_raw_sign(
- txhash(tx, hashcode),
- priv,
- True,
- rawmsg=True,
- usenonce=usenonce)
- return sig + encode(hashcode, 16, 2)
-
-
-def ecdsa_tx_verify(tx, sig, pub, hashcode=SIGHASH_ALL):
- return ecdsa_raw_verify(
- txhash(tx, hashcode),
- pub,
- sig[:-2],
- True,
- rawmsg=True)
-
-# Scripts
-
-
-def mk_pubkey_script(addr):
- # Keep the auxiliary functions around for altcoins' sake
- return '76a914' + b58check_to_hex(addr) + '88ac'
-
-
-def mk_scripthash_script(addr):
- return 'a914' + b58check_to_hex(addr) + '87'
-
-# Address representation to output script
-
-
-def address_to_script(addr):
- if addr[0] == '3' or addr[0] == '2':
- return mk_scripthash_script(addr)
- else:
- return mk_pubkey_script(addr)
-
-# Output script to address representation
-
-
-def script_to_address(script, vbyte=0):
- if re.match('^[0-9a-fA-F]*$', script):
- script = binascii.unhexlify(script)
- if script[:3] == b'\x76\xa9\x14' and script[-2:] == b'\x88\xac' and len(
- script) == 25:
- return bin_to_b58check(script[3:-2], vbyte) # pubkey hash addresses
- else:
- if vbyte in [111, 196]:
- # Testnet
- scripthash_byte = 196
- else:
- scripthash_byte = 5
- # BIP0016 scripthash addresses
- return bin_to_b58check(script[2:-1], scripthash_byte)
-
-
-def p2sh_scriptaddr(script, magicbyte=5):
- if re.match('^[0-9a-fA-F]*$', script):
- script = binascii.unhexlify(script)
- return hex_to_b58check(hash160(script), magicbyte)
-
-
-scriptaddr = p2sh_scriptaddr
-
-
-def deserialize_script(script):
- if isinstance(script, str) and re.match('^[0-9a-fA-F]*$', script):
- return json_changebase(
- deserialize_script(binascii.unhexlify(script)),
- lambda x: safe_hexlify(x))
- out, pos = [], 0
- while pos < len(script):
- code = from_byte_to_int(script[pos])
- if code == 0:
- out.append(None)
- pos += 1
- elif code <= 75:
- out.append(script[pos + 1:pos + 1 + code])
- pos += 1 + code
- elif code <= 78:
- szsz = pow(2, code - 76)
- sz = decode(script[pos + szsz:pos:-1], 256)
- out.append(script[pos + 1 + szsz:pos + 1 + szsz + sz])
- pos += 1 + szsz + sz
- elif code <= 96:
- out.append(code - 80)
- pos += 1
- else:
- out.append(code)
- pos += 1
- return out
-
-
-def serialize_script_unit(unit):
- if isinstance(unit, int):
- if unit < 16:
- return from_int_to_byte(unit + 80)
- else:
- return bytes([unit])
- elif unit is None:
- return b'\x00'
- else:
- if len(unit) <= 75:
- return from_int_to_byte(len(unit)) + unit
- elif len(unit) < 256:
- return from_int_to_byte(76) + from_int_to_byte(len(unit)) + unit
- elif len(unit) < 65536:
- return from_int_to_byte(77) + encode(len(unit), 256, 2)[::-1] + unit
- else:
- return from_int_to_byte(78) + encode(len(unit), 256, 4)[::-1] + unit
-
-
-if is_python2:
-
- def serialize_script(script):
- if json_is_base(script, 16):
- return binascii.hexlify(serialize_script(json_changebase(
- script, lambda x: binascii.unhexlify(x))))
- return ''.join(map(serialize_script_unit, script))
-else:
-
- def serialize_script(script):
- if json_is_base(script, 16):
- return safe_hexlify(serialize_script(json_changebase(
- script, lambda x: binascii.unhexlify(x))))
-
- result = bytes()
- for b in map(serialize_script_unit, script):
- result += b if isinstance(b, bytes) else bytes(b, 'utf-8')
- return result
-
-
-def mk_multisig_script(*args): # [pubs],k or pub1,pub2...pub[n],k
- if isinstance(args[0], list):
- pubs, k = args[0], int(args[1])
- else:
- pubs = list(filter(lambda x: len(str(x)) >= 32, args))
- k = int(args[len(pubs)])
- return serialize_script([k] + pubs + [len(pubs)]) + 'ae'
-
-# Signing and verifying
-
-
-def verify_tx_input(tx, i, script, sig, pub):
- if re.match('^[0-9a-fA-F]*$', tx):
- tx = binascii.unhexlify(tx)
- if re.match('^[0-9a-fA-F]*$', script):
- script = binascii.unhexlify(script)
- if not re.match('^[0-9a-fA-F]*$', sig):
- sig = safe_hexlify(sig)
- if not re.match('^[0-9a-fA-F]*$', pub):
- pub = safe_hexlify(pub)
- hashcode = decode(sig[-2:], 16)
- modtx = signature_form(tx, int(i), script, hashcode)
- return ecdsa_tx_verify(modtx, sig, pub, hashcode)
-
-
-def sign(tx, i, priv, hashcode=SIGHASH_ALL, usenonce=None):
- i = int(i)
- if (not is_python2 and isinstance(re, bytes)) or not re.match(
- '^[0-9a-fA-F]*$', tx):
- return binascii.unhexlify(sign(safe_hexlify(tx), i, priv))
- if len(priv) <= 33:
- priv = safe_hexlify(priv)
- pub = privkey_to_pubkey(priv, True)
- address = pubkey_to_address(pub)
- signing_tx = signature_form(tx, i, mk_pubkey_script(address), hashcode)
- sig = ecdsa_tx_sign(signing_tx, priv, hashcode, usenonce=usenonce)
- txobj = deserialize(tx)
- txobj["ins"][i]["script"] = serialize_script([sig, pub])
- return serialize(txobj)
-
-
-def signall(tx, priv):
- # if priv is a dictionary, assume format is
- # { 'txinhash:txinidx' : privkey }
- if isinstance(priv, dict):
- for e, i in enumerate(deserialize(tx)["ins"]):
- k = priv["%s:%d" % (i["outpoint"]["hash"], i["outpoint"]["index"])]
- tx = sign(tx, e, k)
- else:
- for i in range(len(deserialize(tx)["ins"])):
- tx = sign(tx, i, priv)
- return tx
-
-
-def multisign(tx, i, script, pk, hashcode=SIGHASH_ALL):
- if re.match('^[0-9a-fA-F]*$', tx):
- tx = binascii.unhexlify(tx)
- if re.match('^[0-9a-fA-F]*$', script):
- script = binascii.unhexlify(script)
- modtx = signature_form(tx, i, script, hashcode)
- return ecdsa_tx_sign(modtx, pk, hashcode)
-
-
-def apply_multisignatures(*args):
- # tx,i,script,sigs OR tx,i,script,sig1,sig2...,sig[n]
- tx, i, script = args[0], int(args[1]), args[2]
- sigs = args[3] if isinstance(args[3], list) else list(args[3:])
-
- if isinstance(script, str) and re.match('^[0-9a-fA-F]*$', script):
- script = binascii.unhexlify(script)
- sigs = [binascii.unhexlify(x) if x[:2] == '30' else x for x in sigs]
- if isinstance(tx, str) and re.match('^[0-9a-fA-F]*$', tx):
- return safe_hexlify(apply_multisignatures(
- binascii.unhexlify(tx), i, script, sigs))
-
- txobj = deserialize(tx)
- txobj["ins"][i]["script"] = serialize_script([None] + sigs + [script])
- return serialize(txobj)
-
-
-def is_inp(arg):
- return len(arg) > 64 or "output" in arg or "outpoint" in arg
-
-
-def mktx(*args):
- # [in0, in1...],[out0, out1...] or in0, in1 ... out0 out1 ...
- ins, outs = [], []
- for arg in args:
- if isinstance(arg, list):
- for a in arg:
- (ins if is_inp(a) else outs).append(a)
- else:
- (ins if is_inp(arg) else outs).append(arg)
-
- txobj = {"locktime": 0, "version": 1, "ins": [], "outs": []}
- for i in ins:
- if isinstance(i, dict) and "outpoint" in i:
- txobj["ins"].append(i)
- else:
- if isinstance(i, dict) and "output" in i:
- i = i["output"]
- txobj["ins"].append({
- "outpoint": {"hash": i[:64],
- "index": int(i[65:])},
- "script": "",
- "sequence": 4294967295
- })
- for o in outs:
- if isinstance(o, string_or_bytes_types):
- addr = o[:o.find(':')]
- val = int(o[o.find(':') + 1:])
- o = {}
- if re.match('^[0-9a-fA-F]*$', addr):
- o["script"] = addr
- else:
- o["address"] = addr
- o["value"] = val
-
- outobj = {}
- if "address" in o:
- outobj["script"] = address_to_script(o["address"])
- elif "script" in o:
- outobj["script"] = o["script"]
- else:
- raise Exception("Could not find 'address' or 'script' in output.")
- outobj["value"] = o["value"]
- txobj["outs"].append(outobj)
-
- return serialize(txobj)
-
-
-def select(unspent, value):
- value = int(value)
- high = [u for u in unspent if u["value"] >= value]
- high.sort(key=lambda u: u["value"])
- low = [u for u in unspent if u["value"] < value]
- low.sort(key=lambda u: -u["value"])
- if len(high):
- return [high[0]]
- i, tv = 0, 0
- while tv < value and i < len(low):
- tv += low[i]["value"]
- i += 1
- if tv < value:
- raise Exception("Not enough funds")
- return low[:i]
diff --git a/bitcoin/transaction.py b/bitcoin/transaction.py
@@ -1,490 +0,0 @@
-#!/usr/bin/python
-import binascii, re, json, copy, sys
-from bitcoin.main import *
-from _functools import reduce
-
-### Hex to bin converter and vice versa for objects
-
-
-def json_is_base(obj, base):
- if not is_python2 and isinstance(obj, bytes):
- return False
-
- alpha = get_code_string(base)
- if isinstance(obj, string_types):
- for i in range(len(obj)):
- if alpha.find(obj[i]) == -1:
- return False
- return True
- elif isinstance(obj, int_types) or obj is None:
- return True
- elif isinstance(obj, list):
- for i in range(len(obj)):
- if not json_is_base(obj[i], base):
- return False
- return True
- else:
- for x in obj:
- if not json_is_base(obj[x], base):
- return False
- return True
-
-
-def json_changebase(obj, changer):
- if isinstance(obj, string_or_bytes_types):
- return changer(obj)
- elif isinstance(obj, int_types) or obj is None:
- return obj
- elif isinstance(obj, list):
- return [json_changebase(x, changer) for x in obj]
- return dict((x, json_changebase(obj[x], changer)) for x in obj)
-
-# Transaction serialization and deserialization
-
-
-def deserialize(tx):
- if isinstance(tx, str) and re.match('^[0-9a-fA-F]*$', tx):
- #tx = bytes(bytearray.fromhex(tx))
- return json_changebase(
- deserialize(binascii.unhexlify(tx)), lambda x: safe_hexlify(x))
- # http://stackoverflow.com/questions/4851463/python-closure-write-to-variable-in-parent-scope
- # Python's scoping rules are demented, requiring me to make pos an object
- # so that it is call-by-reference
- pos = [0]
-
- def read_as_int(bytez):
- pos[0] += bytez
- return decode(tx[pos[0] - bytez:pos[0]][::-1], 256)
-
- def read_var_int():
- pos[0] += 1
-
- val = from_byte_to_int(tx[pos[0] - 1])
- if val < 253:
- return val
- return read_as_int(pow(2, val - 252))
-
- def read_bytes(bytez):
- pos[0] += bytez
- return tx[pos[0] - bytez:pos[0]]
-
- def read_var_string():
- size = read_var_int()
- return read_bytes(size)
-
- obj = {"ins": [], "outs": []}
- obj["version"] = read_as_int(4)
- ins = read_var_int()
- for i in range(ins):
- obj["ins"].append({
- "outpoint": {
- "hash": read_bytes(32)[::-1],
- "index": read_as_int(4)
- },
- "script": read_var_string(),
- "sequence": read_as_int(4)
- })
- outs = read_var_int()
- for i in range(outs):
- obj["outs"].append({
- "value": read_as_int(8),
- "script": read_var_string()
- })
- obj["locktime"] = read_as_int(4)
- return obj
-
-
-def serialize(txobj):
- #if isinstance(txobj, bytes):
- # txobj = bytes_to_hex_string(txobj)
- o = []
- if json_is_base(txobj, 16):
- json_changedbase = json_changebase(txobj,
- lambda x: binascii.unhexlify(x))
- hexlified = safe_hexlify(serialize(json_changedbase))
- return hexlified
- o.append(encode(txobj["version"], 256, 4)[::-1])
- o.append(num_to_var_int(len(txobj["ins"])))
- for inp in txobj["ins"]:
- o.append(inp["outpoint"]["hash"][::-1])
- o.append(encode(inp["outpoint"]["index"], 256, 4)[::-1])
- o.append(num_to_var_int(len(inp["script"])) + (inp["script"] if inp[
- "script"] or is_python2 else bytes()))
- o.append(encode(inp["sequence"], 256, 4)[::-1])
- o.append(num_to_var_int(len(txobj["outs"])))
- for out in txobj["outs"]:
- o.append(encode(out["value"], 256, 8)[::-1])
- o.append(num_to_var_int(len(out["script"])) + out["script"])
- o.append(encode(txobj["locktime"], 256, 4)[::-1])
-
- return ''.join(o) if is_python2 else reduce(lambda x, y: x + y, o, bytes())
-
-# Hashing transactions for signing
-
-SIGHASH_ALL = 1
-SIGHASH_NONE = 2
-SIGHASH_SINGLE = 3
-# this works like SIGHASH_ANYONECANPAY | SIGHASH_ALL, might as well make it explicit while
-# we fix the constant
-SIGHASH_ANYONECANPAY = 0x81
-
-
-def signature_form(tx, i, script, hashcode=SIGHASH_ALL):
- i, hashcode = int(i), int(hashcode)
- if isinstance(tx, string_or_bytes_types):
- return serialize(signature_form(deserialize(tx), i, script, hashcode))
- newtx = copy.deepcopy(tx)
- for inp in newtx["ins"]:
- inp["script"] = ""
- newtx["ins"][i]["script"] = script
- if hashcode == SIGHASH_NONE:
- newtx["outs"] = []
- elif hashcode == SIGHASH_SINGLE:
- newtx["outs"] = newtx["outs"][:len(newtx["ins"])]
- for out in range(len(newtx["ins"]) - 1):
- out.value = 2**64 - 1
- out.script = ""
- elif hashcode == SIGHASH_ANYONECANPAY:
- newtx["ins"] = [newtx["ins"][i]]
- else:
- pass
- return newtx
-
-# Making the actual signatures
-
-
-def der_encode_sig(v, r, s):
- """Takes (vbyte, r, s) as ints and returns hex der encode sig"""
- #See https://github.com/vbuterin/pybitcointools/issues/89
- #See https://github.com/simcity4242/pybitcointools/
- s = N - s if s > N // 2 else s # BIP62 low s
- b1, b2 = encode(r, 256), encode(s, 256)
- if bytearray(b1)[
- 0] & 0x80: # add null bytes if leading byte interpreted as negative
- b1 = b'\x00' + b1
- if bytearray(b2)[0] & 0x80:
- b2 = b'\x00' + b2
- left = b'\x02' + encode(len(b1), 256, 1) + b1
- right = b'\x02' + encode(len(b2), 256, 1) + b2
- return safe_hexlify(b'\x30' + encode(
- len(left + right), 256, 1) + left + right)
-
-
-def der_decode_sig(sig):
- leftlen = decode(sig[6:8], 16) * 2
- left = sig[8:8 + leftlen]
- rightlen = decode(sig[10 + leftlen:12 + leftlen], 16) * 2
- right = sig[12 + leftlen:12 + leftlen + rightlen]
- return (None, decode(left, 16), decode(right, 16))
-
-
-def txhash(tx, hashcode=None):
- if isinstance(tx, str) and re.match('^[0-9a-fA-F]*$', tx):
- tx = changebase(tx, 16, 256)
- if hashcode:
- return dbl_sha256(from_string_to_bytes(tx) + encode(
- int(hashcode), 256, 4)[::-1])
- else:
- return safe_hexlify(bin_dbl_sha256(tx)[::-1])
-
-
-def bin_txhash(tx, hashcode=None):
- return binascii.unhexlify(txhash(tx, hashcode))
-
-
-def ecdsa_tx_sign(tx, priv, hashcode=SIGHASH_ALL):
- rawsig = ecdsa_raw_sign(bin_txhash(tx, hashcode), priv)
- return der_encode_sig(*rawsig) + encode(hashcode, 16, 2)
-
-
-def ecdsa_tx_verify(tx, sig, pub, hashcode=SIGHASH_ALL):
- return ecdsa_raw_verify(bin_txhash(tx, hashcode), der_decode_sig(sig), pub)
-
-# Scripts
-
-def mk_pubkey_script(addr):
- # Keep the auxiliary functions around for altcoins' sake
- return '76a914' + b58check_to_hex(addr) + '88ac'
-
-
-def mk_scripthash_script(addr):
- return 'a914' + b58check_to_hex(addr) + '87'
-
-# Address representation to output script
-
-
-def address_to_script(addr):
- if addr[0] == '3' or addr[0] == '2':
- return mk_scripthash_script(addr)
- else:
- return mk_pubkey_script(addr)
-
-# Output script to address representation
-
-
-def script_to_address(script, vbyte=0):
- if re.match('^[0-9a-fA-F]*$', script):
- script = binascii.unhexlify(script)
- if script[:3] == b'\x76\xa9\x14' and script[-2:] == b'\x88\xac' and len(
- script) == 25:
- return bin_to_b58check(script[3:-2], vbyte) # pubkey hash addresses
- else:
- if vbyte in [111, 196]:
- # Testnet
- scripthash_byte = 196
- else:
- scripthash_byte = 5
- # BIP0016 scripthash addresses
- return bin_to_b58check(script[2:-1], scripthash_byte)
-
-
-def p2sh_scriptaddr(script, magicbyte=5):
- if re.match('^[0-9a-fA-F]*$', script):
- script = binascii.unhexlify(script)
- return hex_to_b58check(hash160(script), magicbyte)
-
-
-scriptaddr = p2sh_scriptaddr
-
-
-def deserialize_script(script):
- if isinstance(script, str) and re.match('^[0-9a-fA-F]*$', script):
- return json_changebase(
- deserialize_script(binascii.unhexlify(script)),
- lambda x: safe_hexlify(x))
- out, pos = [], 0
- while pos < len(script):
- code = from_byte_to_int(script[pos])
- if code == 0:
- out.append(None)
- pos += 1
- elif code <= 75:
- out.append(script[pos + 1:pos + 1 + code])
- pos += 1 + code
- elif code <= 78:
- szsz = pow(2, code - 76)
- sz = decode(script[pos + szsz:pos:-1], 256)
- out.append(script[pos + 1 + szsz:pos + 1 + szsz + sz])
- pos += 1 + szsz + sz
- elif code <= 96:
- out.append(code - 80)
- pos += 1
- else:
- out.append(code)
- pos += 1
- return out
-
-
-def serialize_script_unit(unit):
- if isinstance(unit, int):
- if unit < 16:
- return from_int_to_byte(unit + 80)
- else:
- return bytes([unit])
- elif unit is None:
- return b'\x00'
- else:
- if len(unit) <= 75:
- return from_int_to_byte(len(unit)) + unit
- elif len(unit) < 256:
- return from_int_to_byte(76) + from_int_to_byte(len(unit)) + unit
- elif len(unit) < 65536:
- return from_int_to_byte(77) + encode(len(unit), 256, 2)[::-1] + unit
- else:
- return from_int_to_byte(78) + encode(len(unit), 256, 4)[::-1] + unit
-
-
-if is_python2:
-
- def serialize_script(script):
- if json_is_base(script, 16):
- return binascii.hexlify(serialize_script(json_changebase(
- script, lambda x: binascii.unhexlify(x))))
- return ''.join(map(serialize_script_unit, script))
-else:
-
- def serialize_script(script):
- if json_is_base(script, 16):
- return safe_hexlify(serialize_script(json_changebase(
- script, lambda x: binascii.unhexlify(x))))
-
- result = bytes()
- for b in map(serialize_script_unit, script):
- result += b if isinstance(b, bytes) else bytes(b, 'utf-8')
- return result
-
-
-def mk_multisig_script(*args): # [pubs],k or pub1,pub2...pub[n],k
- if isinstance(args[0], list):
- pubs, k = args[0], int(args[1])
- else:
- pubs = list(filter(lambda x: len(str(x)) >= 32, args))
- k = int(args[len(pubs)])
- return serialize_script([k] + pubs + [len(pubs)]) + 'ae'
-
-# Signing and verifying
-
-
-def verify_tx_input(tx, i, script, sig, pub):
- if re.match('^[0-9a-fA-F]*$', tx):
- tx = binascii.unhexlify(tx)
- if re.match('^[0-9a-fA-F]*$', script):
- script = binascii.unhexlify(script)
- if not re.match('^[0-9a-fA-F]*$', sig):
- sig = safe_hexlify(sig)
- hashcode = decode(sig[-2:], 16)
- modtx = signature_form(tx, int(i), script, hashcode)
- return ecdsa_tx_verify(modtx, sig, pub, hashcode)
-
-
-def sign(tx, i, priv, hashcode=SIGHASH_ALL):
- i = int(i)
- if (not is_python2 and isinstance(re, bytes)) or not re.match(
- '^[0-9a-fA-F]*$', tx):
- return binascii.unhexlify(sign(safe_hexlify(tx), i, priv))
- if len(priv) <= 33:
- priv = safe_hexlify(priv)
- pub = privkey_to_pubkey(priv)
- address = pubkey_to_address(pub)
- signing_tx = signature_form(tx, i, mk_pubkey_script(address), hashcode)
- sig = ecdsa_tx_sign(signing_tx, priv, hashcode)
- txobj = deserialize(tx)
- txobj["ins"][i]["script"] = serialize_script([sig, pub])
- return serialize(txobj)
-
-
-def signall(tx, priv):
- # if priv is a dictionary, assume format is
- # { 'txinhash:txinidx' : privkey }
- if isinstance(priv, dict):
- for e, i in enumerate(deserialize(tx)["ins"]):
- k = priv["%s:%d" % (i["outpoint"]["hash"], i["outpoint"]["index"])]
- tx = sign(tx, e, k)
- else:
- for i in range(len(deserialize(tx)["ins"])):
- tx = sign(tx, i, priv)
- return tx
-
-
-def multisign(tx, i, script, pk, hashcode=SIGHASH_ALL):
- if re.match('^[0-9a-fA-F]*$', tx):
- tx = binascii.unhexlify(tx)
- if re.match('^[0-9a-fA-F]*$', script):
- script = binascii.unhexlify(script)
- modtx = signature_form(tx, i, script, hashcode)
- return ecdsa_tx_sign(modtx, pk, hashcode)
-
-
-def apply_multisignatures(*args):
- # tx,i,script,sigs OR tx,i,script,sig1,sig2...,sig[n]
- tx, i, script = args[0], int(args[1]), args[2]
- sigs = args[3] if isinstance(args[3], list) else list(args[3:])
-
- if isinstance(script, str) and re.match('^[0-9a-fA-F]*$', script):
- script = binascii.unhexlify(script)
- sigs = [binascii.unhexlify(x) if x[:2] == '30' else x for x in sigs]
- if isinstance(tx, str) and re.match('^[0-9a-fA-F]*$', tx):
- return safe_hexlify(apply_multisignatures(
- binascii.unhexlify(tx), i, script, sigs))
-
- txobj = deserialize(tx)
- txobj["ins"][i]["script"] = serialize_script([None] + sigs + [script])
- return serialize(txobj)
-
-
-def is_inp(arg):
- return len(arg) > 64 or "output" in arg or "outpoint" in arg
-
-
-def mktx(*args):
- # [in0, in1...],[out0, out1...] or in0, in1 ... out0 out1 ...
- ins, outs = [], []
- for arg in args:
- if isinstance(arg, list):
- for a in arg:
- (ins if is_inp(a) else outs).append(a)
- else:
- (ins if is_inp(arg) else outs).append(arg)
-
- txobj = {"locktime": 0, "version": 1, "ins": [], "outs": []}
- for i in ins:
- if isinstance(i, dict) and "outpoint" in i:
- txobj["ins"].append(i)
- else:
- if isinstance(i, dict) and "output" in i:
- i = i["output"]
- txobj["ins"].append({
- "outpoint": {"hash": i[:64],
- "index": int(i[65:])},
- "script": "",
- "sequence": 4294967295
- })
- for o in outs:
- if isinstance(o, string_or_bytes_types):
- addr = o[:o.find(':')]
- val = int(o[o.find(':') + 1:])
- o = {}
- if re.match('^[0-9a-fA-F]*$', addr):
- o["script"] = addr
- else:
- o["address"] = addr
- o["value"] = val
-
- outobj = {}
- if "address" in o:
- outobj["script"] = address_to_script(o["address"])
- elif "script" in o:
- outobj["script"] = o["script"]
- else:
- raise Exception("Could not find 'address' or 'script' in output.")
- outobj["value"] = o["value"]
- txobj["outs"].append(outobj)
-
- return serialize(txobj)
-
-
-def select(unspent, value):
- value = int(value)
- high = [u for u in unspent if u["value"] >= value]
- high.sort(key=lambda u: u["value"])
- low = [u for u in unspent if u["value"] < value]
- low.sort(key=lambda u: -u["value"])
- if len(high):
- return [high[0]]
- i, tv = 0, 0
- while tv < value and i < len(low):
- tv += low[i]["value"]
- i += 1
- if tv < value:
- raise Exception("Not enough funds")
- return low[:i]
-
-# Only takes inputs of the form { "output": blah, "value": foo }
-
-
-def mksend(*args):
- argz, change, fee = args[:-2], args[-2], int(args[-1])
- ins, outs = [], []
- for arg in argz:
- if isinstance(arg, list):
- for a in arg:
- (ins if is_inp(a) else outs).append(a)
- else:
- (ins if is_inp(arg) else outs).append(arg)
-
- isum = sum([i["value"] for i in ins])
- osum, outputs2 = 0, []
- for o in outs:
- if isinstance(o, string_types):
- o2 = {"address": o[:o.find(':')], "value": int(o[o.find(':') + 1:])}
- else:
- o2 = o
- outputs2.append(o2)
- osum += o2["value"]
-
- if isum < osum + fee:
- raise Exception("Not enough money")
- elif isum > osum + fee + 5430:
- outputs2 += [{"address": change, "value": isum - osum - fee}]
-
- return mktx(ins, outputs2)
diff --git a/electrumpersonalserver/__init__.py b/electrumpersonalserver/__init__.py
@@ -1,10 +1,5 @@
+import electrumpersonalserver.bitcoin
+import electrumpersonalserver.server
-
-from electrumpersonalserver.merkleproof import convert_core_to_electrum_merkle_proof
-from electrumpersonalserver.jsonrpc import JsonRpc, JsonRpcError
-from electrumpersonalserver.hashes import (to_bytes, sha256, bh2u,
- script_to_scripthash, get_status_electrum, bfh, hash_encode,
- hash_decode, Hash, hash_merkle_root, hash_160, script_to_address,
- address_to_script, address_to_scripthash, bytes_fmt)
-from electrumpersonalserver.transactionmonitor import TransactionMonitor, import_addresses, ADDRESSES_LABEL
-from electrumpersonalserver.deterministicwallet import parse_electrum_master_public_key, DeterministicWallet
+__certfile__ = 'certs/cert.crt'
+__keyfile__ = 'certs/cert.key'
diff --git a/electrumpersonalserver/bitcoin/__init__.py b/electrumpersonalserver/bitcoin/__init__.py
@@ -0,0 +1,15 @@
+from electrumpersonalserver.bitcoin.py2specials import *
+from electrumpersonalserver.bitcoin.py3specials import *
+
+secp_present = False
+try:
+ import secp256k1
+
+ secp_present = True
+ from electrumpersonalserver.bitcoin.secp256k1_main import *
+ from electrumpersonalserver.bitcoin.secp256k1_transaction import *
+ from electrumpersonalserver.bitcoin.secp256k1_deterministic import *
+except ImportError as e:
+ from electrumpersonalserver.bitcoin.main import *
+ from electrumpersonalserver.bitcoin.deterministic import *
+ from electrumpersonalserver.bitcoin.transaction import *
diff --git a/electrumpersonalserver/bitcoin/deterministic.py b/electrumpersonalserver/bitcoin/deterministic.py
@@ -0,0 +1,183 @@
+from electrumpersonalserver.bitcoin.main import *
+import hmac
+import hashlib
+from binascii import hexlify
+
+# Below code ASSUMES binary inputs and compressed pubkeys
+MAINNET_PRIVATE = b'\x04\x88\xAD\xE4'
+#MAINNET_PUBLIC = b'\x04\x88\xB2\x1E'
+TESTNET_PRIVATE = b'\x04\x35\x83\x94'
+#TESTNET_PUBLIC = b'\x04\x35\x87\xCF'
+PRIVATE = [MAINNET_PRIVATE, TESTNET_PRIVATE]
+#PUBLIC = [MAINNET_PUBLIC, TESTNET_PUBLIC]
+
+#updated for electrum's bip32 version bytes
+#only public keys because electrum personal server only needs them
+#https://github.com/spesmilo/electrum-docs/blob/master/xpub_version_bytes.rst
+PUBLIC = [ b'\x04\x88\xb2\x1e', #mainnet p2pkh or p2sh xpub
+ b'\x04\x9d\x7c\xb2', #mainnet p2wpkh-p2sh ypub
+ b'\x02\x95\xb4\x3f', #mainnet p2wsh-p2sh Ypub
+ b'\x04\xb2\x47\x46', #mainnet p2wpkh zpub
+ b'\x02\xaa\x7e\xd3', #mainnet p2wsh Zpub
+ b'\x04\x35\x87\xcf', #testnet p2pkh or p2sh tpub
+ b'\x04\x4a\x52\x62', #testnet p2wpkh-p2sh upub
+ b'\x02\x42\x89\xef', #testnet p2wsh-p2sh Upub
+ b'\x04\x5f\x1c\xf6', #testnet p2wpkh vpub
+ b'\x02\x57\x54\x83' #testnet p2wsh Vpub
+ ]
+
+# BIP32 child key derivation
+
+def raw_bip32_ckd(rawtuple, i):
+ vbytes, depth, fingerprint, oldi, chaincode, key = rawtuple
+ i = int(i)
+
+ if vbytes in PRIVATE:
+ priv = key
+ pub = privtopub(key)
+ else:
+ pub = key
+
+ if i >= 2**31:
+ if vbytes in PUBLIC:
+ raise Exception("Can't do private derivation on public key!")
+ I = hmac.new(chaincode, b'\x00' + priv[:32] + encode(i, 256, 4),
+ hashlib.sha512).digest()
+ else:
+ I = hmac.new(chaincode, pub + encode(i, 256, 4),
+ hashlib.sha512).digest()
+
+ if vbytes in PRIVATE:
+ newkey = add_privkeys(I[:32] + B'\x01', priv)
+ fingerprint = bin_hash160(privtopub(key))[:4]
+ if vbytes in PUBLIC:
+ newkey = add_pubkeys(compress(privtopub(I[:32])), key)
+ fingerprint = bin_hash160(key)[:4]
+
+ return (vbytes, depth + 1, fingerprint, i, I[32:], newkey)
+
+
+def bip32_serialize(rawtuple):
+ vbytes, depth, fingerprint, i, chaincode, key = rawtuple
+ i = encode(i, 256, 4)
+ chaincode = encode(hash_to_int(chaincode), 256, 32)
+ keydata = b'\x00' + key[:-1] if vbytes in PRIVATE else key
+ bindata = vbytes + from_int_to_byte(
+ depth % 256) + fingerprint + i + chaincode + keydata
+ return changebase(bindata + bin_dbl_sha256(bindata)[:4], 256, 58)
+
+
+def bip32_deserialize(data):
+ dbin = changebase(data, 58, 256)
+ if bin_dbl_sha256(dbin[:-4])[:4] != dbin[-4:]:
+ raise Exception("Invalid checksum")
+ vbytes = dbin[0:4]
+ depth = from_byte_to_int(dbin[4])
+ fingerprint = dbin[5:9]
+ i = decode(dbin[9:13], 256)
+ chaincode = dbin[13:45]
+ key = dbin[46:78] + b'\x01' if vbytes in PRIVATE else dbin[45:78]
+ return (vbytes, depth, fingerprint, i, chaincode, key)
+
+
+def raw_bip32_privtopub(rawtuple):
+ vbytes, depth, fingerprint, i, chaincode, key = rawtuple
+ newvbytes = MAINNET_PUBLIC if vbytes == MAINNET_PRIVATE else TESTNET_PUBLIC
+ return (newvbytes, depth, fingerprint, i, chaincode, privtopub(key))
+
+
+def bip32_privtopub(data):
+ return bip32_serialize(raw_bip32_privtopub(bip32_deserialize(data)))
+
+
+def bip32_ckd(data, i):
+ return bip32_serialize(raw_bip32_ckd(bip32_deserialize(data), i))
+
+
+def bip32_master_key(seed, vbytes=MAINNET_PRIVATE):
+ I = hmac.new(
+ from_string_to_bytes("Bitcoin seed"), seed, hashlib.sha512).digest()
+ return bip32_serialize((vbytes, 0, b'\x00' * 4, 0, I[32:], I[:32] + b'\x01'
+ ))
+
+
+def bip32_bin_extract_key(data):
+ return bip32_deserialize(data)[-1]
+
+
+def bip32_extract_key(data):
+ return safe_hexlify(bip32_deserialize(data)[-1])
+
+# Exploits the same vulnerability as above in Electrum wallets
+# Takes a BIP32 pubkey and one of the child privkeys of its corresponding
+# privkey and returns the BIP32 privkey associated with that pubkey
+
+def raw_crack_bip32_privkey(parent_pub, priv):
+ vbytes, depth, fingerprint, i, chaincode, key = priv
+ pvbytes, pdepth, pfingerprint, pi, pchaincode, pkey = parent_pub
+ i = int(i)
+
+ if i >= 2**31:
+ raise Exception("Can't crack private derivation!")
+
+ I = hmac.new(pchaincode, pkey + encode(i, 256, 4), hashlib.sha512).digest()
+
+ pprivkey = subtract_privkeys(key, I[:32] + b'\x01')
+
+ newvbytes = MAINNET_PRIVATE if vbytes == MAINNET_PUBLIC else TESTNET_PRIVATE
+ return (newvbytes, pdepth, pfingerprint, pi, pchaincode, pprivkey)
+
+
+def crack_bip32_privkey(parent_pub, priv):
+ dsppub = bip32_deserialize(parent_pub)
+ dspriv = bip32_deserialize(priv)
+ return bip32_serialize(raw_crack_bip32_privkey(dsppub, dspriv))
+
+def bip32_descend(*args):
+ if len(args) == 2:
+ key, path = args
+ else:
+ key, path = args[0], map(int, args[1:])
+ for p in path:
+ key = bip32_ckd(key, p)
+ return bip32_extract_key(key)
+
+# electrum
+def electrum_stretch(seed):
+ return slowsha(seed)
+
+# Accepts seed or stretched seed, returns master public key
+
+def electrum_mpk(seed):
+ if len(seed) == 32:
+ seed = electrum_stretch(seed)
+ return privkey_to_pubkey(seed)[2:]
+
+# Accepts (seed or stretched seed), index and secondary index
+# (conventionally 0 for ordinary addresses, 1 for change) , returns privkey
+
+
+def electrum_privkey(seed, n, for_change=0):
+ if len(seed) == 32:
+ seed = electrum_stretch(seed)
+ mpk = electrum_mpk(seed)
+ offset = dbl_sha256(from_int_representation_to_bytes(n)+b':'+
+ from_int_representation_to_bytes(for_change)+b':'+
+ binascii.unhexlify(mpk))
+ return add_privkeys(seed, offset)
+
+# Accepts (seed or stretched seed or master pubkey), index and secondary index
+# (conventionally 0 for ordinary addresses, 1 for change) , returns pubkey
+
+def electrum_pubkey(masterkey, n, for_change=0):
+ if len(masterkey) == 32:
+ mpk = electrum_mpk(electrum_stretch(masterkey))
+ elif len(masterkey) == 64:
+ mpk = electrum_mpk(masterkey)
+ else:
+ mpk = masterkey
+ bin_mpk = encode_pubkey(mpk, 'bin_electrum')
+ offset = bin_dbl_sha256(from_int_representation_to_bytes(n)+b':'+
+ from_int_representation_to_bytes(for_change)+b':'+bin_mpk)
+ return add_pubkeys('04'+mpk, privtopub(offset))
+
diff --git a/bitcoin/main.py b/electrumpersonalserver/bitcoin/main.py
diff --git a/bitcoin/py2specials.py b/electrumpersonalserver/bitcoin/py2specials.py
diff --git a/bitcoin/py3specials.py b/electrumpersonalserver/bitcoin/py3specials.py
diff --git a/electrumpersonalserver/bitcoin/secp256k1_deterministic.py b/electrumpersonalserver/bitcoin/secp256k1_deterministic.py
@@ -0,0 +1,92 @@
+from electrumpersonalserver.bitcoin.secp256k1_main import *
+import hmac
+import hashlib
+from binascii import hexlify
+
+# Below code ASSUMES binary inputs and compressed pubkeys
+MAINNET_PRIVATE = b'\x04\x88\xAD\xE4'
+MAINNET_PUBLIC = b'\x04\x88\xB2\x1E'
+TESTNET_PRIVATE = b'\x04\x35\x83\x94'
+TESTNET_PUBLIC = b'\x04\x35\x87\xCF'
+PRIVATE = [MAINNET_PRIVATE, TESTNET_PRIVATE]
+PUBLIC = [MAINNET_PUBLIC, TESTNET_PUBLIC]
+
+# BIP32 child key derivation
+
+def raw_bip32_ckd(rawtuple, i):
+ vbytes, depth, fingerprint, oldi, chaincode, key = rawtuple
+ i = int(i)
+
+ if vbytes in PRIVATE:
+ priv = key
+ pub = privtopub(key, False)
+ else:
+ pub = key
+
+ if i >= 2**31:
+ if vbytes in PUBLIC:
+ raise Exception("Can't do private derivation on public key!")
+ I = hmac.new(chaincode, b'\x00' + priv[:32] + encode(i, 256, 4),
+ hashlib.sha512).digest()
+ else:
+ I = hmac.new(chaincode, pub + encode(i, 256, 4),
+ hashlib.sha512).digest()
+
+ if vbytes in PRIVATE:
+ newkey = add_privkeys(I[:32] + B'\x01', priv, False)
+ fingerprint = bin_hash160(privtopub(key, False))[:4]
+ if vbytes in PUBLIC:
+ newkey = add_pubkeys([privtopub(I[:32] + '\x01', False), key], False)
+ fingerprint = bin_hash160(key)[:4]
+
+ return (vbytes, depth + 1, fingerprint, i, I[32:], newkey)
+
+def bip32_serialize(rawtuple):
+ vbytes, depth, fingerprint, i, chaincode, key = rawtuple
+ i = encode(i, 256, 4)
+ chaincode = encode(hash_to_int(chaincode), 256, 32)
+ keydata = b'\x00' + key[:-1] if vbytes in PRIVATE else key
+ bindata = vbytes + from_int_to_byte(
+ depth % 256) + fingerprint + i + chaincode + keydata
+ return changebase(bindata + bin_dbl_sha256(bindata)[:4], 256, 58)
+
+def bip32_deserialize(data):
+ dbin = changebase(data, 58, 256)
+ if bin_dbl_sha256(dbin[:-4])[:4] != dbin[-4:]:
+ raise Exception("Invalid checksum")
+ vbytes = dbin[0:4]
+ depth = from_byte_to_int(dbin[4])
+ fingerprint = dbin[5:9]
+ i = decode(dbin[9:13], 256)
+ chaincode = dbin[13:45]
+ key = dbin[46:78] + b'\x01' if vbytes in PRIVATE else dbin[45:78]
+ return (vbytes, depth, fingerprint, i, chaincode, key)
+
+def raw_bip32_privtopub(rawtuple):
+ vbytes, depth, fingerprint, i, chaincode, key = rawtuple
+ newvbytes = MAINNET_PUBLIC if vbytes == MAINNET_PRIVATE else TESTNET_PUBLIC
+ return (newvbytes, depth, fingerprint, i, chaincode, privtopub(key, False))
+
+def bip32_privtopub(data):
+ return bip32_serialize(raw_bip32_privtopub(bip32_deserialize(data)))
+
+def bip32_ckd(data, i):
+ return bip32_serialize(raw_bip32_ckd(bip32_deserialize(data), i))
+
+def bip32_master_key(seed, vbytes=MAINNET_PRIVATE):
+ I = hmac.new(
+ from_string_to_bytes("Bitcoin seed"), seed, hashlib.sha512).digest()
+ return bip32_serialize((vbytes, 0, b'\x00' * 4, 0, I[32:], I[:32] + b'\x01'
+ ))
+
+def bip32_extract_key(data):
+ return safe_hexlify(bip32_deserialize(data)[-1])
+
+def bip32_descend(*args):
+ if len(args) == 2:
+ key, path = args
+ else:
+ key, path = args[0], map(int, args[1:])
+ for p in path:
+ key = bip32_ckd(key, p)
+ return bip32_extract_key(key)
diff --git a/bitcoin/secp256k1_main.py b/electrumpersonalserver/bitcoin/secp256k1_main.py
diff --git a/electrumpersonalserver/bitcoin/secp256k1_transaction.py b/electrumpersonalserver/bitcoin/secp256k1_transaction.py
@@ -0,0 +1,452 @@
+#!/usr/bin/python
+import binascii, re, json, copy, sys
+from electrumpersonalserver.bitcoin.secp256k1_main import *
+from _functools import reduce
+import os
+
+is_python2 = sys.version_info.major == 2
+
+### Hex to bin converter and vice versa for objects
+def json_is_base(obj, base):
+ if not is_python2 and isinstance(obj, bytes):
+ return False
+
+ alpha = get_code_string(base)
+ if isinstance(obj, string_types):
+ for i in range(len(obj)):
+ if alpha.find(obj[i]) == -1:
+ return False
+ return True
+ elif isinstance(obj, int_types) or obj is None:
+ return True
+ elif isinstance(obj, list):
+ for i in range(len(obj)):
+ if not json_is_base(obj[i], base):
+ return False
+ return True
+ else:
+ for x in obj:
+ if not json_is_base(obj[x], base):
+ return False
+ return True
+
+
+def json_changebase(obj, changer):
+ if isinstance(obj, string_or_bytes_types):
+ return changer(obj)
+ elif isinstance(obj, int_types) or obj is None:
+ return obj
+ elif isinstance(obj, list):
+ return [json_changebase(x, changer) for x in obj]
+ return dict((x, json_changebase(obj[x], changer)) for x in obj)
+
+# Transaction serialization and deserialization
+
+
+def deserialize(tx):
+ if isinstance(tx, str) and re.match('^[0-9a-fA-F]*$', tx):
+ #tx = bytes(bytearray.fromhex(tx))
+ return json_changebase(
+ deserialize(binascii.unhexlify(tx)), lambda x: safe_hexlify(x))
+ # http://stackoverflow.com/questions/4851463/python-closure-write-to-variable-in-parent-scope
+ # Python's scoping rules are demented, requiring me to make pos an object
+ # so that it is call-by-reference
+ pos = [0]
+
+ def read_as_int(bytez):
+ pos[0] += bytez
+ return decode(tx[pos[0] - bytez:pos[0]][::-1], 256)
+
+ def read_var_int():
+ pos[0] += 1
+
+ val = from_byte_to_int(tx[pos[0] - 1])
+ if val < 253:
+ return val
+ return read_as_int(pow(2, val - 252))
+
+ def read_bytes(bytez):
+ pos[0] += bytez
+ return tx[pos[0] - bytez:pos[0]]
+
+ def read_var_string():
+ size = read_var_int()
+ return read_bytes(size)
+
+ obj = {"ins": [], "outs": []}
+ obj["version"] = read_as_int(4)
+ ins = read_var_int()
+ for i in range(ins):
+ obj["ins"].append({
+ "outpoint": {
+ "hash": read_bytes(32)[::-1],
+ "index": read_as_int(4)
+ },
+ "script": read_var_string(),
+ "sequence": read_as_int(4)
+ })
+ outs = read_var_int()
+ for i in range(outs):
+ obj["outs"].append({
+ "value": read_as_int(8),
+ "script": read_var_string()
+ })
+ obj["locktime"] = read_as_int(4)
+ return obj
+
+
+def serialize(txobj):
+ #if isinstance(txobj, bytes):
+ # txobj = bytes_to_hex_string(txobj)
+ o = []
+ if json_is_base(txobj, 16):
+ json_changedbase = json_changebase(txobj,
+ lambda x: binascii.unhexlify(x))
+ hexlified = safe_hexlify(serialize(json_changedbase))
+ return hexlified
+ o.append(encode(txobj["version"], 256, 4)[::-1])
+ o.append(num_to_var_int(len(txobj["ins"])))
+ for inp in txobj["ins"]:
+ o.append(inp["outpoint"]["hash"][::-1])
+ o.append(encode(inp["outpoint"]["index"], 256, 4)[::-1])
+ o.append(num_to_var_int(len(inp["script"])) + (inp["script"] if inp[
+ "script"] or is_python2 else bytes()))
+ o.append(encode(inp["sequence"], 256, 4)[::-1])
+ o.append(num_to_var_int(len(txobj["outs"])))
+ for out in txobj["outs"]:
+ o.append(encode(out["value"], 256, 8)[::-1])
+ o.append(num_to_var_int(len(out["script"])) + out["script"])
+ o.append(encode(txobj["locktime"], 256, 4)[::-1])
+
+ return ''.join(o) if is_python2 else reduce(lambda x, y: x + y, o, bytes())
+
+# Hashing transactions for signing
+
+SIGHASH_ALL = 1
+SIGHASH_NONE = 2
+SIGHASH_SINGLE = 3
+SIGHASH_ANYONECANPAY = 0x80
+
+def signature_form(tx, i, script, hashcode=SIGHASH_ALL):
+ i, hashcode = int(i), int(hashcode)
+ if isinstance(tx, string_or_bytes_types):
+ return serialize(signature_form(deserialize(tx), i, script, hashcode))
+ newtx = copy.deepcopy(tx)
+ for inp in newtx["ins"]:
+ inp["script"] = ""
+ newtx["ins"][i]["script"] = script
+ if hashcode & 0x1f == SIGHASH_NONE:
+ newtx["outs"] = []
+ for j, inp in enumerate(newtx["ins"]):
+ if j != i:
+ inp["sequence"] = 0
+ elif hashcode & 0x1f == SIGHASH_SINGLE:
+ if len(newtx["ins"]) > len(newtx["outs"]):
+ raise Exception(
+ "Transactions with sighash single should have len in <= len out")
+ newtx["outs"] = newtx["outs"][:i+1]
+ for out in newtx["outs"][:i]:
+ out['value'] = 2**64 - 1
+ out['script'] = ""
+ for j, inp in enumerate(newtx["ins"]):
+ if j != i:
+ inp["sequence"] = 0
+ if hashcode & SIGHASH_ANYONECANPAY:
+ newtx["ins"] = [newtx["ins"][i]]
+ else:
+ pass
+ return newtx
+
+def txhash(tx, hashcode=None):
+ if isinstance(tx, str) and re.match('^[0-9a-fA-F]*$', tx):
+ tx = changebase(tx, 16, 256)
+ if hashcode:
+ return dbl_sha256(from_string_to_bytes(tx) + encode(
+ int(hashcode), 256, 4)[::-1])
+ else:
+ return safe_hexlify(bin_dbl_sha256(tx)[::-1])
+
+
+def bin_txhash(tx, hashcode=None):
+ return binascii.unhexlify(txhash(tx, hashcode))
+
+
+def ecdsa_tx_sign(tx, priv, hashcode=SIGHASH_ALL, usenonce=None):
+ sig = ecdsa_raw_sign(
+ txhash(tx, hashcode),
+ priv,
+ True,
+ rawmsg=True,
+ usenonce=usenonce)
+ return sig + encode(hashcode, 16, 2)
+
+
+def ecdsa_tx_verify(tx, sig, pub, hashcode=SIGHASH_ALL):
+ return ecdsa_raw_verify(
+ txhash(tx, hashcode),
+ pub,
+ sig[:-2],
+ True,
+ rawmsg=True)
+
+# Scripts
+
+
+def mk_pubkey_script(addr):
+ # Keep the auxiliary functions around for altcoins' sake
+ return '76a914' + b58check_to_hex(addr) + '88ac'
+
+
+def mk_scripthash_script(addr):
+ return 'a914' + b58check_to_hex(addr) + '87'
+
+# Address representation to output script
+
+
+def address_to_script(addr):
+ if addr[0] == '3' or addr[0] == '2':
+ return mk_scripthash_script(addr)
+ else:
+ return mk_pubkey_script(addr)
+
+# Output script to address representation
+
+
+def script_to_address(script, vbyte=0):
+ if re.match('^[0-9a-fA-F]*$', script):
+ script = binascii.unhexlify(script)
+ if script[:3] == b'\x76\xa9\x14' and script[-2:] == b'\x88\xac' and len(
+ script) == 25:
+ return bin_to_b58check(script[3:-2], vbyte) # pubkey hash addresses
+ else:
+ if vbyte in [111, 196]:
+ # Testnet
+ scripthash_byte = 196
+ else:
+ scripthash_byte = 5
+ # BIP0016 scripthash addresses
+ return bin_to_b58check(script[2:-1], scripthash_byte)
+
+
+def p2sh_scriptaddr(script, magicbyte=5):
+ if re.match('^[0-9a-fA-F]*$', script):
+ script = binascii.unhexlify(script)
+ return hex_to_b58check(hash160(script), magicbyte)
+
+
+scriptaddr = p2sh_scriptaddr
+
+
+def deserialize_script(script):
+ if isinstance(script, str) and re.match('^[0-9a-fA-F]*$', script):
+ return json_changebase(
+ deserialize_script(binascii.unhexlify(script)),
+ lambda x: safe_hexlify(x))
+ out, pos = [], 0
+ while pos < len(script):
+ code = from_byte_to_int(script[pos])
+ if code == 0:
+ out.append(None)
+ pos += 1
+ elif code <= 75:
+ out.append(script[pos + 1:pos + 1 + code])
+ pos += 1 + code
+ elif code <= 78:
+ szsz = pow(2, code - 76)
+ sz = decode(script[pos + szsz:pos:-1], 256)
+ out.append(script[pos + 1 + szsz:pos + 1 + szsz + sz])
+ pos += 1 + szsz + sz
+ elif code <= 96:
+ out.append(code - 80)
+ pos += 1
+ else:
+ out.append(code)
+ pos += 1
+ return out
+
+
+def serialize_script_unit(unit):
+ if isinstance(unit, int):
+ if unit < 16:
+ return from_int_to_byte(unit + 80)
+ else:
+ return bytes([unit])
+ elif unit is None:
+ return b'\x00'
+ else:
+ if len(unit) <= 75:
+ return from_int_to_byte(len(unit)) + unit
+ elif len(unit) < 256:
+ return from_int_to_byte(76) + from_int_to_byte(len(unit)) + unit
+ elif len(unit) < 65536:
+ return from_int_to_byte(77) + encode(len(unit), 256, 2)[::-1] + unit
+ else:
+ return from_int_to_byte(78) + encode(len(unit), 256, 4)[::-1] + unit
+
+
+if is_python2:
+
+ def serialize_script(script):
+ if json_is_base(script, 16):
+ return binascii.hexlify(serialize_script(json_changebase(
+ script, lambda x: binascii.unhexlify(x))))
+ return ''.join(map(serialize_script_unit, script))
+else:
+
+ def serialize_script(script):
+ if json_is_base(script, 16):
+ return safe_hexlify(serialize_script(json_changebase(
+ script, lambda x: binascii.unhexlify(x))))
+
+ result = bytes()
+ for b in map(serialize_script_unit, script):
+ result += b if isinstance(b, bytes) else bytes(b, 'utf-8')
+ return result
+
+
+def mk_multisig_script(*args): # [pubs],k or pub1,pub2...pub[n],k
+ if isinstance(args[0], list):
+ pubs, k = args[0], int(args[1])
+ else:
+ pubs = list(filter(lambda x: len(str(x)) >= 32, args))
+ k = int(args[len(pubs)])
+ return serialize_script([k] + pubs + [len(pubs)]) + 'ae'
+
+# Signing and verifying
+
+
+def verify_tx_input(tx, i, script, sig, pub):
+ if re.match('^[0-9a-fA-F]*$', tx):
+ tx = binascii.unhexlify(tx)
+ if re.match('^[0-9a-fA-F]*$', script):
+ script = binascii.unhexlify(script)
+ if not re.match('^[0-9a-fA-F]*$', sig):
+ sig = safe_hexlify(sig)
+ if not re.match('^[0-9a-fA-F]*$', pub):
+ pub = safe_hexlify(pub)
+ hashcode = decode(sig[-2:], 16)
+ modtx = signature_form(tx, int(i), script, hashcode)
+ return ecdsa_tx_verify(modtx, sig, pub, hashcode)
+
+
+def sign(tx, i, priv, hashcode=SIGHASH_ALL, usenonce=None):
+ i = int(i)
+ if (not is_python2 and isinstance(re, bytes)) or not re.match(
+ '^[0-9a-fA-F]*$', tx):
+ return binascii.unhexlify(sign(safe_hexlify(tx), i, priv))
+ if len(priv) <= 33:
+ priv = safe_hexlify(priv)
+ pub = privkey_to_pubkey(priv, True)
+ address = pubkey_to_address(pub)
+ signing_tx = signature_form(tx, i, mk_pubkey_script(address), hashcode)
+ sig = ecdsa_tx_sign(signing_tx, priv, hashcode, usenonce=usenonce)
+ txobj = deserialize(tx)
+ txobj["ins"][i]["script"] = serialize_script([sig, pub])
+ return serialize(txobj)
+
+
+def signall(tx, priv):
+ # if priv is a dictionary, assume format is
+ # { 'txinhash:txinidx' : privkey }
+ if isinstance(priv, dict):
+ for e, i in enumerate(deserialize(tx)["ins"]):
+ k = priv["%s:%d" % (i["outpoint"]["hash"], i["outpoint"]["index"])]
+ tx = sign(tx, e, k)
+ else:
+ for i in range(len(deserialize(tx)["ins"])):
+ tx = sign(tx, i, priv)
+ return tx
+
+
+def multisign(tx, i, script, pk, hashcode=SIGHASH_ALL):
+ if re.match('^[0-9a-fA-F]*$', tx):
+ tx = binascii.unhexlify(tx)
+ if re.match('^[0-9a-fA-F]*$', script):
+ script = binascii.unhexlify(script)
+ modtx = signature_form(tx, i, script, hashcode)
+ return ecdsa_tx_sign(modtx, pk, hashcode)
+
+
+def apply_multisignatures(*args):
+ # tx,i,script,sigs OR tx,i,script,sig1,sig2...,sig[n]
+ tx, i, script = args[0], int(args[1]), args[2]
+ sigs = args[3] if isinstance(args[3], list) else list(args[3:])
+
+ if isinstance(script, str) and re.match('^[0-9a-fA-F]*$', script):
+ script = binascii.unhexlify(script)
+ sigs = [binascii.unhexlify(x) if x[:2] == '30' else x for x in sigs]
+ if isinstance(tx, str) and re.match('^[0-9a-fA-F]*$', tx):
+ return safe_hexlify(apply_multisignatures(
+ binascii.unhexlify(tx), i, script, sigs))
+
+ txobj = deserialize(tx)
+ txobj["ins"][i]["script"] = serialize_script([None] + sigs + [script])
+ return serialize(txobj)
+
+
+def is_inp(arg):
+ return len(arg) > 64 or "output" in arg or "outpoint" in arg
+
+
+def mktx(*args):
+ # [in0, in1...],[out0, out1...] or in0, in1 ... out0 out1 ...
+ ins, outs = [], []
+ for arg in args:
+ if isinstance(arg, list):
+ for a in arg:
+ (ins if is_inp(a) else outs).append(a)
+ else:
+ (ins if is_inp(arg) else outs).append(arg)
+
+ txobj = {"locktime": 0, "version": 1, "ins": [], "outs": []}
+ for i in ins:
+ if isinstance(i, dict) and "outpoint" in i:
+ txobj["ins"].append(i)
+ else:
+ if isinstance(i, dict) and "output" in i:
+ i = i["output"]
+ txobj["ins"].append({
+ "outpoint": {"hash": i[:64],
+ "index": int(i[65:])},
+ "script": "",
+ "sequence": 4294967295
+ })
+ for o in outs:
+ if isinstance(o, string_or_bytes_types):
+ addr = o[:o.find(':')]
+ val = int(o[o.find(':') + 1:])
+ o = {}
+ if re.match('^[0-9a-fA-F]*$', addr):
+ o["script"] = addr
+ else:
+ o["address"] = addr
+ o["value"] = val
+
+ outobj = {}
+ if "address" in o:
+ outobj["script"] = address_to_script(o["address"])
+ elif "script" in o:
+ outobj["script"] = o["script"]
+ else:
+ raise Exception("Could not find 'address' or 'script' in output.")
+ outobj["value"] = o["value"]
+ txobj["outs"].append(outobj)
+
+ return serialize(txobj)
+
+
+def select(unspent, value):
+ value = int(value)
+ high = [u for u in unspent if u["value"] >= value]
+ high.sort(key=lambda u: u["value"])
+ low = [u for u in unspent if u["value"] < value]
+ low.sort(key=lambda u: -u["value"])
+ if len(high):
+ return [high[0]]
+ i, tv = 0, 0
+ while tv < value and i < len(low):
+ tv += low[i]["value"]
+ i += 1
+ if tv < value:
+ raise Exception("Not enough funds")
+ return low[:i]
diff --git a/electrumpersonalserver/bitcoin/transaction.py b/electrumpersonalserver/bitcoin/transaction.py
@@ -0,0 +1,490 @@
+#!/usr/bin/python
+import binascii, re, json, copy, sys
+from electrumpersonalserver.bitcoin.main import *
+from _functools import reduce
+
+### Hex to bin converter and vice versa for objects
+
+
def json_is_base(obj, base):
    """Return True if every string leaf of *obj* is valid in *base*.

    Recurses through lists and dicts; ints and None always pass.  Used
    mainly with base 16 to detect hex-encoded transaction objects.
    Raw py3 bytes are never considered base-encoded.
    """
    if not is_python2 and isinstance(obj, bytes):
        return False

    alpha = get_code_string(base)
    if isinstance(obj, string_types):
        for i in range(len(obj)):
            if alpha.find(obj[i]) == -1:
                return False
        return True
    elif isinstance(obj, int_types) or obj is None:
        return True
    elif isinstance(obj, list):
        for i in range(len(obj)):
            if not json_is_base(obj[i], base):
                return False
        return True
    else:
        # otherwise assumed to be a dict: check every value
        for x in obj:
            if not json_is_base(obj[x], base):
                return False
        return True
+
+
def json_changebase(obj, changer):
    """Recursively apply *changer* to every string/bytes leaf of *obj*.

    Ints and None pass through untouched; lists and dicts are rebuilt
    with the same structure.
    """
    if isinstance(obj, string_or_bytes_types):
        return changer(obj)
    if isinstance(obj, int_types) or obj is None:
        return obj
    if isinstance(obj, list):
        return [json_changebase(item, changer) for item in obj]
    return {key: json_changebase(obj[key], changer) for key in obj}
+
+# Transaction serialization and deserialization
+
+
def deserialize(tx):
    """Parse a raw (pre-segwit) bitcoin transaction into a dict.

    Accepts a hex string (leaf values in the result are re-hexlified)
    or raw bytes.  Returns {"version", "ins", "outs", "locktime"}:
    each input has "outpoint" {"hash", "index"}, "script", "sequence";
    each output has "value" and "script".
    """
    if isinstance(tx, str) and re.match('^[0-9a-fA-F]*$', tx):
        #tx = bytes(bytearray.fromhex(tx))
        return json_changebase(
            deserialize(binascii.unhexlify(tx)), lambda x: safe_hexlify(x))
    # http://stackoverflow.com/questions/4851463/python-closure-write-to-variable-in-parent-scope
    # Python's scoping rules are demented, requiring me to make pos an object
    # so that it is call-by-reference
    pos = [0]

    def read_as_int(bytez):
        # read `bytez` little-endian bytes as an integer, advancing the cursor
        pos[0] += bytez
        return decode(tx[pos[0] - bytez:pos[0]][::-1], 256)

    def read_var_int():
        # Bitcoin CompactSize: one byte, or marker 253/254/255 + 2/4/8 bytes
        pos[0] += 1

        val = from_byte_to_int(tx[pos[0] - 1])
        if val < 253:
            return val
        return read_as_int(pow(2, val - 252))

    def read_bytes(bytez):
        # read `bytez` raw bytes, advancing the cursor
        pos[0] += bytez
        return tx[pos[0] - bytez:pos[0]]

    def read_var_string():
        # length-prefixed byte string (CompactSize length)
        size = read_var_int()
        return read_bytes(size)

    obj = {"ins": [], "outs": []}
    obj["version"] = read_as_int(4)
    ins = read_var_int()
    for i in range(ins):
        obj["ins"].append({
            "outpoint": {
                # txids are serialized little-endian; reverse to display order
                "hash": read_bytes(32)[::-1],
                "index": read_as_int(4)
            },
            "script": read_var_string(),
            "sequence": read_as_int(4)
        })
    outs = read_var_int()
    for i in range(outs):
        obj["outs"].append({
            "value": read_as_int(8),
            "script": read_var_string()
        })
    obj["locktime"] = read_as_int(4)
    return obj
+
+
def serialize(txobj):
    """Serialize a deserialized transaction dict back to raw form.

    If every leaf of *txobj* is hex (json_is_base), the leaves are
    unhexlified first and the result is returned as a hex string;
    otherwise the raw serialization is returned (str on py2, bytes on
    py3).  Inverse of deserialize().
    """
    #if isinstance(txobj, bytes):
    #    txobj = bytes_to_hex_string(txobj)
    o = []
    if json_is_base(txobj, 16):
        json_changedbase = json_changebase(txobj,
            lambda x: binascii.unhexlify(x))
        hexlified = safe_hexlify(serialize(json_changedbase))
        return hexlified
    o.append(encode(txobj["version"], 256, 4)[::-1])  # 4 bytes LE
    o.append(num_to_var_int(len(txobj["ins"])))
    for inp in txobj["ins"]:
        o.append(inp["outpoint"]["hash"][::-1])  # txid back to LE byte order
        o.append(encode(inp["outpoint"]["index"], 256, 4)[::-1])
        o.append(num_to_var_int(len(inp["script"])) + (inp["script"] if inp[
            "script"] or is_python2 else bytes()))
        o.append(encode(inp["sequence"], 256, 4)[::-1])
    o.append(num_to_var_int(len(txobj["outs"])))
    for out in txobj["outs"]:
        o.append(encode(out["value"], 256, 8)[::-1])  # 8 bytes LE
        o.append(num_to_var_int(len(out["script"])) + out["script"])
    o.append(encode(txobj["locktime"], 256, 4)[::-1])

    return ''.join(o) if is_python2 else reduce(lambda x, y: x + y, o, bytes())
+
+# Hashing transactions for signing
+
# Bitcoin signature-hash type constants (the byte appended to the
# sighash preimage and to the end of each DER signature).
SIGHASH_ALL = 1
SIGHASH_NONE = 2
SIGHASH_SINGLE = 3
# this works like SIGHASH_ANYONECANPAY | SIGHASH_ALL, might as well make it explicit while
# we fix the constant
SIGHASH_ANYONECANPAY = 0x81
+
+
def signature_form(tx, i, script, hashcode=SIGHASH_ALL):
    """Return the modified transaction to be hashed and signed for input *i*.

    *tx* may be a serialized transaction (hex or raw) or a deserialized
    dict; the return value matches the input form.  *script* is placed
    in input *i* while all other input scripts are blanked, and the
    outputs are adjusted according to *hashcode*.
    """
    i, hashcode = int(i), int(hashcode)
    if isinstance(tx, string_or_bytes_types):
        return serialize(signature_form(deserialize(tx), i, script, hashcode))
    newtx = copy.deepcopy(tx)
    for inp in newtx["ins"]:
        inp["script"] = ""
    newtx["ins"][i]["script"] = script
    if hashcode == SIGHASH_NONE:
        # commit to no outputs: they may be changed freely after signing
        newtx["outs"] = []
    elif hashcode == SIGHASH_SINGLE:
        # commit only to the output with the same index as this input;
        # earlier outputs are blanked (value -1, empty script).
        # Bug fix: the old code truncated to len(ins) and iterated
        # ``for out in range(...)`` assigning attributes on ints, which
        # raised AttributeError for any transaction with >1 input.
        newtx["outs"] = newtx["outs"][:i + 1]
        for out in newtx["outs"][:i]:
            out["value"] = 2**64 - 1
            out["script"] = ""
    elif hashcode == SIGHASH_ANYONECANPAY:
        # commit to this input only
        newtx["ins"] = [newtx["ins"][i]]
    else:
        pass
    return newtx
+
+# Making the actual signatures
+
+
def der_encode_sig(v, r, s):
    """Takes (vbyte, r, s) as ints and returns hex der encode sig"""
    #See https://github.com/vbuterin/pybitcointools/issues/89
    #See https://github.com/simcity4242/pybitcointools/
    s = N - s if s > N // 2 else s # BIP62 low s
    b1, b2 = encode(r, 256), encode(s, 256)
    if bytearray(b1)[
        0] & 0x80: # add null bytes if leading byte interpreted as negative
        b1 = b'\x00' + b1
    if bytearray(b2)[0] & 0x80:
        b2 = b'\x00' + b2
    # 0x02 = DER INTEGER tag, 0x30 = DER SEQUENCE tag
    left = b'\x02' + encode(len(b1), 256, 1) + b1
    right = b'\x02' + encode(len(b2), 256, 1) + b2
    return safe_hexlify(b'\x30' + encode(
        len(left + right), 256, 1) + left + right)


def der_decode_sig(sig):
    """Extract (None, r, s) (ints) from a hex DER-encoded signature.

    Offsets are in hex characters (two per byte); assumes the standard
    two-INTEGER SEQUENCE layout produced by der_encode_sig.
    """
    leftlen = decode(sig[6:8], 16) * 2
    left = sig[8:8 + leftlen]
    rightlen = decode(sig[10 + leftlen:12 + leftlen], 16) * 2
    right = sig[12 + leftlen:12 + leftlen + rightlen]
    return (None, decode(left, 16), decode(right, 16))
+
+
def txhash(tx, hashcode=None):
    """Hash a transaction.

    Without *hashcode*: returns the display txid (double-SHA256,
    byte-reversed, hex).  With *hashcode*: appends it as 4 LE bytes and
    returns the double-SHA256 sighash digest (as dbl_sha256 produces it).
    """
    if isinstance(tx, str) and re.match('^[0-9a-fA-F]*$', tx):
        tx = changebase(tx, 16, 256)
    if hashcode:
        return dbl_sha256(from_string_to_bytes(tx) + encode(
            int(hashcode), 256, 4)[::-1])
    else:
        return safe_hexlify(bin_dbl_sha256(tx)[::-1])
+
+
def bin_txhash(tx, hashcode=None):
    """Binary form of txhash()."""
    return binascii.unhexlify(txhash(tx, hashcode))


def ecdsa_tx_sign(tx, priv, hashcode=SIGHASH_ALL):
    """Sign the sighash of *tx*; returns hex DER sig + trailing hashcode byte."""
    rawsig = ecdsa_raw_sign(bin_txhash(tx, hashcode), priv)
    return der_encode_sig(*rawsig) + encode(hashcode, 16, 2)


def ecdsa_tx_verify(tx, sig, pub, hashcode=SIGHASH_ALL):
    """Verify DER *sig* over the sighash of *tx* against pubkey *pub*."""
    return ecdsa_raw_verify(bin_txhash(tx, hashcode), der_decode_sig(sig), pub)
+
+# Scripts
+
def mk_pubkey_script(addr):
    # Keep the auxiliary functions around for altcoins' sake
    # OP_DUP OP_HASH160 <push 20> <hash160> OP_EQUALVERIFY OP_CHECKSIG
    return '76a914' + b58check_to_hex(addr) + '88ac'


def mk_scripthash_script(addr):
    # OP_HASH160 <push 20> <hash160> OP_EQUAL
    return 'a914' + b58check_to_hex(addr) + '87'

# Address representation to output script


def address_to_script(addr):
    """Convert a base58check address to its output script (hex).

    Addresses starting with '3' (mainnet p2sh) or '2' (testnet p2sh)
    map to scripthash scripts; everything else is treated as p2pkh.
    NOTE(review): no bech32 handling here; empty input raises IndexError.
    """
    if addr[0] == '3' or addr[0] == '2':
        return mk_scripthash_script(addr)
    else:
        return mk_pubkey_script(addr)
+
+# Output script to address representation
+
+
def script_to_address(script, vbyte=0):
    """Convert a hex output script to its address representation.

    p2pkh scripts (76a914...88ac, 25 bytes) use *vbyte* directly; any
    other script is assumed to be p2sh and gets the scripthash version
    byte (196 when *vbyte* is a testnet value 111/196, else 5).
    NOTE(review): re.match with a str pattern would raise on raw py3
    bytes input — callers appear to pass hex strings; confirm.
    """
    if re.match('^[0-9a-fA-F]*$', script):
        script = binascii.unhexlify(script)
    if script[:3] == b'\x76\xa9\x14' and script[-2:] == b'\x88\xac' and len(
            script) == 25:
        return bin_to_b58check(script[3:-2], vbyte)  # pubkey hash addresses
    else:
        if vbyte in [111, 196]:
            # Testnet
            scripthash_byte = 196
        else:
            scripthash_byte = 5
        # BIP0016 scripthash addresses
        return bin_to_b58check(script[2:-1], scripthash_byte)


def p2sh_scriptaddr(script, magicbyte=5):
    """Return the p2sh address for redeem *script* (hex string)."""
    if re.match('^[0-9a-fA-F]*$', script):
        script = binascii.unhexlify(script)
    return hex_to_b58check(hash160(script), magicbyte)


# historical alias kept for callers of the old name
scriptaddr = p2sh_scriptaddr
+
+
def deserialize_script(script):
    """Parse a script (hex string or raw bytes) into a list of elements.

    Elements are: None for OP_0, small ints for OP_1NEGATE..OP_16
    (code - 80), pushdata as byte strings (hex strings for hex input),
    and any other opcode as its raw integer value.
    """
    if isinstance(script, str) and re.match('^[0-9a-fA-F]*$', script):
        return json_changebase(
            deserialize_script(binascii.unhexlify(script)),
            lambda x: safe_hexlify(x))
    out, pos = [], 0
    while pos < len(script):
        code = from_byte_to_int(script[pos])
        if code == 0:
            # OP_0 / empty push
            out.append(None)
            pos += 1
        elif code <= 75:
            # direct push of `code` bytes
            out.append(script[pos + 1:pos + 1 + code])
            pos += 1 + code
        elif code <= 78:
            # OP_PUSHDATA1/2/4: size field is 1/2/4 bytes
            szsz = pow(2, code - 76)
            # reversed slice reads the little-endian size field
            sz = decode(script[pos + szsz:pos:-1], 256)
            out.append(script[pos + 1 + szsz:pos + 1 + szsz + sz])
            pos += 1 + szsz + sz
        elif code <= 96:
            # OP_1NEGATE(79)..OP_16(96) -> small integers
            out.append(code - 80)
            pos += 1
        else:
            # any other opcode passes through as its numeric value
            out.append(code)
            pos += 1
    return out
+
+
def serialize_script_unit(unit):
    """Serialize one script element (see deserialize_script) to bytes.

    ints < 16 become opcodes 80+n, larger ints are raw opcode bytes,
    None is OP_0, and byte strings get the shortest push prefix
    (direct push <= 75, else PUSHDATA1/2/4 with LE length).
    """
    if isinstance(unit, int):
        if unit < 16:
            return from_int_to_byte(unit + 80)
        else:
            return bytes([unit])
    elif unit is None:
        return b'\x00'
    else:
        if len(unit) <= 75:
            return from_int_to_byte(len(unit)) + unit
        elif len(unit) < 256:
            return from_int_to_byte(76) + from_int_to_byte(len(unit)) + unit
        elif len(unit) < 65536:
            return from_int_to_byte(77) + encode(len(unit), 256, 2)[::-1] + unit
        else:
            return from_int_to_byte(78) + encode(len(unit), 256, 4)[::-1] + unit


if is_python2:

    def serialize_script(script):
        """Serialize a script element list (py2): hex in -> hex out."""
        if json_is_base(script, 16):
            return binascii.hexlify(serialize_script(json_changebase(
                script, lambda x: binascii.unhexlify(x))))
        return ''.join(map(serialize_script_unit, script))
else:

    def serialize_script(script):
        """Serialize a script element list (py3): bytes out, or hex out
        when every string element is hex (json_is_base)."""
        if json_is_base(script, 16):
            return safe_hexlify(serialize_script(json_changebase(
                script, lambda x: binascii.unhexlify(x))))

        result = bytes()
        for b in map(serialize_script_unit, script):
            result += b if isinstance(b, bytes) else bytes(b, 'utf-8')
        return result
+
+
def mk_multisig_script(*args): # [pubs],k or pub1,pub2...pub[n],k
    """Build an m-of-n OP_CHECKMULTISIG redeem script (hex).

    Call as mk_multisig_script([pub1, ...], k) or
    mk_multisig_script(pub1, ..., pubn, k) with hex pubkeys.
    """
    if isinstance(args[0], list):
        pubs, k = args[0], int(args[1])
    else:
        # anything that looks like a pubkey (>= 32 chars) is a key;
        # the argument right after them is k
        pubs = list(filter(lambda x: len(str(x)) >= 32, args))
        k = int(args[len(pubs)])
    # 'ae' = OP_CHECKMULTISIG
    return serialize_script([k] + pubs + [len(pubs)]) + 'ae'
+
+# Signing and verifying
+
+
def verify_tx_input(tx, i, script, sig, pub):
    """Verify hex signature *sig* for input *i* of *tx* against *pub*.

    *tx* and *script* may be hex strings (unhexlified here); the
    sighash type is taken from the signature's final byte.
    """
    if re.match('^[0-9a-fA-F]*$', tx):
        tx = binascii.unhexlify(tx)
    if re.match('^[0-9a-fA-F]*$', script):
        script = binascii.unhexlify(script)
    if not re.match('^[0-9a-fA-F]*$', sig):
        sig = safe_hexlify(sig)
    # last hex byte of the signature is the hashcode it committed to
    hashcode = decode(sig[-2:], 16)
    modtx = signature_form(tx, int(i), script, hashcode)
    return ecdsa_tx_verify(modtx, sig, pub, hashcode)
+
+
def sign(tx, i, priv, hashcode=SIGHASH_ALL):
    """Sign p2pkh input *i* of *tx* with private key *priv*.

    *tx* may be a hex string or raw bytes; the return value matches the
    input form.  Short (raw/compressed) private keys are hexlified
    before use.
    """
    i = int(i)
    # Bug fix: the original tested ``isinstance(re, bytes)`` (the regex
    # module, never bytes) instead of the transaction, so raw-bytes
    # input fell through to ``re.match`` and raised TypeError; the
    # recursion also dropped the caller's hashcode.
    if (not is_python2 and isinstance(tx, bytes)) or not re.match(
            '^[0-9a-fA-F]*$', tx):
        return binascii.unhexlify(sign(safe_hexlify(tx), i, priv, hashcode))
    if len(priv) <= 33:
        priv = safe_hexlify(priv)
    pub = privkey_to_pubkey(priv)
    address = pubkey_to_address(pub)
    # sighash commits to the spent output's scriptPubKey
    signing_tx = signature_form(tx, i, mk_pubkey_script(address), hashcode)
    sig = ecdsa_tx_sign(signing_tx, priv, hashcode)
    txobj = deserialize(tx)
    txobj["ins"][i]["script"] = serialize_script([sig, pub])
    return serialize(txobj)
+
+
def signall(tx, priv):
    """Sign every input of *tx* with sign().

    *priv* is either a single key used for all inputs, or a dict
    mapping 'txinhash:txinidx' to the key for that input.
    """
    # if priv is a dictionary, assume format is
    # { 'txinhash:txinidx' : privkey }
    if isinstance(priv, dict):
        for e, i in enumerate(deserialize(tx)["ins"]):
            k = priv["%s:%d" % (i["outpoint"]["hash"], i["outpoint"]["index"])]
            tx = sign(tx, e, k)
    else:
        for i in range(len(deserialize(tx)["ins"])):
            tx = sign(tx, i, priv)
    return tx


def multisign(tx, i, script, pk, hashcode=SIGHASH_ALL):
    """Produce one signature for input *i* of a multisig transaction.

    *script* is the redeem script; combine the signatures collected
    from each signer with apply_multisignatures().
    """
    if re.match('^[0-9a-fA-F]*$', tx):
        tx = binascii.unhexlify(tx)
    if re.match('^[0-9a-fA-F]*$', script):
        script = binascii.unhexlify(script)
    modtx = signature_form(tx, i, script, hashcode)
    return ecdsa_tx_sign(modtx, pk, hashcode)


def apply_multisignatures(*args):
    """Insert collected multisig signatures into input *i* of *tx*.

    Call as apply_multisignatures(tx, i, script, sigs) or
    apply_multisignatures(tx, i, script, sig1, sig2, ...).  The input
    script becomes OP_0 <sigs...> <redeem script>; the leading None
    (OP_0) works around the historical OP_CHECKMULTISIG off-by-one.
    """
    # tx,i,script,sigs OR tx,i,script,sig1,sig2...,sig[n]
    tx, i, script = args[0], int(args[1]), args[2]
    sigs = args[3] if isinstance(args[3], list) else list(args[3:])

    if isinstance(script, str) and re.match('^[0-9a-fA-F]*$', script):
        script = binascii.unhexlify(script)
    # DER signatures start with byte 0x30 ('30' in hex)
    sigs = [binascii.unhexlify(x) if x[:2] == '30' else x for x in sigs]
    if isinstance(tx, str) and re.match('^[0-9a-fA-F]*$', tx):
        # hex tx in -> recurse on raw bytes, return hex
        return safe_hexlify(apply_multisignatures(
            binascii.unhexlify(tx), i, script, sigs))

    txobj = deserialize(tx)
    txobj["ins"][i]["script"] = serialize_script([None] + sigs + [script])
    return serialize(txobj)
+
+
def is_inp(arg):
    # Heuristic: inputs are "txid:vout" strings (> 64 chars; a bare txid
    # is 64 hex chars) or dicts with an "output"/"outpoint" key.
    return len(arg) > 64 or "output" in arg or "outpoint" in arg


def mktx(*args):
    """Build and hex-serialize an unsigned transaction (version 1, locktime 0).

    Inputs and outputs may be passed as lists or as individual
    arguments; is_inp() decides which side each item belongs to.
    Inputs are "txid:vout" strings or dicts with an "output"/"outpoint"
    key; outputs are "address:value" / "scripthex:value" strings or
    dicts with "address"/"script" and "value" keys.
    """
    # [in0, in1...],[out0, out1...] or in0, in1 ... out0 out1 ...
    ins, outs = [], []
    for arg in args:
        if isinstance(arg, list):
            for a in arg:
                (ins if is_inp(a) else outs).append(a)
        else:
            (ins if is_inp(arg) else outs).append(arg)

    txobj = {"locktime": 0, "version": 1, "ins": [], "outs": []}
    for i in ins:
        if isinstance(i, dict) and "outpoint" in i:
            # already in deserialized-input form; used verbatim
            txobj["ins"].append(i)
        else:
            if isinstance(i, dict) and "output" in i:
                i = i["output"]
            # "txid:vout" string: 64 hex chars, ':' at index 64, then index
            txobj["ins"].append({
                "outpoint": {"hash": i[:64],
                             "index": int(i[65:])},
                "script": "",
                "sequence": 4294967295
            })
    for o in outs:
        if isinstance(o, string_or_bytes_types):
            addr = o[:o.find(':')]
            val = int(o[o.find(':') + 1:])
            o = {}
            # an all-hex "address" part is taken to be a raw output script
            if re.match('^[0-9a-fA-F]*$', addr):
                o["script"] = addr
            else:
                o["address"] = addr
            o["value"] = val

        outobj = {}
        if "address" in o:
            outobj["script"] = address_to_script(o["address"])
        elif "script" in o:
            outobj["script"] = o["script"]
        else:
            raise Exception("Could not find 'address' or 'script' in output.")
        outobj["value"] = o["value"]
        txobj["outs"].append(outobj)

    return serialize(txobj)


def select(unspent, value):
    """Pick unspents covering *value*: the single smallest sufficient
    one if any exists, else the largest ones greedily until covered.

    Raises when the total available is insufficient.
    """
    value = int(value)
    high = [u for u in unspent if u["value"] >= value]
    high.sort(key=lambda u: u["value"])
    low = [u for u in unspent if u["value"] < value]
    low.sort(key=lambda u: -u["value"])
    if len(high):
        return [high[0]]
    i, tv = 0, 0
    while tv < value and i < len(low):
        tv += low[i]["value"]
        i += 1
    if tv < value:
        raise Exception("Not enough funds")
    return low[:i]

# Only takes inputs of the form { "output": blah, "value": foo }


def mksend(*args):
    """Like mktx() but appends a change output automatically.

    Call as mksend(ins_and_outs..., change_address, fee).  A change
    output is only added when the surplus exceeds fee + 5430 satoshi
    (the old dust threshold); smaller surpluses go to the miner.
    """
    argz, change, fee = args[:-2], args[-2], int(args[-1])
    ins, outs = [], []
    for arg in argz:
        if isinstance(arg, list):
            for a in arg:
                (ins if is_inp(a) else outs).append(a)
        else:
            (ins if is_inp(arg) else outs).append(arg)

    isum = sum([i["value"] for i in ins])
    osum, outputs2 = 0, []
    for o in outs:
        if isinstance(o, string_types):
            o2 = {"address": o[:o.find(':')], "value": int(o[o.find(':') + 1:])}
        else:
            o2 = o
        outputs2.append(o2)
        osum += o2["value"]

    if isum < osum + fee:
        raise Exception("Not enough money")
    elif isum > osum + fee + 5430:
        outputs2 += [{"address": change, "value": isum - osum - fee}]

    return mktx(ins, outputs2)
diff --git a/certs/cert.crt b/electrumpersonalserver/certs/cert.crt
diff --git a/certs/cert.key b/electrumpersonalserver/certs/cert.key
diff --git a/electrumpersonalserver/deterministicwallet.py b/electrumpersonalserver/deterministicwallet.py
@@ -1,221 +0,0 @@
-
-import bitcoin as btc
-from electrumpersonalserver.hashes import bh2u, hash_160, bfh, sha256
-
-# the class hierarchy for deterministic wallets in this file:
-# subclasses are written towards the right
-# each class knows how to create the scriptPubKeys of that wallet
-#
-# |-- SingleSigOldMnemonicWallet
-# |-- SingleSigP2PKHWallet
-# |-- SingleSigP2WPKHWallet
-# SingleSigWallet --|
-# / |-- SingleSigP2WPKH_P2SHWallet
-# DeterministicWallet
-# \ |-- MultisigP2SHWallet
-# MultisigWallet --|
-# |-- MultisigP2WSHWallet
-# |-- MultisigP2WSH_P2SHWallet
-
-#the wallet types are here
-#https://github.com/spesmilo/electrum/blob/3.0.6/RELEASE-NOTES
-#and
-#https://github.com/spesmilo/electrum-docs/blob/master/xpub_version_bytes.rst
-
-def is_string_parsable_as_hex_int(s):
- try:
- int(s, 16)
- return True
- except:
- return False
-
-def parse_electrum_master_public_key(keydata, gaplimit):
- if keydata[:4] in ("xpub", "tpub"):
- wallet = SingleSigP2PKHWallet(keydata)
- elif keydata[:4] in ("zpub", "vpub"):
- wallet = SingleSigP2WPKHWallet(keydata)
- elif keydata[:4] in ("ypub", "upub"):
- wallet = SingleSigP2WPKH_P2SHWallet(keydata)
- elif keydata.find(" ") != -1: #multiple keys = multisig
- chunks = keydata.split(" ")
- try:
- m = int(chunks[0])
- except ValueError:
- raise ValueError("Unable to parse m in multisig key data: "
- + chunks[0])
- pubkeys = chunks[1:]
- if not all([pubkeys[0][:4] == pub[:4] for pub in pubkeys[1:]]):
- raise ValueError("Inconsistent master public key types")
- if pubkeys[0][:4] in ("xpub", "tpub"):
- wallet = MultisigP2SHWallet(m, pubkeys)
- elif pubkeys[0][:4] in ("Zpub", "Vpub"):
- wallet = MultisigP2WSHWallet(m, pubkeys)
- elif pubkeys[0][:4] in ("Ypub", "Upub"):
- wallet = MultisigP2WSH_P2SHWallet(m, pubkeys)
- elif is_string_parsable_as_hex_int(keydata) and len(keydata) == 128:
- wallet = SingleSigOldMnemonicWallet(keydata)
- else:
- raise ValueError("Unrecognized electrum mpk format: " + keydata[:4])
- wallet.gaplimit = gaplimit
- return wallet
-
-class DeterministicWallet(object):
- def __init__(self):
- self.gaplimit = 0
- self.next_index = [0, 0]
- self.scriptpubkey_index = {}
-
- def get_new_scriptpubkeys(self, change, count):
- """Returns newly-generated addresses from this deterministic wallet"""
- return self.get_scriptpubkeys(change, self.next_index[change],
- count)
-
- def get_scriptpubkeys(self, change, from_index, count):
- """Returns addresses from this deterministic wallet"""
- pass
-
- #called in check_for_new_txes() when a new tx of ours arrives
- #to see if we need to import more addresses
- def have_scriptpubkeys_overrun_gaplimit(self, scriptpubkeys):
- """Return None if they havent, or how many addresses to
- import if they have"""
- result = {}
- for spk in scriptpubkeys:
- if spk not in self.scriptpubkey_index:
- continue
- change, index = self.scriptpubkey_index[spk]
- distance_from_next = self.next_index[change] - index
- if distance_from_next > self.gaplimit:
- continue
- #need to import more
- if change in result:
- result[change] = max(result[change], self.gaplimit
- - distance_from_next + 1)
- else:
- result[change] = self.gaplimit - distance_from_next + 1
- if len(result) > 0:
- return result
- else:
- return None
-
- def rewind_one(self, change):
- """Go back one pubkey in a branch"""
- self.next_index[change] -= 1
-
-class SingleSigWallet(DeterministicWallet):
- def __init__(self, mpk):
- super(SingleSigWallet, self).__init__()
- try:
- self.branches = (btc.bip32_ckd(mpk, 0), btc.bip32_ckd(mpk, 1))
- except Exception:
- raise ValueError("Bad master public key format. Get it from " +
- "Electrum menu `Wallet` -> `Information`")
- #m/change/i
-
- def pubkey_to_scriptpubkey(self, pubkey):
- raise RuntimeError()
-
- def get_pubkey(self, change, index):
- return btc.bip32_extract_key(btc.bip32_ckd(self.branches[change],
- index))
-
- def get_scriptpubkeys(self, change, from_index, count):
- result = []
- for index in range(from_index, from_index + count):
- pubkey = self.get_pubkey(change, index)
- scriptpubkey = self.pubkey_to_scriptpubkey(pubkey)
- self.scriptpubkey_index[scriptpubkey] = (change, index)
- result.append(scriptpubkey)
- self.next_index[change] = max(self.next_index[change], from_index+count)
- return result
-
-class SingleSigP2PKHWallet(SingleSigWallet):
- def pubkey_to_scriptpubkey(self, pubkey):
- pkh = bh2u(hash_160(bfh(pubkey)))
- #op_dup op_hash_160 length hash160 op_equalverify op_checksig
- return "76a914" + pkh + "88ac"
-
-class SingleSigP2WPKHWallet(SingleSigWallet):
- def pubkey_to_scriptpubkey(self, pubkey):
- pkh = bh2u(hash_160(bfh(pubkey)))
- #witness-version length hash160
- #witness version is always 0, length is always 0x14
- return "0014" + pkh
-
-class SingleSigP2WPKH_P2SHWallet(SingleSigWallet):
- def pubkey_to_scriptpubkey(self, pubkey):
- #witness-version length pubkeyhash
- #witness version is always 0, length is always 0x14
- redeem_script = '0014' + bh2u(hash_160(bfh(pubkey)))
- sh = bh2u(hash_160(bfh(redeem_script)))
- return "a914" + sh + "87"
-
-class SingleSigOldMnemonicWallet(SingleSigWallet):
- def __init__(self, mpk):
- super(SingleSigWallet, self).__init__()
- self.mpk = mpk
-
- def get_pubkey(self, change, index):
- return btc.electrum_pubkey(self.mpk, index, change)
-
- def pubkey_to_scriptpubkey(self, pubkey):
- pkh = bh2u(hash_160(bfh(pubkey)))
- #op_dup op_hash_160 length hash160 op_equalverify op_checksig
- return "76a914" + pkh + "88ac"
-
-class MultisigWallet(DeterministicWallet):
- def __init__(self, m, mpk_list):
- super(MultisigWallet, self).__init__()
- self.m = m
- try:
- self.pubkey_branches = [(btc.bip32_ckd(mpk, 0), btc.bip32_ckd(mpk,
- 1)) for mpk in mpk_list]
- except Exception:
- raise ValueError("Bad master public key format. Get it from " +
- "Electrum menu `Wallet` -> `Information`")
- #derivation path for pubkeys is m/change/index
-
- def redeem_script_to_scriptpubkey(self, redeem_script):
- raise RuntimeError()
-
- def get_scriptpubkeys(self, change, from_index, count):
- result = []
- for index in range(from_index, from_index + count):
- pubkeys = [btc.bip32_extract_key(btc.bip32_ckd(branch[change],
- index)) for branch in self.pubkey_branches]
- pubkeys = sorted(pubkeys)
- redeemScript = ""
- redeemScript += "%x"%(0x50 + self.m) #op_m
- for p in pubkeys:
- redeemScript += "21" #length
- redeemScript += p
- redeemScript += "%x"%(0x50 + len(pubkeys)) #op_n
- redeemScript += "ae" # op_checkmultisig
- scriptpubkey = self.redeem_script_to_scriptpubkey(redeemScript)
- self.scriptpubkey_index[scriptpubkey] = (change, index)
- result.append(scriptpubkey)
- self.next_index[change] = max(self.next_index[change], from_index+count)
- return result
-
-class MultisigP2SHWallet(MultisigWallet):
- def redeem_script_to_scriptpubkey(self, redeem_script):
- sh = bh2u(hash_160(bfh(redeem_script)))
- #op_hash160 length hash160 op_equal
- return "a914" + sh + "87"
-
-class MultisigP2WSHWallet(MultisigWallet):
- def redeem_script_to_scriptpubkey(self, redeem_script):
- sh = bh2u(sha256(bfh(redeem_script)))
- #witness-version length sha256
- #witness version is always 0, length is always 0x20
- return "0020" + sh
-
-class MultisigP2WSH_P2SHWallet(MultisigWallet):
- def redeem_script_to_scriptpubkey(self, redeem_script):
- #witness-version length sha256
- #witness version is always 0, length is always 0x20
- nested_redeemScript = "0020" + bh2u(sha256(bfh(redeem_script)))
- sh = bh2u(hash_160(bfh(nested_redeemScript)))
- #op_hash160 length hash160 op_equal
- return "a914" + sh + "87"
-
diff --git a/electrumpersonalserver/merkleproof.py b/electrumpersonalserver/merkleproof.py
@@ -1,145 +0,0 @@
-
-import bitcoin as btc
-import binascii
-from math import ceil, log
-
-from electrumpersonalserver.hashes import hash_encode, hash_decode
-from electrumpersonalserver.hashes import Hash
-
-#lots of ideas and code taken from bitcoin core and breadwallet
-#https://github.com/bitcoin/bitcoin/blob/master/src/merkleblock.h
-#https://github.com/breadwallet/breadwallet-core/blob/master/BRMerkleBlock.c
-
-def calc_tree_width(height, txcount):
- """Efficently calculates the number of nodes at given merkle tree height"""
- return (txcount + (1 << height) - 1) >> height
-
-def decend_merkle_tree(hashes, flags, height, txcount, pos):
- """Function recursively follows the flags bitstring down into the
- tree, building up a tree in memory"""
- flag = next(flags)
- if height > 0:
- #non-txid node
- if flag:
- left = decend_merkle_tree(hashes, flags, height-1, txcount, pos*2)
- #bitcoin's merkle tree format has a rule that if theres an
- # odd number of nodes in then the tree, the last hash is duplicated
- #in the electrum format we must hash together the duplicate
- # tree branch
- if pos*2+1 < calc_tree_width(height-1, txcount):
- right = decend_merkle_tree(hashes, flags, height-1,
- txcount, pos*2+1)
- else:
- if isinstance(left, tuple):
- right = expand_tree_hashing(left)
- else:
- right = left
- return (left, right)
- else:
- hs = next(hashes)
- return hs
- else:
- #txid node
- hs = next(hashes)
- if flag:
- #for the actual transaction, also store its position with a flag
- return "tx:" + str(pos) + ":" + hs
- else:
- return hs
-
-def deserialize_core_format_merkle_proof(hash_list, flag_value, txcount):
- """Converts core's format for a merkle proof into a tree in memory"""
- tree_depth = int(ceil(log(txcount, 2)))
- hashes = iter(hash_list)
- #one-liner which converts the flags value to a list of True/False bits
- flags = (flag_value[i//8]&1 << i%8 != 0 for i in range(len(flag_value)*8))
- try:
- root_node = decend_merkle_tree(hashes, flags, tree_depth, txcount, 0)
- return root_node
- except StopIteration:
- raise ValueError
-
-def expand_tree_electrum_format_merkle_proof(node, result):
- """Recurse down into the tree, adding hashes to the result list
- in depth order"""
- left, right = node
- if isinstance(left, tuple):
- expand_tree_electrum_format_merkle_proof(left, result)
- if isinstance(right, tuple):
- expand_tree_electrum_format_merkle_proof(right, result)
- if not isinstance(left, tuple):
- result.append(left)
- if not isinstance(right, tuple):
- result.append(right)
-
-def get_node_hash(node):
- if node.startswith("tx"):
- return node.split(":")[2]
- else:
- return node
-
-def expand_tree_hashing(node):
- """Recurse down into the tree, hashing everything and
- returning root hash"""
- left, right = node
- if isinstance(left, tuple):
- hash_left = expand_tree_hashing(left)
- else:
- hash_left = get_node_hash(left)
- if isinstance(right, tuple):
- hash_right = expand_tree_hashing(right)
- else:
- hash_right = get_node_hash(right)
- return hash_encode(Hash(hash_decode(hash_left) + hash_decode(hash_right)))
-
-def convert_core_to_electrum_merkle_proof(proof):
- """Bitcoin Core and Electrum use different formats for merkle
- proof, this function converts from Core's format to Electrum's format"""
- proof = binascii.unhexlify(proof)
- pos = [0]
- def read_as_int(bytez):
- pos[0] += bytez
- return btc.decode(proof[pos[0] - bytez:pos[0]][::-1], 256)
- def read_var_int():
- pos[0] += 1
- val = btc.from_byte_to_int(proof[pos[0] - 1])
- if val < 253:
- return val
- return read_as_int(pow(2, val - 252))
- def read_bytes(bytez):
- pos[0] += bytez
- return proof[pos[0] - bytez:pos[0]]
-
- merkle_root = proof[36:36+32]
- pos[0] = 80
- txcount = read_as_int(4)
- hash_count = read_var_int()
- hashes = [hash_encode(read_bytes(32)) for i in range(hash_count)]
- flags_count = read_var_int()
- flags = read_bytes(flags_count)
-
- root_node = deserialize_core_format_merkle_proof(hashes, flags, txcount)
- #check special case of a tree of zero height, block with only coinbase tx
- if not isinstance(root_node, tuple):
- root_node = root_node[5:] #remove the "tx:0:"
- result = {"pos": 0, "merkle": [], "txid": root_node,
- "merkleroot": hash_encode(merkle_root)}
- return result
-
- hashes_list = []
- expand_tree_electrum_format_merkle_proof(root_node, hashes_list)
- #remove the first or second element which is the txhash
- tx = hashes_list[0]
- if hashes_list[1].startswith("tx"):
- tx = hashes_list[1]
- assert(tx.startswith("tx"))
- hashes_list.remove(tx)
- #if the txhash was duplicated, that _is_ included in electrum's format
- if hashes_list[0].startswith("tx"):
- hashes_list[0] = tx.split(":")[2]
- tx_pos, txid = tx.split(":")[1:3]
- tx_pos = int(tx_pos)
- result = {"pos": tx_pos, "merkle": hashes_list, "txid": txid,
- "merkleroot": hash_encode(merkle_root)}
- return result
-
diff --git a/electrumpersonalserver/server/__init__.py b/electrumpersonalserver/server/__init__.py
@@ -0,0 +1,30 @@
+from electrumpersonalserver.server.merkleproof import (
+ convert_core_to_electrum_merkle_proof
+)
+from electrumpersonalserver.server.jsonrpc import JsonRpc, JsonRpcError
+from electrumpersonalserver.server.hashes import (
+ to_bytes,
+ sha256,
+ bh2u,
+ script_to_scripthash,
+ get_status_electrum,
+ bfh,
+ hash_encode,
+ hash_decode,
+ Hash,
+ hash_merkle_root,
+ hash_160,
+ script_to_address,
+ address_to_script,
+ address_to_scripthash,
+ bytes_fmt,
+)
+from electrumpersonalserver.server.transactionmonitor import (
+ TransactionMonitor,
+ import_addresses,
+ ADDRESSES_LABEL,
+)
+from electrumpersonalserver.server.deterministicwallet import (
+ parse_electrum_master_public_key,
+ DeterministicWallet,
+)
diff --git a/electrumpersonalserver/server/common.py b/electrumpersonalserver/server/common.py
@@ -0,0 +1,680 @@
+import socket, time, json, datetime, struct, binascii, ssl, os, os.path
+from configparser import ConfigParser, NoSectionError, NoOptionError
+from collections import defaultdict
+import traceback, sys, platform
+from ipaddress import ip_network, ip_address
+import logging
+
+from electrumpersonalserver.server.jsonrpc import JsonRpc, JsonRpcError
+import electrumpersonalserver.server.hashes as hashes
+import electrumpersonalserver.server.merkleproof as merkleproof
+import electrumpersonalserver.server.deterministicwallet as deterministicwallet
+import electrumpersonalserver.server.transactionmonitor as transactionmonitor
+
+VERSION_NUMBER = "0.1"
+
+DONATION_ADDR = "bc1q5d8l0w33h65e2l5x7ty6wgnvkvlqcz0wfaslpz"
+
+BANNER = \
+"""Welcome to Electrum Personal Server
+
+Monitoring {detwallets} deterministic wallets, in total {addr} addresses.
+
+Connected bitcoin node: {useragent}
+Peers: {peers}
+Uptime: {uptime}
+Blocksonly: {blocksonly}
+Pruning: {pruning}
+Download: {recvbytes}
+Upload: {sentbytes}
+
+https://github.com/chris-belcher/electrum-personal-server
+
+Donate to help make Electrum Personal Server even better:
+{donationaddr}
+
+"""
+
+##python has demented rules for variable scope, so these
+## global variables are actually mutable lists
+subscribed_to_headers = [False]
+are_headers_raw = [False]
+bestblockhash = [None]
+
+#log for checking up/seeing your wallet, debug for when something has gone wrong
+def logger_config(logger, fmt=None, filename=None, logfilemode='w'):
+ formatter = logging.Formatter(fmt)
+ logstream = logging.StreamHandler()
+ logstream.setFormatter(formatter)
+ logstream.setLevel(logging.INFO)
+ logger.addHandler(logstream)
+ if filename:
+ logfile = logging.FileHandler(filename, mode=logfilemode)
+ logfile.setFormatter(formatter)
+ logfile.setLevel(logging.DEBUG)
+ logger.addHandler(logfile)
+ return logger
+
+def send_response(sock, query, result):
+ logger = logging.getLogger('ELECTRUMPERSONALSERVER')
+ query["result"] = result
+ query["jsonrpc"] = "2.0"
+ sock.sendall(json.dumps(query).encode('utf-8') + b'\n')
+ logger.debug('<= ' + json.dumps(query))
+
+def send_update(sock, update):
+ logger = logging.getLogger('ELECTRUMPERSONALSERVER')
+ update["jsonrpc"] = "2.0"
+ sock.sendall(json.dumps(update).encode('utf-8') + b'\n')
+ logger.debug('<= ' + json.dumps(update))
+
+def send_error(sock, nid, error):
+ logger = logging.getLogger('ELECTRUMPERSONALSERVER')
+ payload = {"error": error, "jsonrpc": "2.0", "id": nid}
+ sock.sendall(json.dumps(payload).encode('utf-8') + b'\n')
+ logger.debug('<= ' + json.dumps(payload))
+
+def on_heartbeat_listening(txmonitor):
+ logger = logging.getLogger('ELECTRUMPERSONALSERVER')
+ logger.debug("on heartbeat listening")
+ txmonitor.check_for_updated_txes()
+
+def on_heartbeat_connected(sock, rpc, txmonitor):
+ logger = logging.getLogger('ELECTRUMPERSONALSERVER')
+ logger.debug("on heartbeat connected")
+ is_tip_updated, header = check_for_new_blockchain_tip(rpc,
+ are_headers_raw[0])
+ if is_tip_updated:
+ logger.debug("Blockchain tip updated")
+ if subscribed_to_headers[0]:
+ update = {"method": "blockchain.headers.subscribe",
+ "params": [header]}
+ send_update(sock, update)
+ updated_scripthashes = txmonitor.check_for_updated_txes()
+ for scrhash in updated_scripthashes:
+ history_hash = txmonitor.get_electrum_history_hash(scrhash)
+ update = {"method": "blockchain.scripthash.subscribe", "params":
+ [scrhash, history_hash]}
+ send_update(sock, update)
+
+def on_disconnect(txmonitor):
+ subscribed_to_headers[0] = False
+ txmonitor.unsubscribe_all_addresses()
+
+def handle_query(sock, line, rpc, txmonitor):
+ logger = logging.getLogger('ELECTRUMPERSONALSERVER')
+ logger.debug("=> " + line)
+ try:
+ query = json.loads(line)
+ except json.decoder.JSONDecodeError as e:
+ raise IOError(e)
+ method = query["method"]
+
+ #protocol documentation
+ #https://github.com/kyuupichan/electrumx/blob/master/docs/PROTOCOL.rst
+ if method == "blockchain.transaction.get":
+ tx = rpc.call("gettransaction", [query["params"][0]])
+ send_response(sock, query, tx["hex"])
+ elif method == "blockchain.transaction.get_merkle":
+ txid = query["params"][0]
+ try:
+ tx = rpc.call("gettransaction", [txid])
+ core_proof = rpc.call("gettxoutproof", [[txid], tx["blockhash"]])
+ electrum_proof = merkleproof.convert_core_to_electrum_merkle_proof(
+ core_proof)
+ implied_merkle_root = hashes.hash_merkle_root(
+ electrum_proof["merkle"], txid, electrum_proof["pos"])
+ if implied_merkle_root != electrum_proof["merkleroot"]:
+ raise ValueError
+ txheader = get_block_header(rpc, tx["blockhash"])
+ reply = {"block_height": txheader["block_height"], "pos":
+ electrum_proof["pos"], "merkle": electrum_proof["merkle"]}
+ except (ValueError, JsonRpcError) as e:
+ logger.warning("merkle proof failed for " + txid + " err=" + repr(e))
+ #so reply with an invalid proof which electrum handles without
+ # disconnecting us
+ #https://github.com/spesmilo/electrum/blob/c8e67e2bd07efe042703bc1368d499c5e555f854/lib/verifier.py#L74
+ reply = {"block_height": 1, "pos": 0, "merkle": [txid]}
+ send_response(sock, query, reply)
+ elif method == "blockchain.scripthash.subscribe":
+ scrhash = query["params"][0]
+ if txmonitor.subscribe_address(scrhash):
+ history_hash = txmonitor.get_electrum_history_hash(scrhash)
+ else:
+ logger.warning("address scripthash not known to server: " + scrhash)
+ history_hash = hashes.get_status_electrum([])
+ send_response(sock, query, history_hash)
+ elif method == "blockchain.scripthash.get_history":
+ scrhash = query["params"][0]
+ history = txmonitor.get_electrum_history(scrhash)
+ if history == None:
+ history = []
+ logger.warning("address scripthash history not known to server: "
+ + scrhash)
+ send_response(sock, query, history)
+ elif method == "blockchain.headers.subscribe":
+ subscribed_to_headers[0] = True
+ if len(query["params"]) > 0:
+ are_headers_raw[0] = query["params"][0]
+ new_bestblockhash, header = get_current_header(rpc, are_headers_raw[0])
+ send_response(sock, query, header)
+ elif method == "blockchain.block.get_header":
+ height = query["params"][0]
+ try:
+ blockhash = rpc.call("getblockhash", [height])
+ header = get_block_header(rpc, blockhash)
+ send_response(sock, query, header)
+ except JsonRpcError:
+ error = {"message": "height " + str(height) + " out of range",
+ "code": -1}
+ send_error(sock, query["id"], error)
+ elif method == "blockchain.block.headers":
+ MAX_CHUNK_SIZE = 2016
+ start_height = query["params"][0]
+ count = query["params"][1]
+ count = min(count, MAX_CHUNK_SIZE)
+ headers_hex, n = get_block_headers_hex(rpc, start_height, count)
+ send_response(sock, query, {'hex': headers_hex, 'count': n, 'max':
+ MAX_CHUNK_SIZE})
+ elif method == "blockchain.block.get_chunk":
+ RETARGET_INTERVAL = 2016
+ index = query["params"][0]
+ tip_height = rpc.call("getblockchaininfo", [])["headers"]
+ #logic copied from kyuupichan's electrumx get_chunk() in controller.py
+ next_height = tip_height + 1
+ start_height = min(index*RETARGET_INTERVAL, next_height)
+ count = min(next_height - start_height, RETARGET_INTERVAL)
+ headers_hex, n = get_block_headers_hex(rpc, start_height, count)
+ send_response(sock, query, headers_hex)
+ elif method == "blockchain.transaction.broadcast":
+ try:
+ result = rpc.call("sendrawtransaction", [query["params"][0]])
+ except JsonRpcError as e:
+ result = str(e)
+ logger.debug("tx broadcast result = " + str(result))
+ send_response(sock, query, result)
+ elif method == "mempool.get_fee_histogram":
+ mempool = rpc.call("getrawmempool", [True])
+
+ #algorithm copied from the relevant place in ElectrumX
+ #https://github.com/kyuupichan/electrumx/blob/e92c9bd4861c1e35989ad2773d33e01219d33280/server/mempool.py
+ fee_hist = defaultdict(int)
+ for txid, details in mempool.items():
+ fee_rate = 1e8*details["fee"] // details["size"]
+ fee_hist[fee_rate] += details["size"]
+
+ l = list(reversed(sorted(fee_hist.items())))
+ out = []
+ size = 0
+ r = 0
+ binsize = 100000
+ for fee, s in l:
+ size += s
+ if size + r > binsize:
+ out.append((fee, size))
+ r += size - binsize
+ size = 0
+ binsize *= 1.1
+
+ result = out
+ send_response(sock, query, result)
+ elif method == "blockchain.estimatefee":
+ estimate = rpc.call("estimatesmartfee", [query["params"][0]])
+ feerate = 0.0001
+ if "feerate" in estimate:
+ feerate = estimate["feerate"]
+ send_response(sock, query, feerate)
+ elif method == "blockchain.relayfee":
+ networkinfo = rpc.call("getnetworkinfo", [])
+ send_response(sock, query, networkinfo["relayfee"])
+ elif method == "server.banner":
+ networkinfo = rpc.call("getnetworkinfo", [])
+ blockchaininfo = rpc.call("getblockchaininfo", [])
+ uptime = rpc.call("uptime", [])
+ nettotals = rpc.call("getnettotals", [])
+ send_response(sock, query, BANNER.format(
+ detwallets=len(txmonitor.deterministic_wallets),
+ addr=len(txmonitor.address_history),
+ useragent=networkinfo["subversion"],
+ peers=networkinfo["connections"],
+ uptime=str(datetime.timedelta(seconds=uptime)),
+ blocksonly=not networkinfo["localrelay"],
+ pruning=blockchaininfo["pruned"],
+ recvbytes=hashes.bytes_fmt(nettotals["totalbytesrecv"]),
+ sentbytes=hashes.bytes_fmt(nettotals["totalbytessent"]),
+ donationaddr=DONATION_ADDR))
+ elif method == "server.donation_address":
+ send_response(sock, query, DONATION_ADDR)
+ elif method == "server.version":
+ send_response(sock, query, ["ElectrumPersonalServer "
+ + VERSION_NUMBER, VERSION_NUMBER])
+ elif method == "server.peers.subscribe":
+ send_response(sock, query, []) #no peers to report
+ else:
+ logger.error("*** BUG! Not handling method: " + method + " query=" + str(query))
+ #TODO just send back the same query with result = []
+
+def get_block_header(rpc, blockhash, raw=False):
+ rpc_head = rpc.call("getblockheader", [blockhash])
+ if "previousblockhash" in rpc_head:
+ prevblockhash = rpc_head["previousblockhash"]
+ else:
+ prevblockhash = "00"*32 #genesis block
+ if raw:
+ head_hex = struct.pack("<i32s32sIII", rpc_head["version"],
+ binascii.unhexlify(prevblockhash)[::-1],
+ binascii.unhexlify(rpc_head["merkleroot"])[::-1],
+ rpc_head["time"], int(rpc_head["bits"], 16), rpc_head["nonce"])
+ head_hex = binascii.hexlify(head_hex).decode("utf-8")
+ header = {"hex": head_hex, "height": rpc_head["height"]}
+ else:
+ header = {"block_height": rpc_head["height"],
+ "prev_block_hash": prevblockhash,
+ "timestamp": rpc_head["time"],
+ "merkle_root": rpc_head["merkleroot"],
+ "version": rpc_head["version"],
+ "nonce": rpc_head["nonce"],
+ "bits": int(rpc_head["bits"], 16)}
+ return header
+
+def get_current_header(rpc, raw):
+ new_bestblockhash = rpc.call("getbestblockhash", [])
+ header = get_block_header(rpc, new_bestblockhash, raw)
+ return new_bestblockhash, header
+
+def check_for_new_blockchain_tip(rpc, raw):
+ new_bestblockhash, header = get_current_header(rpc, raw)
+ is_tip_new = bestblockhash[0] != new_bestblockhash
+ bestblockhash[0] = new_bestblockhash
+ return is_tip_new, header
+
+def get_block_headers_hex(rpc, start_height, count):
+ #read count number of headers starting from start_height
+ result = bytearray()
+ try:
+ the_hash = rpc.call("getblockhash", [start_height])
+ except JsonRpcError as e:
+ return "", 0
+ for i in range(count):
+ header = rpc.call("getblockheader", [the_hash])
+ #add header hex to result
+ if "previousblockhash" in header:
+ prevblockhash = header["previousblockhash"]
+ else:
+ prevblockhash = "00"*32 #genesis block
+ h1 = struct.pack("<i32s32sIII", header["version"],
+ binascii.unhexlify(prevblockhash)[::-1],
+ binascii.unhexlify(header["merkleroot"])[::-1],
+ header["time"], int(header["bits"], 16), header["nonce"])
+ result.extend(h1)
+ if "nextblockhash" not in header:
+ break
+ the_hash = header["nextblockhash"]
+ return binascii.hexlify(result).decode("utf-8"), int(len(result)/80)
+
+def create_server_socket(hostport):
+ logger = logging.getLogger('ELECTRUMPERSONALSERVER')
+ server_sock = socket.socket()
+ server_sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+ server_sock.bind(hostport)
+ server_sock.listen(1)
+ logger.info("Listening for Electrum Wallet on " + str(hostport))
+ return server_sock
+
+def run_electrum_server(rpc, txmonitor, hostport, ip_whitelist,
+ poll_interval_listening, poll_interval_connected, certfile, keyfile):
+ logger = logging.getLogger('ELECTRUMPERSONALSERVER')
+ logger.info("Starting electrum server")
+ server_sock = create_server_socket(hostport)
+ server_sock.settimeout(poll_interval_listening)
+ while True:
+ try:
+ sock = None
+ while sock == None:
+ try:
+ sock, addr = server_sock.accept()
+ if not any([ip_address(addr[0]) in ipnet
+ for ipnet in ip_whitelist]):
+ logger.debug(addr[0] + " not in whitelist, closing")
+ raise ConnectionRefusedError()
+ sock = ssl.wrap_socket(sock, server_side=True,
+ certfile=certfile, keyfile=keyfile,
+ ssl_version=ssl.PROTOCOL_SSLv23)
+ except socket.timeout:
+ on_heartbeat_listening(txmonitor)
+ except (ConnectionRefusedError, ssl.SSLError):
+ sock.close()
+ sock = None
+
+ logger.info('Electrum connected from ' + str(addr))
+ sock.settimeout(poll_interval_connected)
+ recv_buffer = bytearray()
+ while True:
+ try:
+ recv_data = sock.recv(4096)
+ if not recv_data or len(recv_data) == 0:
+ raise EOFError()
+ recv_buffer.extend(recv_data)
+ lb = recv_buffer.find(b'\n')
+ if lb == -1:
+ continue
+ while lb != -1:
+ line = recv_buffer[:lb].rstrip()
+ recv_buffer = recv_buffer[lb + 1:]
+ lb = recv_buffer.find(b'\n')
+ handle_query(sock, line.decode("utf-8"), rpc,
+ txmonitor)
+ except socket.timeout:
+ on_heartbeat_connected(sock, rpc, txmonitor)
+ except (IOError, EOFError) as e:
+ if isinstance(e, EOFError):
+ logger.info("Electrum wallet disconnected")
+ else:
+ logger.error("IOError: " + repr(e))
+ try:
+ sock.close()
+ except IOError:
+ pass
+ sock = None
+ on_disconnect(txmonitor)
+ time.sleep(0.2)
+
+def get_scriptpubkeys_to_monitor(rpc, config):
+ logger = logging.getLogger('ELECTRUMPERSONALSERVER')
+ logger.info("Obtaining bitcoin addresses to monitor . . .")
+ st = time.time()
+ try:
+ imported_addresses = set(rpc.call("getaddressesbyaccount",
+ [transactionmonitor.ADDRESSES_LABEL]))
+ logger.debug("using deprecated accounts interface")
+ except JsonRpcError:
+ #bitcoin core 0.17 deprecates accounts, replaced with labels
+ if transactionmonitor.ADDRESSES_LABEL in rpc.call("listlabels", []):
+ imported_addresses = set(rpc.call("getaddressesbylabel",
+ [transactionmonitor.ADDRESSES_LABEL]).keys())
+ else:
+ #no label, no addresses imported at all
+ imported_addresses = set()
+ logger.debug("already-imported addresses = " + str(imported_addresses))
+
+ deterministic_wallets = []
+ for key in config.options("master-public-keys"):
+ wal = deterministicwallet.parse_electrum_master_public_key(
+ config.get("master-public-keys", key),
+ int(config.get("bitcoin-rpc", "gap_limit")))
+ deterministic_wallets.append(wal)
+
+ #check whether these deterministic wallets have already been imported
+ import_needed = False
+ wallets_imported = 0
+ spks_to_import = []
+ for wal in deterministic_wallets:
+ first_addr = hashes.script_to_address(wal.get_scriptpubkeys(change=0,
+ from_index=0, count=1)[0], rpc)
+ if first_addr not in imported_addresses:
+ import_needed = True
+ wallets_imported += 1
+ for change in [0, 1]:
+ spks_to_import.extend(wal.get_scriptpubkeys(change, 0,
+ int(config.get("bitcoin-rpc", "initial_import_count"))))
+ #check whether watch-only addresses have been imported
+ watch_only_addresses = []
+ for key in config.options("watch-only-addresses"):
+ watch_only_addresses.extend(config.get("watch-only-addresses",
+ key).split(' '))
+ watch_only_addresses = set(watch_only_addresses)
+ watch_only_addresses_to_import = []
+ if not watch_only_addresses.issubset(imported_addresses):
+ import_needed = True
+ watch_only_addresses_to_import = (watch_only_addresses -
+ imported_addresses)
+
+ #if addresses need to be imported then return them
+ if import_needed:
+ addresses_to_import = [hashes.script_to_address(spk, rpc)
+ for spk in spks_to_import]
+ #TODO minus imported_addresses
+ logger.info("Importing " + str(wallets_imported) + " wallets and " +
+ str(len(watch_only_addresses_to_import)) + " watch-only " +
+ "addresses into the Bitcoin node")
+ time.sleep(5)
+ return (True, addresses_to_import + list(
+ watch_only_addresses_to_import), None)
+
+ #test
+ # importing one det wallet and no addrs, two det wallets and no addrs
+ # no det wallets and some addrs, some det wallets and some addrs
+
+    #at this point we know we don't need to import any addresses
+ #find which index the deterministic wallets are up to
+ spks_to_monitor = []
+ for wal in deterministic_wallets:
+ for change in [0, 1]:
+ spks_to_monitor.extend(wal.get_scriptpubkeys(change, 0,
+ int(config.get("bitcoin-rpc", "initial_import_count"))))
+        #loop until one address found that isn't imported
+ while True:
+ spk = wal.get_new_scriptpubkeys(change, count=1)[0]
+ spks_to_monitor.append(spk)
+ if hashes.script_to_address(spk, rpc) not in imported_addresses:
+ break
+ spks_to_monitor.pop()
+ wal.rewind_one(change)
+
+ spks_to_monitor.extend([hashes.address_to_script(addr, rpc)
+ for addr in watch_only_addresses])
+ et = time.time()
+ logger.info("Obtained list of addresses to monitor in " + str(et - st) + "sec")
+ return False, spks_to_monitor, deterministic_wallets
+
+def get_certs(config):
+ from pkg_resources import resource_filename
+ from electrumpersonalserver import __certfile__, __keyfile__
+
+ logger = logging.getLogger('ELECTRUMPERSONALSERVER')
+ certfile = config.get('electrum-server', 'certfile', fallback=None)
+ keyfile = config.get('electrum-server', 'keyfile', fallback=None)
+ if (certfile and keyfile) and \
+ (os.path.exists(certfile) and os.path.exists(keyfile)):
+ return certfile, keyfile
+ else:
+ certfile = resource_filename('electrumpersonalserver', __certfile__)
+ keyfile = resource_filename('electrumpersonalserver', __keyfile__)
+ if os.path.exists(certfile) and os.path.exists(keyfile):
+ logger.info('using cert: {}, key: {}'.format(certfile, keyfile))
+ return certfile, keyfile
+ else:
+ raise ValueError('invalid cert: {}, key: {}'.format(
+ certfile, keyfile))
+
+def obtain_rpc_username_password(datadir):
+ logger = logging.getLogger('ELECTRUMPERSONALSERVER')
+ if len(datadir.strip()) == 0:
+ logger.debug("no datadir configuration, checking in default location")
+ systemname = platform.system()
+ #paths from https://en.bitcoin.it/wiki/Data_directory
+ if systemname == "Linux":
+ datadir = os.path.expanduser("~/.bitcoin")
+ elif systemname == "Windows":
+ datadir = os.path.expandvars("%APPDATA%\Bitcoin")
+ elif systemname == "Darwin": #mac os
+ datadir = os.path.expanduser(
+ "~/Library/Application Support/Bitcoin/")
+ cookie_path = os.path.join(datadir, ".cookie")
+ if not os.path.exists(cookie_path):
+ logger.warning("Unable to find .cookie file, try setting `datadir` config")
+ return None, None
+ fd = open(cookie_path)
+ username, password = fd.read().strip().split(":")
+ fd.close()
+ return username, password
+
+def parse_args():
+ from argparse import ArgumentParser
+ from tempfile import gettempdir
+
+ parser = ArgumentParser(description='Electrum Personal Server daemon')
+ parser.add_argument('-c', '--conf', required=True,
+ help='configuration file (mandatory)')
+ parser.add_argument('-l', '--log', help='log file',
+ default='{}/electrumpersonalserver.log'.format(gettempdir()))
+ parser.add_argument('-a', '--appendlog', action='store_true',
+ help='append to log file')
+ logfmt = '%(levelname)s:%(asctime)s: %(message)s'
+ parser.add_argument('-f', '--logfmt', default=logfmt,
+ help='log format')
+ loglvls = [l for l in ('DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL')]
+ parser.add_argument('--loglevel', default='DEBUG', choices=loglvls,
+ help='log levels')
+ return parser.parse_args()
+
+def main():
+ opts = parse_args()
+
+ logger = logging.getLogger('ELECTRUMPERSONALSERVER')
+ logger = logger_config(logger, fmt=opts.logfmt, filename=opts.log,
+ logfilemode='a' if opts.appendlog else 'w')
+ logger.setLevel(opts.loglevel)
+ logger.info('Starting Electrum Personal Server')
+ logger.info(f'Logging to {opts.log}')
+ try:
+ config = ConfigParser()
+ config.read(opts.conf)
+ config.options("master-public-keys")
+ except NoSectionError:
+ logger.error("Non-existant configuration file {}".format(opts.conf))
+ return
+ try:
+ rpc_u = config.get("bitcoin-rpc", "rpc_user")
+ rpc_p = config.get("bitcoin-rpc", "rpc_password")
+ logger.debug("obtaining auth from rpc_user/pass")
+ except NoOptionError:
+ rpc_u, rpc_p = obtain_rpc_username_password(config.get(
+ "bitcoin-rpc", "datadir"))
+ logger.debug("obtaining auth from .cookie")
+ if rpc_u == None:
+ return
+ rpc = JsonRpc(host = config.get("bitcoin-rpc", "host"),
+ port = int(config.get("bitcoin-rpc", "port")),
+ user = rpc_u, password = rpc_p,
+ wallet_filename=config.get("bitcoin-rpc", "wallet_filename").strip())
+
+ #TODO somewhere here loop until rpc works and fully sync'd, to allow
+ # people to run this script without waiting for their node to fully
+ # catch up sync'd when getblockchaininfo blocks == headers, or use
+ # verificationprogress
+ printed_error_msg = False
+ while bestblockhash[0] == None:
+ try:
+ bestblockhash[0] = rpc.call("getbestblockhash", [])
+ except JsonRpcError as e:
+ if not printed_error_msg:
+ logger.error("Error with bitcoin json-rpc: " + repr(e))
+ printed_error_msg = True
+ time.sleep(5)
+
+ import_needed, relevant_spks_addrs, deterministic_wallets = \
+ get_scriptpubkeys_to_monitor(rpc, config)
+ if import_needed:
+ transactionmonitor.import_addresses(rpc, relevant_spks_addrs)
+ logger.info("Done.\nIf recovering a wallet which already has existing " +
+ "transactions, then\nrun the rescan script. If you're confident " +
+ "that the wallets are new\nand empty then there's no need to " +
+ "rescan, just restart this script")
+ else:
+ txmonitor = transactionmonitor.TransactionMonitor(rpc,
+ deterministic_wallets)
+ if not txmonitor.build_address_history(relevant_spks_addrs):
+ return
+ hostport = (config.get("electrum-server", "host"),
+ int(config.get("electrum-server", "port")))
+ ip_whitelist = []
+ for ip in config.get("electrum-server", "ip_whitelist").split(" "):
+ if ip == "*":
+ #matches everything
+ ip_whitelist.append(ip_network("0.0.0.0/0"))
+ ip_whitelist.append(ip_network("::0/0"))
+ else:
+ ip_whitelist.append(ip_network(ip, strict=False))
+ poll_interval_listening = int(config.get("bitcoin-rpc",
+ "poll_interval_listening"))
+ poll_interval_connected = int(config.get("bitcoin-rpc",
+ "poll_interval_connected"))
+ certfile, keyfile = get_certs(config)
+ try:
+ run_electrum_server(rpc, txmonitor, hostport, ip_whitelist,
+ poll_interval_listening,
+ poll_interval_connected, certfile, keyfile)
+ except KeyboardInterrupt:
+ logger.info('Received KeyboardInterrupt, quitting')
+
+def search_for_block_height_of_date(datestr, rpc):
+ logger = logging.getLogger('ELECTRUMPERSONALSERVER')
+ target_time = datetime.datetime.strptime(datestr, "%d/%m/%Y")
+ bestblockhash = rpc.call("getbestblockhash", [])
+ best_head = rpc.call("getblockheader", [bestblockhash])
+ if target_time > datetime.datetime.fromtimestamp(best_head["time"]):
+ logger.error("date in the future")
+ return -1
+ genesis_block = rpc.call("getblockheader", [rpc.call("getblockhash", [0])])
+ if target_time < datetime.datetime.fromtimestamp(genesis_block["time"]):
+ logger.warning("date is before the creation of bitcoin")
+ return 0
+ first_height = 0
+ last_height = best_head["height"]
+ while True:
+ m = (first_height + last_height) // 2
+ m_header = rpc.call("getblockheader", [rpc.call("getblockhash", [m])])
+ m_header_time = datetime.datetime.fromtimestamp(m_header["time"])
+ m_time_diff = (m_header_time - target_time).total_seconds()
+ if abs(m_time_diff) < 60*60*2: #2 hours
+ return m_header["height"]
+ elif m_time_diff < 0:
+ first_height = m
+ elif m_time_diff > 0:
+ last_height = m
+ else:
+ return -1
+
+def rescan():
+ opts = parse_args()
+
+ logger = logging.getLogger('ELECTRUMPERSONALSERVER')
+ logger = logger_config(logger, fmt=opts.logfmt, filename=opts.log,
+ logfilemode='a' if opts.appendlog else 'w')
+ logger.setLevel(opts.loglevel)
+ logger.info('Starting Electrum Personal Server in rescan mode')
+ logger.info(f'Logging to {opts.log}')
+ try:
+ config = ConfigParser()
+ config.read(opts.conf)
+ config.options("master-public-keys")
+ except NoSectionError:
+ logger.error("Non-existant configuration file {}".format(opts.conf))
+ return
+ try:
+ rpc_u = config.get("bitcoin-rpc", "rpc_user")
+ rpc_p = config.get("bitcoin-rpc", "rpc_password")
+ except NoOptionError:
+ rpc_u, rpc_p = obtain_rpc_username_password(config.get(
+ "bitcoin-rpc", "datadir"))
+ if rpc_u == None:
+ return
+ rpc = JsonRpc(host = config.get("bitcoin-rpc", "host"),
+ port = int(config.get("bitcoin-rpc", "port")),
+ user = rpc_u, password = rpc_p,
+ wallet_filename=config.get("bitcoin-rpc", "wallet_filename").strip())
+ user_input = input("Enter earliest wallet creation date (DD/MM/YYYY) "
+ "or block height to rescan from: ")
+ try:
+ height = int(user_input)
+ except ValueError:
+ height = search_for_block_height_of_date(user_input, rpc)
+ if height == -1:
+ return
+ height -= 2016 #go back two weeks for safety
+
+ if input("Rescan from block height " + str(height) + " ? (y/n):") != 'y':
+ return
+ rpc.call("rescanblockchain", [height])
+ logger.info("end")
diff --git a/electrumpersonalserver/server/deterministicwallet.py b/electrumpersonalserver/server/deterministicwallet.py
@@ -0,0 +1,221 @@
+
+import electrumpersonalserver.bitcoin as btc
+from electrumpersonalserver.server.hashes import bh2u, hash_160, bfh, sha256
+
+# the class hierarchy for deterministic wallets in this file:
+# subclasses are written towards the right
+# each class knows how to create the scriptPubKeys of that wallet
+#
+# |-- SingleSigOldMnemonicWallet
+# |-- SingleSigP2PKHWallet
+# |-- SingleSigP2WPKHWallet
+# SingleSigWallet --|
+# / |-- SingleSigP2WPKH_P2SHWallet
+# DeterministicWallet
+# \ |-- MultisigP2SHWallet
+# MultisigWallet --|
+# |-- MultisigP2WSHWallet
+# |-- MultisigP2WSH_P2SHWallet
+
+#the wallet types are here
+#https://github.com/spesmilo/electrum/blob/3.0.6/RELEASE-NOTES
+#and
+#https://github.com/spesmilo/electrum-docs/blob/master/xpub_version_bytes.rst
+
+def is_string_parsable_as_hex_int(s):
+ try:
+ int(s, 16)
+ return True
+ except:
+ return False
+
+def parse_electrum_master_public_key(keydata, gaplimit):
+ if keydata[:4] in ("xpub", "tpub"):
+ wallet = SingleSigP2PKHWallet(keydata)
+ elif keydata[:4] in ("zpub", "vpub"):
+ wallet = SingleSigP2WPKHWallet(keydata)
+ elif keydata[:4] in ("ypub", "upub"):
+ wallet = SingleSigP2WPKH_P2SHWallet(keydata)
+ elif keydata.find(" ") != -1: #multiple keys = multisig
+ chunks = keydata.split(" ")
+ try:
+ m = int(chunks[0])
+ except ValueError:
+ raise ValueError("Unable to parse m in multisig key data: "
+ + chunks[0])
+ pubkeys = chunks[1:]
+ if not all([pubkeys[0][:4] == pub[:4] for pub in pubkeys[1:]]):
+ raise ValueError("Inconsistent master public key types")
+ if pubkeys[0][:4] in ("xpub", "tpub"):
+ wallet = MultisigP2SHWallet(m, pubkeys)
+ elif pubkeys[0][:4] in ("Zpub", "Vpub"):
+ wallet = MultisigP2WSHWallet(m, pubkeys)
+ elif pubkeys[0][:4] in ("Ypub", "Upub"):
+ wallet = MultisigP2WSH_P2SHWallet(m, pubkeys)
+ elif is_string_parsable_as_hex_int(keydata) and len(keydata) == 128:
+ wallet = SingleSigOldMnemonicWallet(keydata)
+ else:
+ raise ValueError("Unrecognized electrum mpk format: " + keydata[:4])
+ wallet.gaplimit = gaplimit
+ return wallet
+
+class DeterministicWallet(object):
+ def __init__(self):
+ self.gaplimit = 0
+ self.next_index = [0, 0]
+ self.scriptpubkey_index = {}
+
+ def get_new_scriptpubkeys(self, change, count):
+ """Returns newly-generated addresses from this deterministic wallet"""
+ return self.get_scriptpubkeys(change, self.next_index[change],
+ count)
+
+ def get_scriptpubkeys(self, change, from_index, count):
+ """Returns addresses from this deterministic wallet"""
+ pass
+
+ #called in check_for_new_txes() when a new tx of ours arrives
+ #to see if we need to import more addresses
+ def have_scriptpubkeys_overrun_gaplimit(self, scriptpubkeys):
+ """Return None if they havent, or how many addresses to
+ import if they have"""
+ result = {}
+ for spk in scriptpubkeys:
+ if spk not in self.scriptpubkey_index:
+ continue
+ change, index = self.scriptpubkey_index[spk]
+ distance_from_next = self.next_index[change] - index
+ if distance_from_next > self.gaplimit:
+ continue
+ #need to import more
+ if change in result:
+ result[change] = max(result[change], self.gaplimit
+ - distance_from_next + 1)
+ else:
+ result[change] = self.gaplimit - distance_from_next + 1
+ if len(result) > 0:
+ return result
+ else:
+ return None
+
+ def rewind_one(self, change):
+ """Go back one pubkey in a branch"""
+ self.next_index[change] -= 1
+
+class SingleSigWallet(DeterministicWallet):
+ def __init__(self, mpk):
+ super(SingleSigWallet, self).__init__()
+ try:
+ self.branches = (btc.bip32_ckd(mpk, 0), btc.bip32_ckd(mpk, 1))
+ except Exception:
+ raise ValueError("Bad master public key format. Get it from " +
+ "Electrum menu `Wallet` -> `Information`")
+ #m/change/i
+
+ def pubkey_to_scriptpubkey(self, pubkey):
+ raise RuntimeError()
+
+ def get_pubkey(self, change, index):
+ return btc.bip32_extract_key(btc.bip32_ckd(self.branches[change],
+ index))
+
+ def get_scriptpubkeys(self, change, from_index, count):
+ result = []
+ for index in range(from_index, from_index + count):
+ pubkey = self.get_pubkey(change, index)
+ scriptpubkey = self.pubkey_to_scriptpubkey(pubkey)
+ self.scriptpubkey_index[scriptpubkey] = (change, index)
+ result.append(scriptpubkey)
+ self.next_index[change] = max(self.next_index[change], from_index+count)
+ return result
+
+class SingleSigP2PKHWallet(SingleSigWallet):
+ def pubkey_to_scriptpubkey(self, pubkey):
+ pkh = bh2u(hash_160(bfh(pubkey)))
+ #op_dup op_hash_160 length hash160 op_equalverify op_checksig
+ return "76a914" + pkh + "88ac"
+
+class SingleSigP2WPKHWallet(SingleSigWallet):
+ def pubkey_to_scriptpubkey(self, pubkey):
+ pkh = bh2u(hash_160(bfh(pubkey)))
+ #witness-version length hash160
+ #witness version is always 0, length is always 0x14
+ return "0014" + pkh
+
+class SingleSigP2WPKH_P2SHWallet(SingleSigWallet):
+ def pubkey_to_scriptpubkey(self, pubkey):
+ #witness-version length pubkeyhash
+ #witness version is always 0, length is always 0x14
+ redeem_script = '0014' + bh2u(hash_160(bfh(pubkey)))
+ sh = bh2u(hash_160(bfh(redeem_script)))
+ return "a914" + sh + "87"
+
+class SingleSigOldMnemonicWallet(SingleSigWallet):
+ def __init__(self, mpk):
+ super(SingleSigWallet, self).__init__()
+ self.mpk = mpk
+
+ def get_pubkey(self, change, index):
+ return btc.electrum_pubkey(self.mpk, index, change)
+
+ def pubkey_to_scriptpubkey(self, pubkey):
+ pkh = bh2u(hash_160(bfh(pubkey)))
+ #op_dup op_hash_160 length hash160 op_equalverify op_checksig
+ return "76a914" + pkh + "88ac"
+
+class MultisigWallet(DeterministicWallet):
+ def __init__(self, m, mpk_list):
+ super(MultisigWallet, self).__init__()
+ self.m = m
+ try:
+ self.pubkey_branches = [(btc.bip32_ckd(mpk, 0), btc.bip32_ckd(mpk,
+ 1)) for mpk in mpk_list]
+ except Exception:
+ raise ValueError("Bad master public key format. Get it from " +
+ "Electrum menu `Wallet` -> `Information`")
+ #derivation path for pubkeys is m/change/index
+
+ def redeem_script_to_scriptpubkey(self, redeem_script):
+ raise RuntimeError()
+
+ def get_scriptpubkeys(self, change, from_index, count):
+ result = []
+ for index in range(from_index, from_index + count):
+ pubkeys = [btc.bip32_extract_key(btc.bip32_ckd(branch[change],
+ index)) for branch in self.pubkey_branches]
+ pubkeys = sorted(pubkeys)
+ redeemScript = ""
+ redeemScript += "%x"%(0x50 + self.m) #op_m
+ for p in pubkeys:
+ redeemScript += "21" #length
+ redeemScript += p
+ redeemScript += "%x"%(0x50 + len(pubkeys)) #op_n
+ redeemScript += "ae" # op_checkmultisig
+ scriptpubkey = self.redeem_script_to_scriptpubkey(redeemScript)
+ self.scriptpubkey_index[scriptpubkey] = (change, index)
+ result.append(scriptpubkey)
+ self.next_index[change] = max(self.next_index[change], from_index+count)
+ return result
+
+class MultisigP2SHWallet(MultisigWallet):
+ def redeem_script_to_scriptpubkey(self, redeem_script):
+ sh = bh2u(hash_160(bfh(redeem_script)))
+ #op_hash160 length hash160 op_equal
+ return "a914" + sh + "87"
+
+class MultisigP2WSHWallet(MultisigWallet):
+ def redeem_script_to_scriptpubkey(self, redeem_script):
+ sh = bh2u(sha256(bfh(redeem_script)))
+ #witness-version length sha256
+ #witness version is always 0, length is always 0x20
+ return "0020" + sh
+
+class MultisigP2WSH_P2SHWallet(MultisigWallet):
+ def redeem_script_to_scriptpubkey(self, redeem_script):
+ #witness-version length sha256
+ #witness version is always 0, length is always 0x20
+ nested_redeemScript = "0020" + bh2u(sha256(bfh(redeem_script)))
+ sh = bh2u(hash_160(bfh(nested_redeemScript)))
+ #op_hash160 length hash160 op_equal
+ return "a914" + sh + "87"
+
diff --git a/electrumpersonalserver/hashes.py b/electrumpersonalserver/server/hashes.py
diff --git a/electrumpersonalserver/jsonrpc.py b/electrumpersonalserver/server/jsonrpc.py
diff --git a/electrumpersonalserver/server/merkleproof.py b/electrumpersonalserver/server/merkleproof.py
@@ -0,0 +1,144 @@
+
+import electrumpersonalserver.bitcoin as btc
+import binascii
+from math import ceil, log
+
+from electrumpersonalserver.server.hashes import Hash, hash_encode, hash_decode
+
+#lots of ideas and code taken from bitcoin core and breadwallet
+#https://github.com/bitcoin/bitcoin/blob/master/src/merkleblock.h
+#https://github.com/breadwallet/breadwallet-core/blob/master/BRMerkleBlock.c
+
+def calc_tree_width(height, txcount):
+    """Efficiently calculates the number of nodes at given merkle tree height"""
+ return (txcount + (1 << height) - 1) >> height
+
+def decend_merkle_tree(hashes, flags, height, txcount, pos):
+ """Function recursively follows the flags bitstring down into the
+ tree, building up a tree in memory"""
+ flag = next(flags)
+ if height > 0:
+ #non-txid node
+ if flag:
+ left = decend_merkle_tree(hashes, flags, height-1, txcount, pos*2)
+            #bitcoin's merkle tree format has a rule that if there's an
+            # odd number of nodes in the tree, the last hash is duplicated
+ #in the electrum format we must hash together the duplicate
+ # tree branch
+ if pos*2+1 < calc_tree_width(height-1, txcount):
+ right = decend_merkle_tree(hashes, flags, height-1,
+ txcount, pos*2+1)
+ else:
+ if isinstance(left, tuple):
+ right = expand_tree_hashing(left)
+ else:
+ right = left
+ return (left, right)
+ else:
+ hs = next(hashes)
+ return hs
+ else:
+ #txid node
+ hs = next(hashes)
+ if flag:
+ #for the actual transaction, also store its position with a flag
+ return "tx:" + str(pos) + ":" + hs
+ else:
+ return hs
+
+def deserialize_core_format_merkle_proof(hash_list, flag_value, txcount):
+ """Converts core's format for a merkle proof into a tree in memory"""
+ tree_depth = int(ceil(log(txcount, 2)))
+ hashes = iter(hash_list)
+    #one-liner which converts the flags value to a sequence of True/False bits
+ flags = (flag_value[i//8]&1 << i%8 != 0 for i in range(len(flag_value)*8))
+ try:
+ root_node = decend_merkle_tree(hashes, flags, tree_depth, txcount, 0)
+ return root_node
+ except StopIteration:
+ raise ValueError
+
+def expand_tree_electrum_format_merkle_proof(node, result):
+ """Recurse down into the tree, adding hashes to the result list
+ in depth order"""
+ left, right = node
+ if isinstance(left, tuple):
+ expand_tree_electrum_format_merkle_proof(left, result)
+ if isinstance(right, tuple):
+ expand_tree_electrum_format_merkle_proof(right, result)
+ if not isinstance(left, tuple):
+ result.append(left)
+ if not isinstance(right, tuple):
+ result.append(right)
+
+def get_node_hash(node):
+ if node.startswith("tx"):
+ return node.split(":")[2]
+ else:
+ return node
+
+def expand_tree_hashing(node):
+ """Recurse down into the tree, hashing everything and
+ returning root hash"""
+ left, right = node
+ if isinstance(left, tuple):
+ hash_left = expand_tree_hashing(left)
+ else:
+ hash_left = get_node_hash(left)
+ if isinstance(right, tuple):
+ hash_right = expand_tree_hashing(right)
+ else:
+ hash_right = get_node_hash(right)
+ return hash_encode(Hash(hash_decode(hash_left) + hash_decode(hash_right)))
+
+def convert_core_to_electrum_merkle_proof(proof):
+ """Bitcoin Core and Electrum use different formats for merkle
+ proof, this function converts from Core's format to Electrum's format"""
+ proof = binascii.unhexlify(proof)
+ pos = [0]
+ def read_as_int(bytez):
+ pos[0] += bytez
+ return btc.decode(proof[pos[0] - bytez:pos[0]][::-1], 256)
+ def read_var_int():
+ pos[0] += 1
+ val = btc.from_byte_to_int(proof[pos[0] - 1])
+ if val < 253:
+ return val
+ return read_as_int(pow(2, val - 252))
+ def read_bytes(bytez):
+ pos[0] += bytez
+ return proof[pos[0] - bytez:pos[0]]
+
+ merkle_root = proof[36:36+32]
+ pos[0] = 80
+ txcount = read_as_int(4)
+ hash_count = read_var_int()
+ hashes = [hash_encode(read_bytes(32)) for i in range(hash_count)]
+ flags_count = read_var_int()
+ flags = read_bytes(flags_count)
+
+ root_node = deserialize_core_format_merkle_proof(hashes, flags, txcount)
+ #check special case of a tree of zero height, block with only coinbase tx
+ if not isinstance(root_node, tuple):
+ root_node = root_node[5:] #remove the "tx:0:"
+ result = {"pos": 0, "merkle": [], "txid": root_node,
+ "merkleroot": hash_encode(merkle_root)}
+ return result
+
+ hashes_list = []
+ expand_tree_electrum_format_merkle_proof(root_node, hashes_list)
+ #remove the first or second element which is the txhash
+ tx = hashes_list[0]
+ if hashes_list[1].startswith("tx"):
+ tx = hashes_list[1]
+ assert(tx.startswith("tx"))
+ hashes_list.remove(tx)
+ #if the txhash was duplicated, that _is_ included in electrum's format
+ if hashes_list[0].startswith("tx"):
+ hashes_list[0] = tx.split(":")[2]
+ tx_pos, txid = tx.split(":")[1:3]
+ tx_pos = int(tx_pos)
+ result = {"pos": tx_pos, "merkle": hashes_list, "txid": txid,
+ "merkleroot": hash_encode(merkle_root)}
+ return result
+
diff --git a/electrumpersonalserver/server/transactionmonitor.py b/electrumpersonalserver/server/transactionmonitor.py
@@ -0,0 +1,480 @@
+
+import time, pprint, math, sys
+from decimal import Decimal
+from collections import defaultdict
+import logging
+
+from electrumpersonalserver.server.jsonrpc import JsonRpcError
+from electrumpersonalserver.server.hashes import (
+ get_status_electrum,
+ script_to_scripthash,
+ script_to_address
+)
+
+#internally this code uses scriptPubKeys, it only converts to bitcoin addresses
+# when importing to bitcoind or checking whether enough addresses have been
+# imported
+#the electrum protocol uses sha256(scriptpubkey) as a key for lookups
+# this code calls them scripthashes
+
+#code will generate the first address from each deterministic wallet
+# and check whether they have been imported into the bitcoin node
+# if no then initial_import_count addresses will be imported, then exit
+# if yes then initial_import_count addresses will be generated and extra
+# addresses will be generated one-by-one, each time checking whether they have
+# been imported into the bitcoin node
+# when an address has been reached that has not been imported, that means
+# we've reached the end, then rewind the deterministic wallet index by one
+
+#when a transaction happens paying to an address from a deterministic wallet
+# lookup the position of that address, if its less than gap_limit then
+# import more addresses
+
+ADDRESSES_LABEL = "electrum-watchonly-addresses"
+CONFIRMATIONS_SAFE_FROM_REORG = 100
+
+def import_addresses(rpc, addrs, logger=None):
+ logger = logger if logger else logging.getLogger('ELECTRUMPERSONALSERVER')
+ logger.debug("importing addrs = " + str(addrs))
+ logger.info("Importing " + str(len(addrs)) + " addresses in total")
+ addr_i = iter(addrs)
+ notifications = 10
+ for i in range(notifications):
+ pc = int(100.0 * i / notifications)
+ sys.stdout.write("[" + str(pc) + "%]... ")
+ sys.stdout.flush()
+ for j in range(int(len(addrs) / notifications)):
+ rpc.call("importaddress", [next(addr_i), ADDRESSES_LABEL, False])
+    for a in addr_i: #import the remainder of addresses
+ rpc.call("importaddress", [a, ADDRESSES_LABEL, False])
+ print("[100%]")
+ logger.info("Importing done")
+
+class TransactionMonitor(object):
+ """
+ Class which monitors the bitcoind wallet for new transactions
+ and builds a history datastructure for sending to electrum
+ """
+ def __init__(self, rpc, deterministic_wallets, logger=None):
+ self.rpc = rpc
+ self.deterministic_wallets = deterministic_wallets
+ self.last_known_wallet_txid = None
+ self.address_history = None
+ self.unconfirmed_txes = None
+ self.reorganizable_txes = None
+ self.logger = logger if logger else logging.getLogger('ELECTRUMPERSONALSERVER')
+
+ def get_electrum_history_hash(self, scrhash):
+ return get_status_electrum( [(h["tx_hash"], h["height"])
+ for h in self.address_history[scrhash]["history"]] )
+
+ def get_electrum_history(self, scrhash):
+ if scrhash in self.address_history:
+ return self.address_history[scrhash]["history"]
+ else:
+ return None
+
+ def subscribe_address(self, scrhash):
+ if scrhash in self.address_history:
+ self.address_history[scrhash]["subscribed"] = True
+ return True
+ else:
+ return False
+
+ def unsubscribe_all_addresses(self):
+ for scrhash, his in self.address_history.items():
+ his["subscribed"] = False
+
+ def build_address_history(self, monitored_scriptpubkeys):
+ logger = self.logger
+ logger.info("Building history with " + str(len(monitored_scriptpubkeys)) +
+ " addresses . . .")
+ st = time.time()
+ address_history = {}
+ for spk in monitored_scriptpubkeys:
+ address_history[script_to_scripthash(spk)] = {'history': [],
+ 'subscribed': False}
+ wallet_addr_scripthashes = set(address_history.keys())
+ self.reorganizable_txes = []
+ #populate history
+ #which is a blockheight-ordered list of ("txhash", height)
+ #unconfirmed transactions go at the end as ("txhash", 0, fee)
+ # 0=unconfirmed -1=unconfirmed with unconfirmed parents
+
+ BATCH_SIZE = 1000
+ ret = list(range(BATCH_SIZE))
+ t = 0
+ count = 0
+ obtained_txids = set()
+ last_tx = None
+ while len(ret) == BATCH_SIZE:
+ ret = self.rpc.call("listtransactions", ["*", BATCH_SIZE, t, True])
+ logger.debug("listtransactions skip=" + str(t) + " len(ret)="
+ + str(len(ret)))
+ if t == 0 and len(ret) > 0:
+ last_tx = ret[-1]
+ t += len(ret)
+ for tx in ret:
+ if "txid" not in tx or "category" not in tx:
+ continue
+ if tx["category"] not in ("receive", "send"):
+ continue
+ if tx["confirmations"] < 0:
+ continue #conflicted
+ if tx["txid"] in obtained_txids:
+ continue
+ logger.debug("adding obtained tx=" + str(tx["txid"]))
+ obtained_txids.add(tx["txid"])
+
+ #obtain all the addresses this transaction is involved with
+ output_scriptpubkeys, input_scriptpubkeys, txd = \
+ self.get_input_and_output_scriptpubkeys(tx["txid"])
+ output_scripthashes = [script_to_scripthash(sc)
+ for sc in output_scriptpubkeys]
+ sh_to_add = wallet_addr_scripthashes.intersection(set(
+ output_scripthashes))
+ input_scripthashes = [script_to_scripthash(sc)
+ for sc in input_scriptpubkeys]
+ sh_to_add |= wallet_addr_scripthashes.intersection(set(
+ input_scripthashes))
+ if len(sh_to_add) == 0:
+ continue
+
+ for wal in self.deterministic_wallets:
+ overrun_depths = wal.have_scriptpubkeys_overrun_gaplimit(
+ output_scriptpubkeys)
+ if overrun_depths != None:
+ logger.error("Not enough addresses imported.")
+ logger.error("Delete wallet.dat and increase the value " +
+ "of `initial_import_count` in the file " +
+ "`config.cfg` then reimport and rescan")
+ #TODO make it so users dont have to delete wallet.dat
+ # check whether all initial_import_count addresses are
+ # imported rather than just the first one
+ return False
+ new_history_element = self.generate_new_history_element(tx, txd)
+ for scripthash in sh_to_add:
+ address_history[scripthash][
+ "history"].append(new_history_element)
+ if tx["confirmations"] > 0 and (tx["confirmations"] <
+ CONFIRMATIONS_SAFE_FROM_REORG):
+ self.reorganizable_txes.append((tx["txid"], tx["blockhash"],
+ new_history_element["height"], sh_to_add))
+ count += 1
+
+ unconfirmed_txes = defaultdict(list)
+ for scrhash, his in address_history.items():
+ uctx = self.sort_address_history_list(his)
+ for u in uctx:
+ unconfirmed_txes[u["tx_hash"]].append(scrhash)
+ logger.debug("unconfirmed_txes = " + str(unconfirmed_txes))
+ logger.debug("reorganizable_txes = " + str(self.reorganizable_txes))
+ if len(ret) > 0:
+ #txid doesnt uniquely identify transactions from listtransactions
+ #but the tuple (txid, address) does
+ self.last_known_wallet_txid = (last_tx["txid"],
+ last_tx.get("address", None))
+ else:
+ self.last_known_wallet_txid = None
+ logger.debug("last_known_wallet_txid = " + str(
+ self.last_known_wallet_txid))
+
+ et = time.time()
+ logger.debug("address_history =\n" + pprint.pformat(address_history))
+ logger.info("Found " + str(count) + " txes. History built in "
+ + str(et - st) + "sec")
+ self.address_history = address_history
+ self.unconfirmed_txes = unconfirmed_txes
+ return True
+
+ def get_input_and_output_scriptpubkeys(self, txid):
+ gettx = self.rpc.call("gettransaction", [txid])
+ txd = self.rpc.call("decoderawtransaction", [gettx["hex"]])
+ output_scriptpubkeys = [out["scriptPubKey"]["hex"]
+ for out in txd["vout"]]
+ input_scriptpubkeys = []
+ for inn in txd["vin"]:
+ try:
+ wallet_tx = self.rpc.call("gettransaction", [inn["txid"]])
+ except JsonRpcError:
+ #wallet doesnt know about this tx, so the input isnt ours
+ continue
+ input_decoded = self.rpc.call("decoderawtransaction", [wallet_tx[
+ "hex"]])
+ script = input_decoded["vout"][inn["vout"]]["scriptPubKey"]["hex"]
+ input_scriptpubkeys.append(script)
+ return output_scriptpubkeys, input_scriptpubkeys, txd
+
+ def generate_new_history_element(self, tx, txd):
+ logger = self.logger
+ if tx["confirmations"] == 0:
+ unconfirmed_input = False
+ total_input_value = 0
+ for inn in txd["vin"]:
+ utxo = self.rpc.call("gettxout", [inn["txid"], inn["vout"],
+ True])
+ if utxo is None:
+ utxo = self.rpc.call("gettxout", [inn["txid"], inn["vout"],
+ False])
+ if utxo is None:
+ rawtx = self.rpc.call("getrawtransaction", [inn["txid"],
+ True])
+ if rawtx is not None:
+ utxo = {"confirmations": 0,
+ "value": rawtx["vout"][inn["vout"]]["value"]}
+ if utxo is not None:
+ total_input_value += int(Decimal(utxo["value"]) *
+ Decimal(1e8))
+ unconfirmed_input = (unconfirmed_input or
+ utxo["confirmations"] == 0)
+ else:
+ # Electrum will now display a weird negative fee
+ logger.warning("input utxo not found(!)")
+
+ logger.debug("total_input_value = " + str(total_input_value))
+ fee = total_input_value - sum([int(Decimal(out["value"])
+ * Decimal(1e8)) for out in txd["vout"]])
+ height = -1 if unconfirmed_input else 0
+ new_history_element = ({"tx_hash": tx["txid"], "height": height,
+ "fee": fee})
+ else:
+ blockheader = self.rpc.call("getblockheader", [tx['blockhash']])
+ new_history_element = ({"tx_hash": tx["txid"],
+ "height": blockheader["height"]})
+ return new_history_element
+
+ def sort_address_history_list(self, his):
+ unconfirm_txes = list(filter(lambda h:h["height"] == 0, his["history"]))
+ confirm_txes = filter(lambda h:h["height"] != 0, his["history"])
+ #TODO txes must be "in blockchain order"
+ # the order they appear in the block
+ # it might be "blockindex" in listtransactions and gettransaction
+ #so must sort with key height+':'+blockindex
+ #maybe check if any heights are the same then get the pos only for those
+ #better way to do this is to have a separate dict that isnt in history
+ # which maps txid => blockindex
+ # and then sort by key height+":"+idx[txid]
+ his["history"] = sorted(confirm_txes, key=lambda h:h["height"])
+ his["history"].extend(unconfirm_txes)
+ return unconfirm_txes
+
+ def check_for_updated_txes(self):
+ logger = self.logger
+ updated_scrhashes1 = self.check_for_new_txes()
+ updated_scrhashes2 = self.check_for_confirmations()
+ updated_scrhashes3 = self.check_for_reorganizations()
+ updated_scrhashes = (updated_scrhashes1 | updated_scrhashes2
+ | updated_scrhashes3)
+ for ush in updated_scrhashes:
+ his = self.address_history[ush]
+ self.sort_address_history_list(his)
+ if len(updated_scrhashes) > 0:
+ logger.debug("new tx address_history =\n"
+ + pprint.pformat(self.address_history))
+ logger.debug("unconfirmed txes = "
+ + pprint.pformat(self.unconfirmed_txes))
+ logger.debug("self.reorganizable_txes = "
+ + pprint.pformat(self.reorganizable_txes))
+ logger.debug("updated_scripthashes = " + str(updated_scrhashes))
+ updated_scrhashes = filter(lambda sh:self.address_history[sh][
+ "subscribed"], updated_scrhashes)
+ return updated_scrhashes
+
+ def check_for_reorganizations(self):
+ logger = self.logger
+ elements_removed = []
+ elements_added = []
+ updated_scrhashes = set()
+ logger.debug("reorganizable_txes = " + str(self.reorganizable_txes))
+ for reorgable_tx in self.reorganizable_txes:
+ txid, blockhash, height, scrhashes = reorgable_tx
+ tx = self.rpc.call("gettransaction", [txid])
+ if tx["confirmations"] >= CONFIRMATIONS_SAFE_FROM_REORG:
+ elements_removed.append(reorgable_tx)
+ logger.debug("Transaction considered safe from reorg: " + txid)
+ continue
+ if tx["confirmations"] < 1:
+ updated_scrhashes.update(scrhashes)
+ if tx["confirmations"] == 0:
+ #transaction became unconfirmed in a reorg
+ logger.warning("A transaction was reorg'd out: " + txid)
+ elements_removed.append(reorgable_tx)
+ self.unconfirmed_txes[txid].extend(scrhashes)
+
+ #add to history as unconfirmed
+ txd = self.rpc.call("decoderawtransaction", [tx["hex"]])
+ new_history_element = self.generate_new_history_element(tx,
+ txd)
+ for scrhash in scrhashes:
+ self.address_history[scrhash]["history"].append(
+ new_history_element)
+
+ elif tx["confirmations"] < 0:
+ #tx became conflicted in reorg i.e. a double spend
+ logger.error("A transaction was double spent! " + txid)
+ elements_removed.append(reorgable_tx)
+ elif tx["blockhash"] != blockhash:
+ block = self.rpc.call("getblockheader", [tx["blockhash"]])
+ if block["height"] == height: #reorg but height is the same
+ logger.warning("A transaction was reorg'd but still confirmed " +
+ "at same height: " + txid)
+ continue
+ #reorged but still confirmed at a different height
+ updated_scrhashes.update(scrhashes)
+ logger.warning("A transaction was reorg'd but still confirmed to " +
+ "a new block and different height: " + txid)
+ #update history with the new height
+ for scrhash in scrhashes:
+ for h in self.address_history[scrhash]["history"]:
+ if h["tx_hash"] == txid:
+ h["height"] = block["height"]
+ #modify the reorgable tx with new hash and height
+ elements_removed.append(reorgable_tx)
+ elements_added.append((txid, tx["blockhash"], block["height"],
+ scrhashes))
+ continue
+ else:
+ continue #no change to reorgable tx
+ #remove tx from history
+ for scrhash in scrhashes:
+ deleted_entries = [h for h in self.address_history[scrhash][
+ "history"] if h["tx_hash"] == txid and
+ h["height"] == height]
+ for d_his in deleted_entries:
+ self.address_history[scrhash]["history"].remove(d_his)
+
+ for reorged_tx in elements_removed:
+ self.reorganizable_txes.remove(reorged_tx)
+ self.reorganizable_txes.extend(elements_added)
+ return updated_scrhashes
+
+ def check_for_confirmations(self):
+ logger = self.logger
+ tx_scrhashes_removed_from_mempool = []
+ logger.debug("check4con unconfirmed_txes = "
+ + pprint.pformat(self.unconfirmed_txes))
+ for uc_txid, scrhashes in self.unconfirmed_txes.items():
+ tx = self.rpc.call("gettransaction", [uc_txid])
+ logger.debug("uc_txid=" + uc_txid + " => " + str(tx))
+ if tx["confirmations"] == 0:
+ continue #still unconfirmed
+ tx_scrhashes_removed_from_mempool.append((uc_txid, scrhashes))
+ if tx["confirmations"] > 0:
+ logger.info("A transaction confirmed: " + uc_txid)
+ block = self.rpc.call("getblockheader", [tx["blockhash"]])
+ elif tx["confirmations"] < 0:
+ logger.warning("A transaction became conflicted: " + uc_txid)
+ for scrhash in scrhashes:
+ #delete the old unconfirmed entry in address_history
+ deleted_entries = [h for h in self.address_history[scrhash][
+ "history"] if h["tx_hash"] == uc_txid]
+ for d_his in deleted_entries:
+ self.address_history[scrhash]["history"].remove(d_his)
+ if tx["confirmations"] > 0:
+ #create the new confirmed entry in address_history
+ self.address_history[scrhash]["history"].append({"height":
+ block["height"], "tx_hash": uc_txid})
+ if tx["confirmations"] > 0:
+ self.reorganizable_txes.append((tx["txid"], tx["blockhash"],
+ block["height"], scrhashes))
+ updated_scrhashes = set()
+ for tx, scrhashes in tx_scrhashes_removed_from_mempool:
+ del self.unconfirmed_txes[tx]
+ updated_scrhashes.update(set(scrhashes))
+ return updated_scrhashes
+
+ def check_for_new_txes(self):
+ logger = self.logger
+ MAX_TX_REQUEST_COUNT = 256
+ tx_request_count = 2
+ max_attempts = int(math.log(MAX_TX_REQUEST_COUNT, 2))
+ for i in range(max_attempts):
+ logger.debug("listtransactions tx_request_count="
+ + str(tx_request_count))
+ ##how listtransactions works
+ ##skip and count parameters take most-recent txes first
+ ## so skip=0 count=1 will return the most recent tx
+ ##and skip=0 count=3 will return the 3 most recent txes
+ ##but the actual list returned has the REVERSED order
+ ##skip=0 count=3 will return a list with the most recent tx LAST
+ ret = self.rpc.call("listtransactions", ["*", tx_request_count, 0,
+ True])
+ ret = ret[::-1]
+ if self.last_known_wallet_txid == None:
+ recent_tx_index = len(ret) #=0 means no new txes
+ break
+ else:
+ txid_list = [(tx["txid"], tx.get("address", None))
+ for tx in ret]
+ recent_tx_index = next((i for i, (txid, addr)
+ in enumerate(txid_list) if
+ txid == self.last_known_wallet_txid[0] and
+ addr == self.last_known_wallet_txid[1]), -1)
+ if recent_tx_index != -1:
+ break
+ tx_request_count *= 2
+
+ #TODO low priority: handle a user getting more than 255 new
+ # transactions in 15 seconds
+ logger.debug("recent tx index = " + str(recent_tx_index) + " ret = " +
+ str([(t["txid"], t.get("address", None)) for t in ret]))
+ if len(ret) > 0:
+ self.last_known_wallet_txid = (ret[0]["txid"],
+ ret[0].get("address", None))
+ logger.debug("last_known_wallet_txid = " + str(
+ self.last_known_wallet_txid))
+ assert(recent_tx_index != -1)
+ if recent_tx_index == 0:
+ return set()
+ new_txes = ret[:recent_tx_index][::-1]
+ logger.debug("new txes = " + str(new_txes))
+ obtained_txids = set()
+ updated_scripthashes = []
+ for tx in new_txes:
+ if "txid" not in tx or "category" not in tx:
+ continue
+ if tx["category"] not in ("receive", "send"):
+ continue
+ if tx["confirmations"] < 0:
+ continue #conflicted
+ if tx["txid"] in obtained_txids:
+ continue
+ obtained_txids.add(tx["txid"])
+ output_scriptpubkeys, input_scriptpubkeys, txd = \
+ self.get_input_and_output_scriptpubkeys(tx["txid"])
+ matching_scripthashes = []
+ for spk in (output_scriptpubkeys + input_scriptpubkeys):
+ scripthash = script_to_scripthash(spk)
+ if scripthash in self.address_history:
+ matching_scripthashes.append(scripthash)
+ if len(matching_scripthashes) == 0:
+ continue
+
+ for wal in self.deterministic_wallets:
+ overrun_depths = wal.have_scriptpubkeys_overrun_gaplimit(
+ output_scriptpubkeys)
+ if overrun_depths != None:
+ for change, import_count in overrun_depths.items():
+ spks = wal.get_new_scriptpubkeys(change, import_count)
+ for spk in spks:
+ self.address_history[script_to_scripthash(
+ spk)] = {'history': [], 'subscribed': False}
+ new_addrs = [script_to_address(s, self.rpc)
+ for s in spks]
+ logger.debug("importing " + str(len(spks)) +
+ " into change=" + str(change))
+ import_addresses(self.rpc, new_addrs, logger)
+
+ updated_scripthashes.extend(matching_scripthashes)
+ new_history_element = self.generate_new_history_element(tx, txd)
+ logger.info("Found new tx: " + str(new_history_element))
+ for scrhash in matching_scripthashes:
+ self.address_history[scrhash]["history"].append(
+ new_history_element)
+ if new_history_element["height"] == 0:
+ self.unconfirmed_txes[tx["txid"]].append(scrhash)
+ if tx["confirmations"] > 0:
+ self.reorganizable_txes.append((tx["txid"], tx["blockhash"],
+ new_history_element["height"], matching_scripthashes))
+ return set(updated_scripthashes)
+
diff --git a/electrumpersonalserver/transactionmonitor.py b/electrumpersonalserver/transactionmonitor.py
@@ -1,470 +0,0 @@
-
-import time, pprint, math, sys
-from decimal import Decimal
-from collections import defaultdict
-
-from electrumpersonalserver.jsonrpc import JsonRpcError
-import electrumpersonalserver.hashes as hashes
-
-#internally this code uses scriptPubKeys, it only converts to bitcoin addresses
-# when importing to bitcoind or checking whether enough addresses have been
-# imported
-#the electrum protocol uses sha256(scriptpubkey) as a key for lookups
-# this code calls them scripthashes
-
-#code will generate the first address from each deterministic wallet
-# and check whether they have been imported into the bitcoin node
-# if no then initial_import_count addresses will be imported, then exit
-# if yes then initial_import_count addresses will be generated and extra
-# addresses will be generated one-by-one, each time checking whether they have
-# been imported into the bitcoin node
-# when an address has been reached that has not been imported, that means
-# we've reached the end, then rewind the deterministic wallet index by one
-
-#when a transaction happens paying to an address from a deterministic wallet
-# lookup the position of that address, if its less than gap_limit then
-# import more addresses
-
-ADDRESSES_LABEL = "electrum-watchonly-addresses"
-CONFIRMATIONS_SAFE_FROM_REORG = 100
-
-def import_addresses(rpc, addrs, debug, log):
- debug("importing addrs = " + str(addrs))
- log("Importing " + str(len(addrs)) + " addresses in total")
- addr_i = iter(addrs)
- notifications = 10
- for i in range(notifications):
- pc = int(100.0 * i / notifications)
- sys.stdout.write("[" + str(pc) + "%]... ")
- sys.stdout.flush()
- for j in range(int(len(addrs) / notifications)):
- rpc.call("importaddress", [next(addr_i), ADDRESSES_LABEL, False])
- for a in addr_i: #import the reminder of addresses
- rpc.call("importaddress", [a, ADDRESSES_LABEL, False])
- print("[100%]")
- log("Importing done")
-
-class TransactionMonitor(object):
- """
- Class which monitors the bitcoind wallet for new transactions
- and builds a history datastructure for sending to electrum
- """
- def __init__(self, rpc, deterministic_wallets, debug, log):
- self.rpc = rpc
- self.deterministic_wallets = deterministic_wallets
- self.debug = debug
- self.log = log
- self.last_known_wallet_txid = None
- self.address_history = None
- self.unconfirmed_txes = None
- self.reorganizable_txes = None
-
- def get_electrum_history_hash(self, scrhash):
- return hashes.get_status_electrum( [(h["tx_hash"], h["height"])
- for h in self.address_history[scrhash]["history"]] )
-
- def get_electrum_history(self, scrhash):
- if scrhash in self.address_history:
- return self.address_history[scrhash]["history"]
- else:
- return None
-
- def subscribe_address(self, scrhash):
- if scrhash in self.address_history:
- self.address_history[scrhash]["subscribed"] = True
- return True
- else:
- return False
-
- def unsubscribe_all_addresses(self):
- for scrhash, his in self.address_history.items():
- his["subscribed"] = False
-
- def build_address_history(self, monitored_scriptpubkeys):
- self.log("Building history with " + str(len(monitored_scriptpubkeys)) +
- " addresses . . .")
- st = time.time()
- address_history = {}
- for spk in monitored_scriptpubkeys:
- address_history[hashes.script_to_scripthash(spk)] = {'history': [],
- 'subscribed': False}
- wallet_addr_scripthashes = set(address_history.keys())
- self.reorganizable_txes = []
- #populate history
- #which is a blockheight-ordered list of ("txhash", height)
- #unconfirmed transactions go at the end as ("txhash", 0, fee)
- # 0=unconfirmed -1=unconfirmed with unconfirmed parents
-
- BATCH_SIZE = 1000
- ret = list(range(BATCH_SIZE))
- t = 0
- count = 0
- obtained_txids = set()
- last_tx = None
- while len(ret) == BATCH_SIZE:
- ret = self.rpc.call("listtransactions", ["*", BATCH_SIZE, t, True])
- self.debug("listtransactions skip=" + str(t) + " len(ret)="
- + str(len(ret)))
- if t == 0 and len(ret) > 0:
- last_tx = ret[-1]
- t += len(ret)
- for tx in ret:
- if "txid" not in tx or "category" not in tx:
- continue
- if tx["category"] not in ("receive", "send"):
- continue
- if tx["confirmations"] < 0:
- continue #conflicted
- if tx["txid"] in obtained_txids:
- continue
- self.debug("adding obtained tx=" + str(tx["txid"]))
- obtained_txids.add(tx["txid"])
-
- #obtain all the addresses this transaction is involved with
- output_scriptpubkeys, input_scriptpubkeys, txd = \
- self.get_input_and_output_scriptpubkeys(tx["txid"])
- output_scripthashes = [hashes.script_to_scripthash(sc)
- for sc in output_scriptpubkeys]
- sh_to_add = wallet_addr_scripthashes.intersection(set(
- output_scripthashes))
- input_scripthashes = [hashes.script_to_scripthash(sc)
- for sc in input_scriptpubkeys]
- sh_to_add |= wallet_addr_scripthashes.intersection(set(
- input_scripthashes))
- if len(sh_to_add) == 0:
- continue
-
- for wal in self.deterministic_wallets:
- overrun_depths = wal.have_scriptpubkeys_overrun_gaplimit(
- output_scriptpubkeys)
- if overrun_depths != None:
- self.log("ERROR: Not enough addresses imported.")
- self.log("Delete wallet.dat and increase the value " +
- "of `initial_import_count` in the file " +
- "`config.cfg` then reimport and rescan")
- #TODO make it so users dont have to delete wallet.dat
- # check whether all initial_import_count addresses are
- # imported rather than just the first one
- return False
- new_history_element = self.generate_new_history_element(tx, txd)
- for scripthash in sh_to_add:
- address_history[scripthash][
- "history"].append(new_history_element)
- if tx["confirmations"] > 0 and (tx["confirmations"] <
- CONFIRMATIONS_SAFE_FROM_REORG):
- self.reorganizable_txes.append((tx["txid"], tx["blockhash"],
- new_history_element["height"], sh_to_add))
- count += 1
-
- unconfirmed_txes = defaultdict(list)
- for scrhash, his in address_history.items():
- uctx = self.sort_address_history_list(his)
- for u in uctx:
- unconfirmed_txes[u["tx_hash"]].append(scrhash)
- self.debug("unconfirmed_txes = " + str(unconfirmed_txes))
- self.debug("reorganizable_txes = " + str(self.reorganizable_txes))
- if len(ret) > 0:
- #txid doesnt uniquely identify transactions from listtransactions
- #but the tuple (txid, address) does
- self.last_known_wallet_txid = (last_tx["txid"],
- last_tx.get("address", None))
- else:
- self.last_known_wallet_txid = None
- self.debug("last_known_wallet_txid = " + str(
- self.last_known_wallet_txid))
-
- et = time.time()
- self.debug("address_history =\n" + pprint.pformat(address_history))
- self.log("Found " + str(count) + " txes. History built in "
- + str(et - st) + "sec")
- self.address_history = address_history
- self.unconfirmed_txes = unconfirmed_txes
- return True
-
- def get_input_and_output_scriptpubkeys(self, txid):
- gettx = self.rpc.call("gettransaction", [txid])
- txd = self.rpc.call("decoderawtransaction", [gettx["hex"]])
- output_scriptpubkeys = [out["scriptPubKey"]["hex"]
- for out in txd["vout"]]
- input_scriptpubkeys = []
- for inn in txd["vin"]:
- try:
- wallet_tx = self.rpc.call("gettransaction", [inn["txid"]])
- except JsonRpcError:
- #wallet doesnt know about this tx, so the input isnt ours
- continue
- input_decoded = self.rpc.call("decoderawtransaction", [wallet_tx[
- "hex"]])
- script = input_decoded["vout"][inn["vout"]]["scriptPubKey"]["hex"]
- input_scriptpubkeys.append(script)
- return output_scriptpubkeys, input_scriptpubkeys, txd
-
- def generate_new_history_element(self, tx, txd):
- if tx["confirmations"] == 0:
- unconfirmed_input = False
- total_input_value = 0
- for inn in txd["vin"]:
- utxo = self.rpc.call("gettxout", [inn["txid"], inn["vout"],
- True])
- if utxo is None:
- utxo = self.rpc.call("gettxout", [inn["txid"], inn["vout"],
- False])
- if utxo is None:
- rawtx = self.rpc.call("getrawtransaction", [inn["txid"],
- True])
- if rawtx is not None:
- utxo = {"confirmations": 0,
- "value": rawtx["vout"][inn["vout"]]["value"]}
- if utxo is not None:
- total_input_value += int(Decimal(utxo["value"]) *
- Decimal(1e8))
- unconfirmed_input = (unconfirmed_input or
- utxo["confirmations"] == 0)
- else:
- # Electrum will now display a weird negative fee
- self.log("WARNING: input utxo not found(!)")
-
- self.debug("total_input_value = " + str(total_input_value))
- fee = total_input_value - sum([int(Decimal(out["value"])
- * Decimal(1e8)) for out in txd["vout"]])
- height = -1 if unconfirmed_input else 0
- new_history_element = ({"tx_hash": tx["txid"], "height": height,
- "fee": fee})
- else:
- blockheader = self.rpc.call("getblockheader", [tx['blockhash']])
- new_history_element = ({"tx_hash": tx["txid"],
- "height": blockheader["height"]})
- return new_history_element
-
- def sort_address_history_list(self, his):
- unconfirm_txes = list(filter(lambda h:h["height"] == 0, his["history"]))
- confirm_txes = filter(lambda h:h["height"] != 0, his["history"])
- #TODO txes must be "in blockchain order"
- # the order they appear in the block
- # it might be "blockindex" in listtransactions and gettransaction
- #so must sort with key height+':'+blockindex
- #maybe check if any heights are the same then get the pos only for those
- #better way to do this is to have a separate dict that isnt in history
- # which maps txid => blockindex
- # and then sort by key height+":"+idx[txid]
- his["history"] = sorted(confirm_txes, key=lambda h:h["height"])
- his["history"].extend(unconfirm_txes)
- return unconfirm_txes
-
- def check_for_updated_txes(self):
- updated_scrhashes1 = self.check_for_new_txes()
- updated_scrhashes2 = self.check_for_confirmations()
- updated_scrhashes3 = self.check_for_reorganizations()
- updated_scrhashes = (updated_scrhashes1 | updated_scrhashes2
- | updated_scrhashes3)
- for ush in updated_scrhashes:
- his = self.address_history[ush]
- self.sort_address_history_list(his)
- if len(updated_scrhashes) > 0:
- self.debug("new tx address_history =\n"
- + pprint.pformat(self.address_history))
- self.debug("unconfirmed txes = "
- + pprint.pformat(self.unconfirmed_txes))
- self.debug("self.reorganizable_txes = "
- + pprint.pformat(self.reorganizable_txes))
- self.debug("updated_scripthashes = " + str(updated_scrhashes))
- updated_scrhashes = filter(lambda sh:self.address_history[sh][
- "subscribed"], updated_scrhashes)
- return updated_scrhashes
-
- def check_for_reorganizations(self):
- elements_removed = []
- elements_added = []
- updated_scrhashes = set()
- self.debug("reorganizable_txes = " + str(self.reorganizable_txes))
- for reorgable_tx in self.reorganizable_txes:
- txid, blockhash, height, scrhashes = reorgable_tx
- tx = self.rpc.call("gettransaction", [txid])
- if tx["confirmations"] >= CONFIRMATIONS_SAFE_FROM_REORG:
- elements_removed.append(reorgable_tx)
- self.debug("Transaction considered safe from reorg: " + txid)
- continue
- if tx["confirmations"] < 1:
- updated_scrhashes.update(scrhashes)
- if tx["confirmations"] == 0:
- #transaction became unconfirmed in a reorg
- self.log("A transaction was reorg'd out: " + txid)
- elements_removed.append(reorgable_tx)
- self.unconfirmed_txes[txid].extend(scrhashes)
-
- #add to history as unconfirmed
- txd = self.rpc.call("decoderawtransaction", [tx["hex"]])
- new_history_element = self.generate_new_history_element(tx,
- txd)
- for scrhash in scrhashes:
- self.address_history[scrhash]["history"].append(
- new_history_element)
-
- elif tx["confirmations"] < 0:
- #tx became conflicted in reorg i.e. a double spend
- self.log("A transaction was double spent! " + txid)
- elements_removed.append(reorgable_tx)
- elif tx["blockhash"] != blockhash:
- block = self.rpc.call("getblockheader", [tx["blockhash"]])
- if block["height"] == height: #reorg but height is the same
- self.log("A transaction was reorg'd but still confirmed " +
- "at same height: " + txid)
- continue
- #reorged but still confirmed at a different height
- updated_scrhashes.update(scrhashes)
- self.log("A transaction was reorg'd but still confirmed to " +
- "a new block and different height: " + txid)
- #update history with the new height
- for scrhash in scrhashes:
- for h in self.address_history[scrhash]["history"]:
- if h["tx_hash"] == txid:
- h["height"] = block["height"]
- #modify the reorgable tx with new hash and height
- elements_removed.append(reorgable_tx)
- elements_added.append((txid, tx["blockhash"], block["height"],
- scrhashes))
- continue
- else:
- continue #no change to reorgable tx
- #remove tx from history
- for scrhash in scrhashes:
- deleted_entries = [h for h in self.address_history[scrhash][
- "history"] if h["tx_hash"] == txid and
- h["height"] == height]
- for d_his in deleted_entries:
- self.address_history[scrhash]["history"].remove(d_his)
-
- for reorged_tx in elements_removed:
- self.reorganizable_txes.remove(reorged_tx)
- self.reorganizable_txes.extend(elements_added)
- return updated_scrhashes
-
- def check_for_confirmations(self):
- tx_scrhashes_removed_from_mempool = []
- self.debug("check4con unconfirmed_txes = "
- + pprint.pformat(self.unconfirmed_txes))
- for uc_txid, scrhashes in self.unconfirmed_txes.items():
- tx = self.rpc.call("gettransaction", [uc_txid])
- self.debug("uc_txid=" + uc_txid + " => " + str(tx))
- if tx["confirmations"] == 0:
- continue #still unconfirmed
- tx_scrhashes_removed_from_mempool.append((uc_txid, scrhashes))
- if tx["confirmations"] > 0:
- self.log("A transaction confirmed: " + uc_txid)
- block = self.rpc.call("getblockheader", [tx["blockhash"]])
- elif tx["confirmations"] < 0:
- self.log("A transaction became conflicted: " + uc_txid)
- for scrhash in scrhashes:
- #delete the old unconfirmed entry in address_history
- deleted_entries = [h for h in self.address_history[scrhash][
- "history"] if h["tx_hash"] == uc_txid]
- for d_his in deleted_entries:
- self.address_history[scrhash]["history"].remove(d_his)
- if tx["confirmations"] > 0:
- #create the new confirmed entry in address_history
- self.address_history[scrhash]["history"].append({"height":
- block["height"], "tx_hash": uc_txid})
- if tx["confirmations"] > 0:
- self.reorganizable_txes.append((tx["txid"], tx["blockhash"],
- block["height"], scrhashes))
- updated_scrhashes = set()
- for tx, scrhashes in tx_scrhashes_removed_from_mempool:
- del self.unconfirmed_txes[tx]
- updated_scrhashes.update(set(scrhashes))
- return updated_scrhashes
-
- def check_for_new_txes(self):
- MAX_TX_REQUEST_COUNT = 256
- tx_request_count = 2
- max_attempts = int(math.log(MAX_TX_REQUEST_COUNT, 2))
- for i in range(max_attempts):
- self.debug("listtransactions tx_request_count="
- + str(tx_request_count))
- ##how listtransactions works
- ##skip and count parameters take most-recent txes first
- ## so skip=0 count=1 will return the most recent tx
- ##and skip=0 count=3 will return the 3 most recent txes
- ##but the actual list returned has the REVERSED order
- ##skip=0 count=3 will return a list with the most recent tx LAST
- ret = self.rpc.call("listtransactions", ["*", tx_request_count, 0,
- True])
- ret = ret[::-1]
- if self.last_known_wallet_txid == None:
- recent_tx_index = len(ret) #=0 means no new txes
- break
- else:
- txid_list = [(tx["txid"], tx.get("address", None))
- for tx in ret]
- recent_tx_index = next((i for i, (txid, addr)
- in enumerate(txid_list) if
- txid == self.last_known_wallet_txid[0] and
- addr == self.last_known_wallet_txid[1]), -1)
- if recent_tx_index != -1:
- break
- tx_request_count *= 2
-
- #TODO low priority: handle a user getting more than 255 new
- # transactions in 15 seconds
- self.debug("recent tx index = " + str(recent_tx_index) + " ret = " +
- str([(t["txid"], t.get("address", None)) for t in ret]))
- if len(ret) > 0:
- self.last_known_wallet_txid = (ret[0]["txid"],
- ret[0].get("address", None))
- self.debug("last_known_wallet_txid = " + str(
- self.last_known_wallet_txid))
- assert(recent_tx_index != -1)
- if recent_tx_index == 0:
- return set()
- new_txes = ret[:recent_tx_index][::-1]
- self.debug("new txes = " + str(new_txes))
- obtained_txids = set()
- updated_scripthashes = []
- for tx in new_txes:
- if "txid" not in tx or "category" not in tx:
- continue
- if tx["category"] not in ("receive", "send"):
- continue
- if tx["confirmations"] < 0:
- continue #conflicted
- if tx["txid"] in obtained_txids:
- continue
- obtained_txids.add(tx["txid"])
- output_scriptpubkeys, input_scriptpubkeys, txd = \
- self.get_input_and_output_scriptpubkeys(tx["txid"])
- matching_scripthashes = []
- for spk in (output_scriptpubkeys + input_scriptpubkeys):
- scripthash = hashes.script_to_scripthash(spk)
- if scripthash in self.address_history:
- matching_scripthashes.append(scripthash)
- if len(matching_scripthashes) == 0:
- continue
-
- for wal in self.deterministic_wallets:
- overrun_depths = wal.have_scriptpubkeys_overrun_gaplimit(
- output_scriptpubkeys)
- if overrun_depths != None:
- for change, import_count in overrun_depths.items():
- spks = wal.get_new_scriptpubkeys(change, import_count)
- for spk in spks:
- self.address_history[hashes.script_to_scripthash(
- spk)] = {'history': [], 'subscribed': False}
- new_addrs = [hashes.script_to_address(s, self.rpc)
- for s in spks]
- self.debug("importing " + str(len(spks)) +
- " into change=" + str(change))
- import_addresses(self.rpc, new_addrs, self.debug,
- self.log)
-
- updated_scripthashes.extend(matching_scripthashes)
- new_history_element = self.generate_new_history_element(tx, txd)
- self.log("Found new tx: " + str(new_history_element))
- for scrhash in matching_scripthashes:
- self.address_history[scrhash]["history"].append(
- new_history_element)
- if new_history_element["height"] == 0:
- self.unconfirmed_txes[tx["txid"]].append(scrhash)
- if tx["confirmations"] > 0:
- self.reorganizable_txes.append((tx["txid"], tx["blockhash"],
- new_history_element["height"], matching_scripthashes))
- return set(updated_scripthashes)
-
diff --git a/rescan-script.bat b/rescan-script.bat
@@ -1,3 +0,0 @@
-@echo off
-python rescan-script.py
-pause
diff --git a/rescan-script.py b/rescan-script.py
@@ -1,71 +0,0 @@
-#! /usr/bin/env python3
-
-from configparser import ConfigParser, NoSectionError, NoOptionError
-from electrumpersonalserver.jsonrpc import JsonRpc, JsonRpcError
-from datetime import datetime
-import server
-
-def search_for_block_height_of_date(datestr, rpc):
- target_time = datetime.strptime(datestr, "%d/%m/%Y")
- bestblockhash = rpc.call("getbestblockhash", [])
- best_head = rpc.call("getblockheader", [bestblockhash])
- if target_time > datetime.fromtimestamp(best_head["time"]):
- print("ERROR: date in the future")
- return -1
- genesis_block = rpc.call("getblockheader", [rpc.call("getblockhash", [0])])
- if target_time < datetime.fromtimestamp(genesis_block["time"]):
- print("WARNING: date is before the creation of bitcoin")
- return 0
- first_height = 0
- last_height = best_head["height"]
- while True:
- m = (first_height + last_height) // 2
- m_header = rpc.call("getblockheader", [rpc.call("getblockhash", [m])])
- m_header_time = datetime.fromtimestamp(m_header["time"])
- m_time_diff = (m_header_time - target_time).total_seconds()
- if abs(m_time_diff) < 60*60*2: #2 hours
- return m_header["height"]
- elif m_time_diff < 0:
- first_height = m
- elif m_time_diff > 0:
- last_height = m
- else:
- return -1
-
-def main():
- try:
- config = ConfigParser()
- config.read(["config.cfg"])
- config.options("master-public-keys")
- except NoSectionError:
- print("Non-existant configuration file `config.cfg`")
- return
- try:
- rpc_u = config.get("bitcoin-rpc", "rpc_user")
- rpc_p = config.get("bitcoin-rpc", "rpc_password")
- except NoOptionError:
- rpc_u, rpc_p = server.obtain_rpc_username_password(config.get(
- "bitcoin-rpc", "datadir"))
- if rpc_u == None:
- return
- rpc = JsonRpc(host = config.get("bitcoin-rpc", "host"),
- port = int(config.get("bitcoin-rpc", "port")),
- user = rpc_u, password = rpc_p,
- wallet_filename=config.get("bitcoin-rpc", "wallet_filename").strip())
- user_input = input("Enter earliest wallet creation date (DD/MM/YYYY) "
- "or block height to rescan from: ")
- try:
- height = int(user_input)
- except ValueError:
- height = search_for_block_height_of_date(user_input, rpc)
- if height == -1:
- return
- height -= 2016 #go back two weeks for safety
-
- if input("Rescan from block height " + str(height) + " ? (y/n):") != 'y':
- return
- rpc.call("rescanblockchain", [height])
- print("end")
-
-
-main()
diff --git a/run-server.bat b/run-server.bat
@@ -1,3 +0,0 @@
-@echo off
-python server.py
-pause
diff --git a/server.py b/server.py
@@ -1,572 +0,0 @@
-#! /usr/bin/env python3
-
-import socket, time, json, datetime, struct, binascii, ssl, os, os.path
-from configparser import ConfigParser, NoSectionError, NoOptionError
-from collections import defaultdict
-import traceback, sys, platform
-from ipaddress import ip_network, ip_address
-
-from electrumpersonalserver.jsonrpc import JsonRpc, JsonRpcError
-import electrumpersonalserver.hashes as hashes
-import electrumpersonalserver.merkleproof as merkleproof
-import electrumpersonalserver.deterministicwallet as deterministicwallet
-import electrumpersonalserver.transactionmonitor as transactionmonitor
-
-VERSION_NUMBER = "0.1"
-
-DONATION_ADDR = "bc1q5d8l0w33h65e2l5x7ty6wgnvkvlqcz0wfaslpz"
-
-BANNER = \
-"""Welcome to Electrum Personal Server
-
-Monitoring {detwallets} deterministic wallets, in total {addr} addresses.
-
-Connected bitcoin node: {useragent}
-Peers: {peers}
-Uptime: {uptime}
-Blocksonly: {blocksonly}
-Pruning: {pruning}
-Download: {recvbytes}
-Upload: {sentbytes}
-
-https://github.com/chris-belcher/electrum-personal-server
-
-Donate to help make Electrum Personal Server even better:
-{donationaddr}
-
-"""
-
-##python has demented rules for variable scope, so these
-## global variables are actually mutable lists
-subscribed_to_headers = [False]
-are_headers_raw = [False]
-bestblockhash = [None]
-debug_fd = None
-
-#log for checking up/seeing your wallet, debug for when something has gone wrong
-def debugorlog(line, ttype):
- timestamp = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S,%f")
- return timestamp + " [" + ttype + "] " + line
-
-def debug(line):
- global debug_fd
- if debug_fd == None:
- return
- debug_fd.write(debugorlog(line, "DEBUG") + "\n")
- debug_fd.flush()
-
-def log(line):
- global debug_fd
- line = debugorlog(line, " LOG")
- print(line)
- if debug_fd == None:
- return
- debug_fd.write(line + "\n")
- debug_fd.flush()
-
-def send_response(sock, query, result):
- query["result"] = result
- query["jsonrpc"] = "2.0"
- sock.sendall(json.dumps(query).encode('utf-8') + b'\n')
- debug('<= ' + json.dumps(query))
-
-def send_update(sock, update):
- update["jsonrpc"] = "2.0"
- sock.sendall(json.dumps(update).encode('utf-8') + b'\n')
- debug('<= ' + json.dumps(update))
-
-def send_error(sock, nid, error):
- payload = {"error": error, "jsonrpc": "2.0", "id": nid}
- sock.sendall(json.dumps(payload).encode('utf-8') + b'\n')
- debug('<= ' + json.dumps(payload))
-
-def on_heartbeat_listening(txmonitor):
- debug("on heartbeat listening")
- txmonitor.check_for_updated_txes()
-
-def on_heartbeat_connected(sock, rpc, txmonitor):
- debug("on heartbeat connected")
- is_tip_updated, header = check_for_new_blockchain_tip(rpc,
- are_headers_raw[0])
- if is_tip_updated:
- debug("Blockchain tip updated")
- if subscribed_to_headers[0]:
- update = {"method": "blockchain.headers.subscribe",
- "params": [header]}
- send_update(sock, update)
- updated_scripthashes = txmonitor.check_for_updated_txes()
- for scrhash in updated_scripthashes:
- history_hash = txmonitor.get_electrum_history_hash(scrhash)
- update = {"method": "blockchain.scripthash.subscribe", "params":
- [scrhash, history_hash]}
- send_update(sock, update)
-
-def on_disconnect(txmonitor):
- subscribed_to_headers[0] = False
- txmonitor.unsubscribe_all_addresses()
-
-def handle_query(sock, line, rpc, txmonitor):
- debug("=> " + line)
- try:
- query = json.loads(line)
- except json.decoder.JSONDecodeError as e:
- raise IOError(e)
- method = query["method"]
-
- #protocol documentation
- #https://github.com/kyuupichan/electrumx/blob/master/docs/PROTOCOL.rst
- if method == "blockchain.transaction.get":
- tx = rpc.call("gettransaction", [query["params"][0]])
- send_response(sock, query, tx["hex"])
- elif method == "blockchain.transaction.get_merkle":
- txid = query["params"][0]
- try:
- tx = rpc.call("gettransaction", [txid])
- core_proof = rpc.call("gettxoutproof", [[txid], tx["blockhash"]])
- electrum_proof = merkleproof.convert_core_to_electrum_merkle_proof(
- core_proof)
- implied_merkle_root = hashes.hash_merkle_root(
- electrum_proof["merkle"], txid, electrum_proof["pos"])
- if implied_merkle_root != electrum_proof["merkleroot"]:
- raise ValueError
- txheader = get_block_header(rpc, tx["blockhash"])
- reply = {"block_height": txheader["block_height"], "pos":
- electrum_proof["pos"], "merkle": electrum_proof["merkle"]}
- except (ValueError, JsonRpcError) as e:
- log("WARNING: merkle proof failed for " + txid + " err=" + repr(e))
- #so reply with an invalid proof which electrum handles without
- # disconnecting us
- #https://github.com/spesmilo/electrum/blob/c8e67e2bd07efe042703bc1368d499c5e555f854/lib/verifier.py#L74
- reply = {"block_height": 1, "pos": 0, "merkle": [txid]}
- send_response(sock, query, reply)
- elif method == "blockchain.scripthash.subscribe":
- scrhash = query["params"][0]
- if txmonitor.subscribe_address(scrhash):
- history_hash = txmonitor.get_electrum_history_hash(scrhash)
- else:
- log("WARNING: address scripthash not known to server: " + scrhash)
- history_hash = hashes.get_status_electrum([])
- send_response(sock, query, history_hash)
- elif method == "blockchain.scripthash.get_history":
- scrhash = query["params"][0]
- history = txmonitor.get_electrum_history(scrhash)
- if history == None:
- history = []
- log("WARNING: address scripthash history not known to server: "
- + scrhash)
- send_response(sock, query, history)
- elif method == "blockchain.headers.subscribe":
- subscribed_to_headers[0] = True
- if len(query["params"]) > 0:
- are_headers_raw[0] = query["params"][0]
- new_bestblockhash, header = get_current_header(rpc, are_headers_raw[0])
- send_response(sock, query, header)
- elif method == "blockchain.block.get_header":
- height = query["params"][0]
- try:
- blockhash = rpc.call("getblockhash", [height])
- header = get_block_header(rpc, blockhash)
- send_response(sock, query, header)
- except JsonRpcError:
- error = {"message": "height " + str(height) + " out of range",
- "code": -1}
- send_error(sock, query["id"], error)
- elif method == "blockchain.block.headers":
- MAX_CHUNK_SIZE = 2016
- start_height = query["params"][0]
- count = query["params"][1]
- count = min(count, MAX_CHUNK_SIZE)
- headers_hex, n = get_block_headers_hex(rpc, start_height, count)
- send_response(sock, query, {'hex': headers_hex, 'count': n, 'max':
- MAX_CHUNK_SIZE})
- elif method == "blockchain.block.get_chunk":
- RETARGET_INTERVAL = 2016
- index = query["params"][0]
- tip_height = rpc.call("getblockchaininfo", [])["headers"]
- #logic copied from kyuupichan's electrumx get_chunk() in controller.py
- next_height = tip_height + 1
- start_height = min(index*RETARGET_INTERVAL, next_height)
- count = min(next_height - start_height, RETARGET_INTERVAL)
- headers_hex, n = get_block_headers_hex(rpc, start_height, count)
- send_response(sock, query, headers_hex)
- elif method == "blockchain.transaction.broadcast":
- try:
- result = rpc.call("sendrawtransaction", [query["params"][0]])
- except JsonRpcError as e:
- result = str(e)
- debug("tx broadcast result = " + str(result))
- send_response(sock, query, result)
- elif method == "mempool.get_fee_histogram":
- mempool = rpc.call("getrawmempool", [True])
-
- #algorithm copied from the relevant place in ElectrumX
- #https://github.com/kyuupichan/electrumx/blob/e92c9bd4861c1e35989ad2773d33e01219d33280/server/mempool.py
- fee_hist = defaultdict(int)
- for txid, details in mempool.items():
- fee_rate = 1e8*details["fee"] // details["size"]
- fee_hist[fee_rate] += details["size"]
-
- l = list(reversed(sorted(fee_hist.items())))
- out = []
- size = 0
- r = 0
- binsize = 100000
- for fee, s in l:
- size += s
- if size + r > binsize:
- out.append((fee, size))
- r += size - binsize
- size = 0
- binsize *= 1.1
-
- result = out
- send_response(sock, query, result)
- elif method == "blockchain.estimatefee":
- estimate = rpc.call("estimatesmartfee", [query["params"][0]])
- feerate = 0.0001
- if "feerate" in estimate:
- feerate = estimate["feerate"]
- send_response(sock, query, feerate)
- elif method == "blockchain.relayfee":
- networkinfo = rpc.call("getnetworkinfo", [])
- send_response(sock, query, networkinfo["relayfee"])
- elif method == "server.banner":
- networkinfo = rpc.call("getnetworkinfo", [])
- blockchaininfo = rpc.call("getblockchaininfo", [])
- uptime = rpc.call("uptime", [])
- nettotals = rpc.call("getnettotals", [])
- send_response(sock, query, BANNER.format(
- detwallets=len(txmonitor.deterministic_wallets),
- addr=len(txmonitor.address_history),
- useragent=networkinfo["subversion"],
- peers=networkinfo["connections"],
- uptime=str(datetime.timedelta(seconds=uptime)),
- blocksonly=not networkinfo["localrelay"],
- pruning=blockchaininfo["pruned"],
- recvbytes=hashes.bytes_fmt(nettotals["totalbytesrecv"]),
- sentbytes=hashes.bytes_fmt(nettotals["totalbytessent"]),
- donationaddr=DONATION_ADDR))
- elif method == "server.donation_address":
- send_response(sock, query, DONATION_ADDR)
- elif method == "server.version":
- send_response(sock, query, ["ElectrumPersonalServer "
- + VERSION_NUMBER, VERSION_NUMBER])
- elif method == "server.peers.subscribe":
- send_response(sock, query, []) #no peers to report
- else:
- log("*** BUG! Not handling method: " + method + " query=" + str(query))
- #TODO just send back the same query with result = []
-
-def get_block_header(rpc, blockhash, raw=False):
- rpc_head = rpc.call("getblockheader", [blockhash])
- if "previousblockhash" in rpc_head:
- prevblockhash = rpc_head["previousblockhash"]
- else:
- prevblockhash = "00"*32 #genesis block
- if raw:
- head_hex = struct.pack("<i32s32sIII", rpc_head["version"],
- binascii.unhexlify(prevblockhash)[::-1],
- binascii.unhexlify(rpc_head["merkleroot"])[::-1],
- rpc_head["time"], int(rpc_head["bits"], 16), rpc_head["nonce"])
- head_hex = binascii.hexlify(head_hex).decode("utf-8")
- header = {"hex": head_hex, "height": rpc_head["height"]}
- else:
- header = {"block_height": rpc_head["height"],
- "prev_block_hash": prevblockhash,
- "timestamp": rpc_head["time"],
- "merkle_root": rpc_head["merkleroot"],
- "version": rpc_head["version"],
- "nonce": rpc_head["nonce"],
- "bits": int(rpc_head["bits"], 16)}
- return header
-
-def get_current_header(rpc, raw):
- new_bestblockhash = rpc.call("getbestblockhash", [])
- header = get_block_header(rpc, new_bestblockhash, raw)
- return new_bestblockhash, header
-
-def check_for_new_blockchain_tip(rpc, raw):
- new_bestblockhash, header = get_current_header(rpc, raw)
- is_tip_new = bestblockhash[0] != new_bestblockhash
- bestblockhash[0] = new_bestblockhash
- return is_tip_new, header
-
-def get_block_headers_hex(rpc, start_height, count):
- #read count number of headers starting from start_height
- result = bytearray()
- try:
- the_hash = rpc.call("getblockhash", [start_height])
- except JsonRpcError as e:
- return "", 0
- for i in range(count):
- header = rpc.call("getblockheader", [the_hash])
- #add header hex to result
- if "previousblockhash" in header:
- prevblockhash = header["previousblockhash"]
- else:
- prevblockhash = "00"*32 #genesis block
- h1 = struct.pack("<i32s32sIII", header["version"],
- binascii.unhexlify(prevblockhash)[::-1],
- binascii.unhexlify(header["merkleroot"])[::-1],
- header["time"], int(header["bits"], 16), header["nonce"])
- result.extend(h1)
- if "nextblockhash" not in header:
- break
- the_hash = header["nextblockhash"]
- return binascii.hexlify(result).decode("utf-8"), int(len(result)/80)
-
-def create_server_socket(hostport):
- server_sock = socket.socket()
- server_sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
- server_sock.bind(hostport)
- server_sock.listen(1)
- log("Listening for Electrum Wallet on " + str(hostport))
- return server_sock
-
-def run_electrum_server(rpc, txmonitor, hostport, ip_whitelist,
- poll_interval_listening, poll_interval_connected, certfile, keyfile):
- log("Starting electrum server")
- server_sock = create_server_socket(hostport)
- server_sock.settimeout(poll_interval_listening)
- while True:
- try:
- sock = None
- while sock == None:
- try:
- sock, addr = server_sock.accept()
- if not any([ip_address(addr[0]) in ipnet
- for ipnet in ip_whitelist]):
- debug(addr[0] + " not in whitelist, closing")
- raise ConnectionRefusedError()
- sock = ssl.wrap_socket(sock, server_side=True,
- certfile=certfile, keyfile=keyfile,
- ssl_version=ssl.PROTOCOL_SSLv23)
- except socket.timeout:
- on_heartbeat_listening(txmonitor)
- except (ConnectionRefusedError, ssl.SSLError):
- sock.close()
- sock = None
-
- log('Electrum connected from ' + str(addr))
- sock.settimeout(poll_interval_connected)
- recv_buffer = bytearray()
- while True:
- try:
- recv_data = sock.recv(4096)
- if not recv_data or len(recv_data) == 0:
- raise EOFError()
- recv_buffer.extend(recv_data)
- lb = recv_buffer.find(b'\n')
- if lb == -1:
- continue
- while lb != -1:
- line = recv_buffer[:lb].rstrip()
- recv_buffer = recv_buffer[lb + 1:]
- lb = recv_buffer.find(b'\n')
- handle_query(sock, line.decode("utf-8"), rpc,
- txmonitor)
- except socket.timeout:
- on_heartbeat_connected(sock, rpc, txmonitor)
- except (IOError, EOFError) as e:
- if isinstance(e, EOFError):
- log("Electrum wallet disconnected")
- else:
- log("IOError: " + repr(e))
- try:
- sock.close()
- except IOError:
- pass
- sock = None
- on_disconnect(txmonitor)
- time.sleep(0.2)
-
-def get_scriptpubkeys_to_monitor(rpc, config):
- log("Obtaining bitcoin addresses to monitor . . .")
- st = time.time()
- try:
- imported_addresses = set(rpc.call("getaddressesbyaccount",
- [transactionmonitor.ADDRESSES_LABEL]))
- debug("using deprecated accounts interface")
- except JsonRpcError:
- #bitcoin core 0.17 deprecates accounts, replaced with labels
- if transactionmonitor.ADDRESSES_LABEL in rpc.call("listlabels", []):
- imported_addresses = set(rpc.call("getaddressesbylabel",
- [transactionmonitor.ADDRESSES_LABEL]).keys())
- else:
- #no label, no addresses imported at all
- imported_addresses = set()
- debug("already-imported addresses = " + str(imported_addresses))
-
- deterministic_wallets = []
- for key in config.options("master-public-keys"):
- wal = deterministicwallet.parse_electrum_master_public_key(
- config.get("master-public-keys", key),
- int(config.get("bitcoin-rpc", "gap_limit")))
- deterministic_wallets.append(wal)
-
- #check whether these deterministic wallets have already been imported
- import_needed = False
- wallets_imported = 0
- spks_to_import = []
- for wal in deterministic_wallets:
- first_addr = hashes.script_to_address(wal.get_scriptpubkeys(change=0,
- from_index=0, count=1)[0], rpc)
- if first_addr not in imported_addresses:
- import_needed = True
- wallets_imported += 1
- for change in [0, 1]:
- spks_to_import.extend(wal.get_scriptpubkeys(change, 0,
- int(config.get("bitcoin-rpc", "initial_import_count"))))
- #check whether watch-only addresses have been imported
- watch_only_addresses = []
- for key in config.options("watch-only-addresses"):
- watch_only_addresses.extend(config.get("watch-only-addresses",
- key).split(' '))
- watch_only_addresses = set(watch_only_addresses)
- watch_only_addresses_to_import = []
- if not watch_only_addresses.issubset(imported_addresses):
- import_needed = True
- watch_only_addresses_to_import = (watch_only_addresses -
- imported_addresses)
-
- #if addresses need to be imported then return them
- if import_needed:
- addresses_to_import = [hashes.script_to_address(spk, rpc)
- for spk in spks_to_import]
- #TODO minus imported_addresses
- log("Importing " + str(wallets_imported) + " wallets and " +
- str(len(watch_only_addresses_to_import)) + " watch-only " +
- "addresses into the Bitcoin node")
- time.sleep(5)
- return (True, addresses_to_import + list(
- watch_only_addresses_to_import), None)
-
- #test
- # importing one det wallet and no addrs, two det wallets and no addrs
- # no det wallets and some addrs, some det wallets and some addrs
-
- #at this point we know we dont need to import any addresses
- #find which index the deterministic wallets are up to
- spks_to_monitor = []
- for wal in deterministic_wallets:
- for change in [0, 1]:
- spks_to_monitor.extend(wal.get_scriptpubkeys(change, 0,
- int(config.get("bitcoin-rpc", "initial_import_count"))))
- #loop until one address found that isnt imported
- while True:
- spk = wal.get_new_scriptpubkeys(change, count=1)[0]
- spks_to_monitor.append(spk)
- if hashes.script_to_address(spk, rpc) not in imported_addresses:
- break
- spks_to_monitor.pop()
- wal.rewind_one(change)
-
- spks_to_monitor.extend([hashes.address_to_script(addr, rpc)
- for addr in watch_only_addresses])
- et = time.time()
- log("Obtained list of addresses to monitor in " + str(et - st) + "sec")
- return False, spks_to_monitor, deterministic_wallets
-
-def obtain_rpc_username_password(datadir):
- if len(datadir.strip()) == 0:
- debug("no datadir configuration, checking in default location")
- systemname = platform.system()
- #paths from https://en.bitcoin.it/wiki/Data_directory
- if systemname == "Linux":
- datadir = os.path.expanduser("~/.bitcoin")
- elif systemname == "Windows":
- datadir = os.path.expandvars("%APPDATA%\Bitcoin")
- elif systemname == "Darwin": #mac os
- datadir = os.path.expanduser(
- "~/Library/Application Support/Bitcoin/")
- cookie_path = os.path.join(datadir, ".cookie")
- if not os.path.exists(cookie_path):
- log("Unable to find .cookie file, try setting `datadir` config")
- return None, None
- fd = open(cookie_path)
- username, password = fd.read().strip().split(":")
- fd.close()
- return username, password
-
-def main():
- global debug_fd
- if len(sys.argv) == 2:
- if sys.argv[1] == "--help":
- print("Usage: ./server.py <path/to/current/working/dir>\nRunning" +
- " without arg defaults to the directory you're in right now")
- return
- else:
- os.chdir(sys.argv[1])
- debug_fd = open("debug.log", "w")
- debug("current working directory is: " + os.getcwd())
- try:
- config = ConfigParser()
- config.read("config.cfg")
- config.options("master-public-keys")
- except NoSectionError:
- log("Non-existant configuration file `config.cfg`")
- return
- try:
- rpc_u = config.get("bitcoin-rpc", "rpc_user")
- rpc_p = config.get("bitcoin-rpc", "rpc_password")
- debug("obtaining auth from rpc_user/pass")
- except NoOptionError:
- rpc_u, rpc_p = obtain_rpc_username_password(config.get(
- "bitcoin-rpc", "datadir"))
- debug("obtaining auth from .cookie")
- if rpc_u == None:
- return
- rpc = JsonRpc(host = config.get("bitcoin-rpc", "host"),
- port = int(config.get("bitcoin-rpc", "port")),
- user = rpc_u, password = rpc_p,
- wallet_filename=config.get("bitcoin-rpc", "wallet_filename").strip())
-
- #TODO somewhere here loop until rpc works and fully sync'd, to allow
- # people to run this script without waiting for their node to fully
- # catch up sync'd when getblockchaininfo blocks == headers, or use
- # verificationprogress
- printed_error_msg = False
- while bestblockhash[0] == None:
- try:
- bestblockhash[0] = rpc.call("getbestblockhash", [])
- except JsonRpcError as e:
- if not printed_error_msg:
- log("Error with bitcoin json-rpc: " + repr(e))
- printed_error_msg = True
- time.sleep(5)
-
- import_needed, relevant_spks_addrs, deterministic_wallets = \
- get_scriptpubkeys_to_monitor(rpc, config)
- if import_needed:
- transactionmonitor.import_addresses(rpc, relevant_spks_addrs, debug,
- log)
- log("Done.\nIf recovering a wallet which already has existing " +
- "transactions, then\nrun the rescan script. If you're confident " +
- "that the wallets are new\nand empty then there's no need to " +
- "rescan, just restart this script")
- else:
- txmonitor = transactionmonitor.TransactionMonitor(rpc,
- deterministic_wallets, debug, log)
- if not txmonitor.build_address_history(relevant_spks_addrs):
- return
- hostport = (config.get("electrum-server", "host"),
- int(config.get("electrum-server", "port")))
- ip_whitelist = []
- for ip in config.get("electrum-server", "ip_whitelist").split(" "):
- if ip == "*":
- #matches everything
- ip_whitelist.append(ip_network("0.0.0.0/0"))
- ip_whitelist.append(ip_network("::0/0"))
- else:
- ip_whitelist.append(ip_network(ip, strict=False))
- poll_interval_listening = int(config.get("bitcoin-rpc",
- "poll_interval_listening"))
- poll_interval_connected = int(config.get("bitcoin-rpc",
- "poll_interval_connected"))
- certfile = config.get("electrum-server", "certfile")
- keyfile = config.get("electrum-server", "keyfile")
- run_electrum_server(rpc, txmonitor, hostport, ip_whitelist,
- poll_interval_listening, poll_interval_connected, certfile, keyfile)
-
-if __name__ == "__main__":
- main()
diff --git a/setup.cfg b/setup.cfg
@@ -0,0 +1,5 @@
+[aliases]
+test=pytest
+
+[tool:pytest]
+addopts = --pdb
diff --git a/setup.py b/setup.py
@@ -0,0 +1,27 @@
+from setuptools import setup, find_packages
+
+setup(
+ name="electrum-personal-server",
+ version="0.1.4.dev0",
+ description="Electrum Personal Server",
+ author="Chris Belcher",
+ license="MIT",
+ include_package_data=True,
+ packages=find_packages(exclude=["tests"]),
+ setup_requires=["pytest-runner"],
+ tests_require=["pytest"],
+ entry_points={
+ "console_scripts": [
+ "electrum-personal-server = electrumpersonalserver.server.common:main",
+ "electrum-personal-server-rescan = electrumpersonalserver.server.common:rescan",
+ ]
+ },
+ package_data={"electrumpersonalserver": ["certs/*"]},
+ data_files=[
+ ("etc/electrum-personal-server", ["config.cfg_sample"]),
+ ("share/doc/electrum-personal-server", ["README.md"]),
+ ],
+ # install_requires=[
+ # 'secp256k1'
+ # ]
+)
diff --git a/test/test_deterministic_wallets.py b/test/test_deterministic_wallets.py
@@ -1,7 +1,7 @@
import pytest
-from electrumpersonalserver import parse_electrum_master_public_key
+from electrumpersonalserver.server import parse_electrum_master_public_key
# electrum has its own tests here
#https://github.com/spesmilo/electrum/blob/03b40a3c0a7dd84e76bc0d0ea2ad390dafc92250/lib/tests/test_wallet_vertical.py
diff --git a/test/test_merkleproof.py b/test/test_merkleproof.py
@@ -1,8 +1,10 @@
import pytest
-from electrumpersonalserver import (convert_core_to_electrum_merkle_proof,
- hash_merkle_root)
+from electrumpersonalserver.server import (
+ convert_core_to_electrum_merkle_proof,
+ hash_merkle_root
+)
'''
# code for printing out proofs not longer than 80 per line
diff --git a/test/test_parse_mpks.py b/test/test_parse_mpks.py
@@ -1,7 +1,7 @@
import pytest
-from electrumpersonalserver import parse_electrum_master_public_key
+from electrumpersonalserver.server import parse_electrum_master_public_key
@pytest.mark.parametrize(
diff --git a/test/test_transactionmonitor.py b/test/test_transactionmonitor.py
@@ -1,8 +1,15 @@
import pytest
+import logging
-from electrumpersonalserver import (DeterministicWallet, TransactionMonitor,
- JsonRpcError, script_to_scripthash)
+from electrumpersonalserver.server import (
+ DeterministicWallet,
+ TransactionMonitor,
+ JsonRpcError,
+ script_to_scripthash
+)
+
+logger = logging.getLogger('ELECTRUMPERSONALSERVER-TEST')
class DummyJsonRpc(object):
"""
@@ -30,18 +37,18 @@ class DummyJsonRpc(object):
for t in self.txlist:
if t["hex"] == params[0]:
return t
- debugf(params[0])
+ logger.debug(params[0])
assert 0
elif method == "gettxout":
for u in self.utxoset:
if u["txid"] == params[0] and u["vout"] == params[1]:
return u
- debugf("txid = " + params[0] + " vout = " + str(params[1]))
+ logger.debug("txid = " + params[0] + " vout = " + str(params[1]))
assert 0
elif method == "getblockheader":
if params[0] in self.block_heights:
return {"height": self.block_heights[params[0]]}
- debugf(params[0])
+ logger.debug(params[0])
assert 0
elif method == "decodescript":
return {"addresses": [dummy_spk_to_address(params[0])]}
@@ -73,9 +80,6 @@ def dummy_spk_to_address(spk):
##spk is short for scriptPubKey
return spk + "-address"
-debugf = lambda x: print("[DEBUG] " + x)
-logf = lambda x: print("[ LOG] " + x)
-
deterministic_wallets = [DummyDeterministicWallet()]
dummy_id_g = [1000]
@@ -106,7 +110,7 @@ def create_dummy_funding_tx(confirmations=1, output_spk=None,
"blockhash": dummy_containing_block,
"hex": "placeholder-test-txhex" + str(dummy_id)
}
- debugf("created dummy tx: " + str(dummy_tx))
+ logger.debug("created dummy tx: " + str(dummy_tx))
return dummy_spk, containing_block_height, dummy_tx
def assert_address_history_tx(address_history, spk, height, txid, subscribed):
@@ -125,7 +129,7 @@ def test_single_tx():
rpc = DummyJsonRpc([dummy_tx], [],
{dummy_tx["blockhash"]: containing_block_height})
- txmonitor = TransactionMonitor(rpc, deterministic_wallets, debugf, logf)
+ txmonitor = TransactionMonitor(rpc, deterministic_wallets, logger)
assert txmonitor.build_address_history([dummy_spk])
assert len(txmonitor.address_history) == 1
assert_address_history_tx(txmonitor.address_history, spk=dummy_spk,
@@ -139,7 +143,7 @@ def test_two_txes():
rpc = DummyJsonRpc([dummy_tx1, dummy_tx2], [],
{dummy_tx1["blockhash"]: containing_block_height1,
dummy_tx2["blockhash"]: containing_block_height2})
- txmonitor = TransactionMonitor(rpc, deterministic_wallets, debugf, logf)
+ txmonitor = TransactionMonitor(rpc, deterministic_wallets, logger)
assert txmonitor.build_address_history([dummy_spk1, dummy_spk2])
assert len(txmonitor.address_history) == 2
assert_address_history_tx(txmonitor.address_history, spk=dummy_spk1,
@@ -167,7 +171,7 @@ def test_many_txes():
assert len(txes) == INITIAL_TX_COUNT
rpc = DummyJsonRpc(txes, [dummy_tx["vin"][0]], {})
- txmonitor = TransactionMonitor(rpc, deterministic_wallets, debugf, logf)
+ txmonitor = TransactionMonitor(rpc, deterministic_wallets, logger)
assert txmonitor.build_address_history([dummy_spk])
assert len(txmonitor.address_history) == 1
assert len(list(txmonitor.check_for_updated_txes())) == 0
@@ -193,7 +197,7 @@ def test_non_subscribed_confirmation():
rpc = DummyJsonRpc([dummy_tx], [dummy_tx["vin"][0]],
{dummy_tx["blockhash"]: containing_block_height})
- txmonitor = TransactionMonitor(rpc, deterministic_wallets, debugf, logf)
+ txmonitor = TransactionMonitor(rpc, deterministic_wallets, logger)
assert txmonitor.build_address_history([dummy_spk])
assert len(txmonitor.address_history) == 1
assert_address_history_tx(txmonitor.address_history, spk=dummy_spk,
@@ -213,7 +217,7 @@ def test_tx_arrival_then_confirmation():
rpc = DummyJsonRpc([], [dummy_tx["vin"][0]], {dummy_tx["blockhash"]:
containing_block_height})
- txmonitor = TransactionMonitor(rpc, deterministic_wallets, debugf, logf)
+ txmonitor = TransactionMonitor(rpc, deterministic_wallets, logger)
assert txmonitor.build_address_history([dummy_spk])
assert len(txmonitor.address_history) == 1
sh = script_to_scripthash(dummy_spk)
@@ -239,7 +243,7 @@ def test_unrelated_tx():
rpc = DummyJsonRpc([dummy_tx], [], {dummy_tx["blockhash"]:
containing_block_height})
- txmonitor = TransactionMonitor(rpc, deterministic_wallets, debugf, logf)
+ txmonitor = TransactionMonitor(rpc, deterministic_wallets, logger)
assert txmonitor.build_address_history([our_dummy_spk])
assert len(txmonitor.address_history) == 1
assert len(txmonitor.get_electrum_history(script_to_scripthash(
@@ -257,7 +261,7 @@ def test_duplicate_txid():
sh = script_to_scripthash(dummy_spk)
rpc = DummyJsonRpc([dummy_tx1, dummy_tx2], [], {dummy_tx1["blockhash"]:
containing_block_height1, dummy_tx2["blockhash"]: containing_block_height2, dummy_tx3["blockhash"]: containing_block_height3})
- txmonitor = TransactionMonitor(rpc, deterministic_wallets, debugf, logf)
+ txmonitor = TransactionMonitor(rpc, deterministic_wallets, logger)
assert txmonitor.build_address_history([dummy_spk])
assert len(txmonitor.get_electrum_history(sh)) == 1
txmonitor.subscribe_address(sh)
@@ -276,7 +280,7 @@ def test_address_reuse():
rpc = DummyJsonRpc([dummy_tx1], [], {dummy_tx1["blockhash"]:
containing_block_height1, dummy_tx2["blockhash"]:
containing_block_height2})
- txmonitor = TransactionMonitor(rpc, deterministic_wallets, debugf, logf)
+ txmonitor = TransactionMonitor(rpc, deterministic_wallets, logger)
assert txmonitor.build_address_history([dummy_spk1])
sh = script_to_scripthash(dummy_spk1)
assert len(txmonitor.get_electrum_history(sh)) == 1
@@ -294,7 +298,7 @@ def test_from_address():
rpc = DummyJsonRpc([input_tx, spending_tx], [],
{input_tx["blockhash"]: containing_block_height1,
spending_tx["blockhash"]: containing_block_height2})
- txmonitor = TransactionMonitor(rpc, deterministic_wallets, debugf, logf)
+ txmonitor = TransactionMonitor(rpc, deterministic_wallets, logger)
assert txmonitor.build_address_history([dummy_spk1])
sh = script_to_scripthash(dummy_spk1)
assert len(txmonitor.get_electrum_history(sh)) == 2
@@ -308,7 +312,7 @@ def test_tx_within_wallet():
rpc = DummyJsonRpc([dummy_tx1, dummy_tx2], [],
{dummy_tx1["blockhash"]: containing_block_height1,
dummy_tx2["blockhash"]: containing_block_height2})
- txmonitor = TransactionMonitor(rpc, deterministic_wallets, debugf, logf)
+ txmonitor = TransactionMonitor(rpc, deterministic_wallets, logger)
assert txmonitor.build_address_history([dummy_spk1, dummy_spk2])
assert len(txmonitor.get_electrum_history(script_to_scripthash(
dummy_spk1))) == 2
@@ -332,7 +336,7 @@ def test_overrun_gap_limit():
rpc = DummyJsonRpc([], [], {dummy_tx["blockhash"]: containing_block_height})
txmonitor = TransactionMonitor(rpc, [DummyImportDeterministicWallet()],
- debugf, logf)
+ logger)
assert txmonitor.build_address_history([dummy_spk])
assert len(txmonitor.address_history) == 1
assert len(list(txmonitor.check_for_updated_txes())) == 0
@@ -353,7 +357,7 @@ def test_conflicted_tx():
dummy_spk, containing_block_height, dummy_tx = create_dummy_funding_tx(
confirmations=-1)
rpc = DummyJsonRpc([dummy_tx], [], {})
- txmonitor = TransactionMonitor(rpc, deterministic_wallets, debugf, logf)
+ txmonitor = TransactionMonitor(rpc, deterministic_wallets, logger)
sh = script_to_scripthash(dummy_spk)
assert txmonitor.build_address_history([dummy_spk])
@@ -381,7 +385,7 @@ def test_reorg_finney_attack():
rpc = DummyJsonRpc([dummy_tx1], [dummy_tx1["vin"][0]],
{dummy_tx1["blockhash"]: containing_block_height1,
dummy_tx2["blockhash"]: containing_block_height2})
- txmonitor = TransactionMonitor(rpc, deterministic_wallets, debugf, logf)
+ txmonitor = TransactionMonitor(rpc, deterministic_wallets, logger)
assert txmonitor.build_address_history([dummy_spk1, dummy_spk2])
assert len(txmonitor.address_history) == 2
sh1 = script_to_scripthash(dummy_spk1)
@@ -411,7 +415,7 @@ def test_reorg_race_attack():
rpc = DummyJsonRpc([dummy_tx1], [],
{dummy_tx1["blockhash"]: containing_block_height1,
dummy_tx2["blockhash"]: containing_block_height2})
- txmonitor = TransactionMonitor(rpc, deterministic_wallets, debugf, logf)
+ txmonitor = TransactionMonitor(rpc, deterministic_wallets, logger)
assert txmonitor.build_address_history([dummy_spk1, dummy_spk2])
assert len(txmonitor.address_history) == 2
sh1 = script_to_scripthash(dummy_spk1)
@@ -438,7 +442,7 @@ def test_reorg_censor_tx():
rpc = DummyJsonRpc([dummy_tx1], [dummy_tx1["vin"][0]],
{dummy_tx1["blockhash"]: containing_block_height1})
- txmonitor = TransactionMonitor(rpc, deterministic_wallets, debugf, logf)
+ txmonitor = TransactionMonitor(rpc, deterministic_wallets, logger)
assert txmonitor.build_address_history([dummy_spk1])
assert len(txmonitor.address_history) == 1
sh = script_to_scripthash(dummy_spk1)
@@ -460,7 +464,7 @@ def test_reorg_different_block():
rpc = DummyJsonRpc([dummy_tx1], [],
{dummy_tx1["blockhash"]: containing_block_height1,
dummy_tx2["blockhash"]: containing_block_height2})
- txmonitor = TransactionMonitor(rpc, deterministic_wallets, debugf, logf)
+ txmonitor = TransactionMonitor(rpc, deterministic_wallets, logger)
assert txmonitor.build_address_history([dummy_spk1])
assert len(txmonitor.address_history) == 1
sh = script_to_scripthash(dummy_spk1)
@@ -482,7 +486,7 @@ def test_tx_safe_from_reorg():
dummy_spk1, containing_block_height1, dummy_tx1 = create_dummy_funding_tx()
rpc = DummyJsonRpc([dummy_tx1], [],
{dummy_tx1["blockhash"]: containing_block_height1})
- txmonitor = TransactionMonitor(rpc, deterministic_wallets, debugf, logf)
+ txmonitor = TransactionMonitor(rpc, deterministic_wallets, logger)
assert txmonitor.build_address_history([dummy_spk1])
assert len(list(txmonitor.check_for_updated_txes())) == 0
assert len(txmonitor.reorganizable_txes) == 1