2020-02-03 12:35:50 +01:00
|
|
|
#!/usr/bin/env python
|
|
|
|
|
#
|
|
|
|
|
# Electrum - lightweight Bitcoin client
|
|
|
|
|
# Copyright (C) 2015 Thomas Voegtlin
|
|
|
|
|
#
|
|
|
|
|
# Permission is hereby granted, free of charge, to any person
|
|
|
|
|
# obtaining a copy of this software and associated documentation files
|
|
|
|
|
# (the "Software"), to deal in the Software without restriction,
|
|
|
|
|
# including without limitation the rights to use, copy, modify, merge,
|
|
|
|
|
# publish, distribute, sublicense, and/or sell copies of the Software,
|
|
|
|
|
# and to permit persons to whom the Software is furnished to do so,
|
|
|
|
|
# subject to the following conditions:
|
|
|
|
|
#
|
|
|
|
|
# The above copyright notice and this permission notice shall be
|
|
|
|
|
# included in all copies or substantial portions of the Software.
|
|
|
|
|
#
|
|
|
|
|
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
|
|
|
|
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
|
|
|
|
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
|
|
|
|
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
|
|
|
|
|
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
|
|
|
|
|
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
|
|
|
|
|
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
|
|
|
# SOFTWARE.
|
|
|
|
|
import os
|
|
|
|
|
import ast
|
2023-03-13 15:15:50 +00:00
|
|
|
import datetime
|
2020-02-03 12:35:50 +01:00
|
|
|
import json
|
|
|
|
|
import copy
|
|
|
|
|
import threading
|
|
|
|
|
from collections import defaultdict
|
2020-02-24 18:26:49 +01:00
|
|
|
from typing import Dict, Optional, List, Tuple, Set, Iterable, NamedTuple, Sequence, TYPE_CHECKING, Union
|
2020-02-04 13:35:58 +01:00
|
|
|
import binascii
|
2023-03-13 15:15:50 +00:00
|
|
|
import time
|
2023-09-24 11:29:13 +02:00
|
|
|
from functools import partial
|
2023-03-13 15:15:50 +00:00
|
|
|
|
|
|
|
|
import attr
|
2020-02-03 12:35:50 +01:00
|
|
|
|
|
|
|
|
from . import util, bitcoin
|
2023-08-18 15:13:33 +02:00
|
|
|
from .util import profiler, WalletFileException, multisig_type, TxMinedInfo, bfh, MyEncoder
|
2023-02-27 10:31:21 +01:00
|
|
|
from .invoices import Invoice, Request
|
2020-02-03 12:35:50 +01:00
|
|
|
from .keystore import bip44_derivation
|
2024-02-26 18:54:21 +00:00
|
|
|
from .transaction import Transaction, TxOutpoint, tx_from_any, PartialTransaction, PartialTxOutput, BadHeaderMagic
|
2020-02-04 13:35:58 +01:00
|
|
|
from .logging import Logger
|
2021-10-05 09:13:01 +02:00
|
|
|
|
|
|
|
|
from .lnutil import LOCAL, REMOTE, HTLCOwner, ChannelType
|
2023-06-23 12:16:14 +02:00
|
|
|
from . import json_db
|
2021-10-05 09:13:01 +02:00
|
|
|
from .json_db import StoredDict, JsonDB, locked, modifier, StoredObject, stored_in, stored_as
|
2020-02-05 15:13:37 +01:00
|
|
|
from .plugin import run_hook, plugin_loaders
|
2023-03-13 15:15:50 +00:00
|
|
|
from .version import ELECTRUM_VERSION
|
2020-02-03 12:35:50 +01:00
|
|
|
|
2024-01-23 02:20:01 +00:00
|
|
|
if TYPE_CHECKING:
|
|
|
|
|
from .storage import WalletStorage
|
|
|
|
|
|
2020-02-13 20:00:12 +01:00
|
|
|
|
2023-08-18 15:13:33 +02:00
|
|
|
class WalletRequiresUpgrade(WalletFileException):
    """Raised when a wallet file uses an older seed_version and must be
    upgraded (see WalletDBUpgrader) before it can be opened."""
    pass
|
2023-10-10 16:57:44 +02:00
|
|
|
|
|
|
|
|
|
2023-08-18 15:13:33 +02:00
|
|
|
class WalletRequiresSplit(WalletFileException):
    """Raised when a legacy multi-account wallet file must be split into
    multiple single-account wallet files before it can be used.

    ``split_data`` is the list of per-wallet data dicts to split into
    (as produced by WalletDBUpgrader.get_split_accounts).
    """
    def __init__(self, split_data):
        super().__init__()
        self._split_data = split_data
|
2020-02-13 20:00:12 +01:00
|
|
|
|
2020-02-03 12:35:50 +01:00
|
|
|
|
2023-10-10 16:57:44 +02:00
|
|
|
class WalletUnfinished(WalletFileException):
    """Raised when a wallet file exists but its creation was never completed.

    Carries the partially-initialised ``WalletDB`` so the caller can resume
    or inspect the unfinished wallet.
    """
    def __init__(self, wallet_db: 'WalletDB'):
        super().__init__()
        self._wallet_db = wallet_db
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# seed_version is now used for the version of the wallet file

OLD_SEED_VERSION = 4        # electrum versions < 2.0
NEW_SEED_VERSION = 11       # electrum versions >= 2.0
FINAL_SEED_VERSION = 58     # electrum >= 2.7 will set this to prevent
                            # old versions from overwriting new format
|
|
|
|
|
|
|
|
|
|
|
2021-10-05 09:13:01 +02:00
|
|
|
@stored_in('tx_fees', tuple)
class TxFeesValue(NamedTuple):
    """Cached fee information for a transaction, persisted in the
    'tx_fees' db dict (stored as a tuple)."""
    # fee in satoshis, or None if unknown
    fee: Optional[int] = None
    # True if we derived the fee ourselves (e.g. from inputs/outputs),
    # as opposed to receiving it from an external source
    is_calculated_by_us: bool = False
    # number of inputs of the tx, or None if unknown
    num_inputs: Optional[int] = None
|
|
|
|
|
|
|
|
|
|
|
2021-10-05 09:13:01 +02:00
|
|
|
@stored_as('db_metadata')
@attr.s
class DBMetadata(StoredObject):
    """Metadata about the wallet file itself, persisted under 'db_metadata'.

    Both fields may be None for wallet files created before this metadata
    was introduced.
    """
    # unix timestamp of when the wallet file was created
    creation_timestamp = attr.ib(default=None, type=int)
    # electrum version string of the client that created the file
    first_electrum_version_used = attr.ib(default=None, type=str)

    def to_str(self) -> str:
        """Return a human-readable summary of the file's provenance,
        or "unknown" if either field is missing."""
        ts = self.creation_timestamp
        ver = self.first_electrum_version_used
        if ts is None or ver is None:
            return "unknown"
        date_str = datetime.date.fromtimestamp(ts).isoformat()
        return f"using {ver}, on {date_str}"
|
|
|
|
|
|
|
|
|
|
|
wallet_db version 52: break non-homogeneous multisig wallets
- case 1: in version 4.4.1, 4.4.2, the qml GUI wizard allowed creating multisig wallets with an old_mpk as cosigner.
- case 2: in version 4.4.0, 4.4.1, 4.4.2, the qml GUI wizard allowed creating multisig wallets with mixed xpub/Ypub/Zpub.
The corresponding missing input validation was a bug in the wizard, it was unintended behaviour. Validation was added in d2cf21fc2bcf79f07b7e41178cd3e4ca9e3d9f68. Note however that there might be users who created such wallet files.
Re case 1 wallet files: there is no version of Electrum that allows spending from such a wallet. Coins received at addresses are not burned, however it is technically challenging to spend them. (unless the multisig can spend without needing the old_mpk cosigner in the quorum).
Re case 2 wallet files: it is possible to create a corresponding spending wallet for such a multisig, however it is a bit tricky. The script type for the addresses in such a heterogeneous xpub wallet is based on the xpub_type of the first keystore. So e.g. given a wallet file [Yprv1, Zpub2] it will have sh(wsh()) scripts, and the cosigner should create a wallet file [Ypub1, Zprv2] (same order).
Technically case 2 wallet files could be "fixed" automatically by converting the xpub types as part of a wallet_db upgrade. However if the wallet files also contain seeds, those cannot be converted ("standard" vs "segwit" electrum seed).
Case 1 wallet files are not possible to "fix" automatically as the cosigner using the old_mpk is not bip32 based.
It is unclear if there are *any* users out there affected by this. I suspect for case 1 it is very likely there are none (not many people have pre-2.0 electrum seeds which were never supported as part of a multisig who would also now try to create a multisig using them); for case 2 however there might be.
This commit breaks both case 1 and case 2 wallets: these wallet files can no longer be opened in new Electrum, an error message is shown and the crash reporter opens. If any potential users opt to send crash reports, at least we will know they exist and can help them recover.
2023-05-11 13:48:54 +00:00
|
|
|
# note: subclassing WalletFileException for some specific cases
# allows the crash reporter to distinguish them and open
# separate tracking issues
class WalletFileExceptionVersion51(WalletFileException):
    """Raised for wallet files broken by the version-51 db upgrade; a
    distinct subclass so the crash reporter can track these separately."""
    pass
|
|
|
|
|
|
2023-10-10 16:57:44 +02:00
|
|
|
|
2023-06-23 12:16:14 +02:00
|
|
|
# register dicts that require value conversions not handled by constructor
json_db.register_dict('transactions', lambda x: tx_from_any(x, deserialize=False), None)
# hex str -> bytes (bytes.fromhex used directly instead of a trivial lambda)
json_db.register_dict('data_loss_protect_remote_pcp', bytes.fromhex, None)
json_db.register_dict('contacts', tuple, None)

# register dicts that require key conversion
for key in [
        'adds', 'locked_in', 'settles', 'fails', 'fee_updates', 'buckets',
        'unacked_updates', 'unfulfilled_htlcs', 'fail_htlc_reasons', 'onion_keys']:
    json_db.register_dict_key(key, int)
# 'log' keys are HTLCOwner ints (was a single-element loop; direct call is clearer)
json_db.register_dict_key('log', lambda x: HTLCOwner(int(x)))
for key in ['locked_in', 'fails', 'settles']:
    json_db.register_parent_key(key, lambda x: HTLCOwner(int(x)))
|
|
|
|
|
|
wallet_db version 52: break non-homogeneous multisig wallets
- case 1: in version 4.4.1, 4.4.2, the qml GUI wizard allowed creating multisig wallets with an old_mpk as cosigner.
- case 2: in version 4.4.0, 4.4.1, 4.4.2, the qml GUI wizard allowed creating multisig wallets with mixed xpub/Ypub/Zpub.
The corresponding missing input validation was a bug in the wizard, it was unintended behaviour. Validation was added in d2cf21fc2bcf79f07b7e41178cd3e4ca9e3d9f68. Note however that there might be users who created such wallet files.
Re case 1 wallet files: there is no version of Electrum that allows spending from such a wallet. Coins received at addresses are not burned, however it is technically challenging to spend them. (unless the multisig can spend without needing the old_mpk cosigner in the quorum).
Re case 2 wallet files: it is possible to create a corresponding spending wallet for such a multisig, however it is a bit tricky. The script type for the addresses in such a heterogeneous xpub wallet is based on the xpub_type of the first keystore. So e.g. given a wallet file [Yprv1, Zpub2] it will have sh(wsh()) scripts, and the cosigner should create a wallet file [Ypub1, Zprv2] (same order).
Technically case 2 wallet files could be "fixed" automatically by converting the xpub types as part of a wallet_db upgrade. However if the wallet files also contain seeds, those cannot be converted ("standard" vs "segwit" electrum seed).
Case 1 wallet files are not possible to "fix" automatically as the cosigner using the old_mpk is not bip32 based.
It is unclear if there are *any* users out there affected by this. I suspect for case 1 it is very likely there are none (not many people have pre-2.0 electrum seeds which were never supported as part of a multisig who would also now try to create a multisig using them); for case 2 however there might be.
This commit breaks both case 1 and case 2 wallets: these wallet files can no longer be opened in new Electrum, an error message is shown and the crash reporter opens. If any potential users opt to send crash reports, at least we will know they exist and can help them recover.
2023-05-11 13:48:54 +00:00
|
|
|
|
2023-08-18 15:13:33 +02:00
|
|
|
class WalletDBUpgrader(Logger):
    """Upgrades raw wallet-file data (a plain dict) in place, through the
    successive seed_version migrations, up to FINAL_SEED_VERSION."""

    def __init__(self, data):
        Logger.__init__(self)
        # the raw wallet data dict; mutated in place by the _convert_* methods
        self.data = data
|
|
|
|
|
|
|
|
|
|
def get(self, key, default=None):
|
|
|
|
|
return self.data.get(key, default)
|
|
|
|
|
|
|
|
|
|
def put(self, key, value):
|
|
|
|
|
if value is not None:
|
|
|
|
|
self.data[key] = value
|
|
|
|
|
else:
|
|
|
|
|
self.data.pop(key, None)
|
2020-02-03 12:35:50 +01:00
|
|
|
|
|
|
|
|
def requires_split(self):
|
|
|
|
|
d = self.get('accounts', {})
|
|
|
|
|
return len(d) > 1
|
|
|
|
|
|
2020-02-05 15:13:37 +01:00
|
|
|
    def get_split_accounts(self):
        """Return a list of data dicts, one per new wallet file, for splitting
        a legacy multi-account wallet into single-account wallets.

        Each returned dict is a deep copy of self.data with the 'accounts'
        entry reduced to one account and a 'suffix' added for naming the new
        file. Raises WalletFileException for unsupported wallet types.
        NOTE(review): returns None (bare return) when there are fewer than
        two accounts, but [] or a raise on other paths — callers should use
        requires_split() first.
        """
        result = []
        # backward compatibility with old wallets
        d = self.get('accounts', {})
        if len(d) < 2:
            return
        wallet_type = self.get('wallet_type')
        if wallet_type == 'old':
            # old-seed wallets have exactly one deterministic account ('0')
            # and one imported account ('/x')
            assert len(d) == 2
            data1 = copy.deepcopy(self.data)
            data1['accounts'] = {'0': d['0']}
            data1['suffix'] = 'deterministic'
            data2 = copy.deepcopy(self.data)
            data2['accounts'] = {'/x': d['/x']}
            # the imported half carries no seed/mpk material
            data2['seed'] = None
            data2['seed_version'] = None
            data2['master_public_key'] = None
            data2['wallet_type'] = 'imported'
            data2['suffix'] = 'imported'
            result = [data1, data2]

        # note: do not add new hardware types here, this code is for converting legacy wallets
        elif wallet_type in ['bip44', 'trezor', 'keepkey', 'ledger', 'btchip']:
            mpk = self.get('master_public_keys')
            for k in d.keys():
                i = int(k)
                x = d[k]
                if x.get("pending"):
                    continue
                xpub = mpk["x/%d'"%i]
                new_data = copy.deepcopy(self.data)
                # save account, derivation and xpub at index 0
                new_data['accounts'] = {'0': x}
                new_data['master_public_keys'] = {"x/0'": xpub}
                new_data['derivation'] = bip44_derivation(k)
                new_data['suffix'] = k
                result.append(new_data)
        else:
            raise WalletFileException(f'Unsupported wallet type for split: {wallet_type}')
        return result
|
|
|
|
|
|
|
|
|
|
def requires_upgrade(self):
|
|
|
|
|
return self.get_seed_version() < FINAL_SEED_VERSION
|
|
|
|
|
|
|
|
|
|
    @profiler
    def upgrade(self):
        """Run every db migration in order, bringing self.data up to
        FINAL_SEED_VERSION.

        Each _convert_* method internally checks (via seed_version guards)
        whether it applies, so running the full chain is always safe.
        The order of calls is significant and must not be changed.
        """
        self.logger.info('upgrading wallet format')
        self._convert_imported()
        self._convert_wallet_type()
        self._convert_account()
        self._convert_version_13_b()
        self._convert_version_14()
        self._convert_version_15()
        self._convert_version_16()
        self._convert_version_17()
        self._convert_version_18()
        self._convert_version_19()
        self._convert_version_20()
        self._convert_version_21()
        self._convert_version_22()
        self._convert_version_23()
        self._convert_version_24()
        self._convert_version_25()
        self._convert_version_26()
        self._convert_version_27()
        self._convert_version_28()
        self._convert_version_29()
        self._convert_version_30()
        self._convert_version_31()
        self._convert_version_32()
        self._convert_version_33()
        self._convert_version_34()
        self._convert_version_35()
        self._convert_version_36()
        self._convert_version_37()
        self._convert_version_38()
        self._convert_version_39()
        self._convert_version_40()
        self._convert_version_41()
        self._convert_version_42()
        self._convert_version_43()
        self._convert_version_44()
        self._convert_version_45()
        self._convert_version_46()
        self._convert_version_47()
        self._convert_version_48()
        self._convert_version_49()
        self._convert_version_50()
        self._convert_version_51()
        self._convert_version_52()
        self._convert_version_53()
        self._convert_version_54()
        self._convert_version_55()
        self._convert_version_56()
        self._convert_version_57()
        self._convert_version_58()
        self.put('seed_version', FINAL_SEED_VERSION)  # just to be sure
|
|
|
|
|
|
|
|
|
|
    def _convert_wallet_type(self):
        """Migrate pre-v13 wallets to the keystore-based format.

        Builds a 'keystore' dict (or 'x1/'..'xN/' dicts for multisig/2fa)
        from the legacy top-level seed/mpk/xpub fields, then removes those
        legacy fields. No-op if the file already has a keystore.
        """
        if not self._is_upgrade_method_needed(0, 13):
            return

        wallet_type = self.get('wallet_type')
        # 'btchip' was renamed to 'ledger'
        if wallet_type == 'btchip': wallet_type = 'ledger'
        # already converted (or an imported wallet, which needs no keystore here)
        if self.get('keystore') or self.get('x1/') or wallet_type=='imported':
            return False
        assert not self.requires_split()
        seed_version = self.get_seed_version()
        seed = self.get('seed')
        xpubs = self.get('master_public_keys')
        xprvs = self.get('master_private_keys', {})
        mpk = self.get('master_public_key')
        keypairs = self.get('keypairs')
        key_type = self.get('key_type')
        if seed_version == OLD_SEED_VERSION or wallet_type == 'old':
            # pre-2.0 electrum seed / master public key
            d = {
                'type': 'old',
                'seed': seed,
                'mpk': mpk,
            }
            self.put('wallet_type', 'standard')
            self.put('keystore', d)

        elif key_type == 'imported':
            # imported private keys
            d = {
                'type': 'imported',
                'keypairs': keypairs,
            }
            self.put('wallet_type', 'standard')
            self.put('keystore', d)

        elif wallet_type in ['xpub', 'standard']:
            xpub = xpubs["x/"]
            xprv = xprvs.get("x/")
            d = {
                'type': 'bip32',
                'xpub': xpub,
                'xprv': xprv,
                'seed': seed,
            }
            self.put('wallet_type', 'standard')
            self.put('keystore', d)

        elif wallet_type in ['bip44']:
            xpub = xpubs["x/0'"]
            xprv = xprvs.get("x/0'")
            d = {
                'type': 'bip32',
                'xpub': xpub,
                'xprv': xprv,
            }
            self.put('wallet_type', 'standard')
            self.put('keystore', d)

        # note: do not add new hardware types here, this code is for converting legacy wallets
        elif wallet_type in ['trezor', 'keepkey', 'ledger']:
            xpub = xpubs["x/0'"]
            derivation = self.get('derivation', bip44_derivation(0))
            d = {
                'type': 'hardware',
                'hw_type': wallet_type,
                'xpub': xpub,
                'derivation': derivation,
            }
            self.put('wallet_type', 'standard')
            self.put('keystore', d)

        elif (wallet_type == '2fa') or multisig_type(wallet_type):
            # one keystore per cosigner, stored under 'x1/', 'x2/', ...
            for key in xpubs.keys():
                d = {
                    'type': 'bip32',
                    'xpub': xpubs[key],
                    'xprv': xprvs.get(key),
                }
                # only the first cosigner can carry the seed
                if key == 'x1/' and seed:
                    d['seed'] = seed
                self.put(key, d)
        else:
            raise WalletFileException('Unable to tell wallet type. Is this even a wallet file?')
        # remove junk
        self.put('master_public_key', None)
        self.put('master_public_keys', None)
        self.put('master_private_keys', None)
        self.put('derivation', None)
        self.put('seed', None)
        self.put('keypairs', None)
        self.put('key_type', None)
|
|
|
|
|
|
|
|
|
|
    def _convert_version_13_b(self):
        """Finish the ambiguous version-13 migration for imported keystores.

        Rebuilds the 'addresses' dict ({'receiving': [...], 'change': []})
        from the imported keypairs' pubkeys, then sets seed_version to 13.
        """
        # version 13 is ambiguous, and has an earlier and a later structure
        if not self._is_upgrade_method_needed(0, 13):
            return

        if self.get('wallet_type') == 'standard':
            if self.get('keystore').get('type') == 'imported':
                pubkeys = self.get('keystore').get('keypairs').keys()
                d = {'change': []}
                receiving_addresses = []
                for pubkey in pubkeys:
                    # legacy imported keys are always p2pkh
                    addr = bitcoin.pubkey_to_address('p2pkh', pubkey)
                    receiving_addresses.append(addr)
                d['receiving'] = receiving_addresses
                self.put('addresses', d)
                self.put('pubkeys', None)

        self.put('seed_version', 13)
|
|
|
|
|
|
|
|
|
|
    def _convert_version_14(self):
        """Convert imported wallets to the 3.0 format (db version 14).

        - 'imported' wallets: the address list becomes a dict addr -> None.
        - 'standard' wallets with an imported keystore become wallet_type
          'imported', with per-address {'pubkey', 'redeem_script', 'type'}
          entries rebuilt from the keystore's keypairs.
        """
        # convert imported wallets for 3.0
        if not self._is_upgrade_method_needed(13, 13):
            return

        if self.get('wallet_type') =='imported':
            addresses = self.get('addresses')
            if type(addresses) is list:
                addresses = dict([(x, None) for x in addresses])
                self.put('addresses', addresses)
        elif self.get('wallet_type') == 'standard':
            if self.get('keystore').get('type')=='imported':
                addresses = set(self.get('addresses').get('receiving'))
                pubkeys = self.get('keystore').get('keypairs').keys()
                # every stored address must correspond to exactly one pubkey
                assert len(addresses) == len(pubkeys)
                d = {}
                for pubkey in pubkeys:
                    addr = bitcoin.pubkey_to_address('p2pkh', pubkey)
                    assert addr in addresses
                    d[addr] = {
                        'pubkey': pubkey,
                        'redeem_script': None,
                        'type': 'p2pkh'
                    }
                self.put('addresses', d)
                self.put('pubkeys', None)
                self.put('wallet_type', 'imported')
        self.put('seed_version', 14)
|
|
|
|
|
|
|
|
|
|
def _convert_version_15(self):
|
|
|
|
|
if not self._is_upgrade_method_needed(14, 14):
|
|
|
|
|
return
|
|
|
|
|
if self.get('seed_type') == 'segwit':
|
|
|
|
|
# should not get here; get_seed_version should have caught this
|
|
|
|
|
raise Exception('unsupported derivation (development segwit, v14)')
|
|
|
|
|
self.put('seed_version', 15)
|
|
|
|
|
|
|
|
|
|
    def _convert_version_16(self):
        """Sanitise imported-wallet addresses (db version 16).

        Drops entries that are not valid addresses (scrubbing their traces
        from history/labels/payment_requests/frozen lists) and normalises
        per-address details from None to {}.
        """
        # fixes issue #3193 for Imported_Wallets with addresses
        # also, previous versions allowed importing any garbage as an address
        # which we now try to remove, see pr #3191
        if not self._is_upgrade_method_needed(15, 15):
            return

        def remove_address(addr):
            # scrub all traces of `addr` from auxiliary db structures

            def remove_from_dict(dict_name):
                d = self.get(dict_name, None)
                if d is not None:
                    d.pop(addr, None)
                    self.put(dict_name, d)

            def remove_from_list(list_name):
                lst = self.get(list_name, None)
                if lst is not None:
                    s = set(lst)
                    s -= {addr}
                    self.put(list_name, list(s))

            # note: we don't remove 'addr' from self.get('addresses')
            remove_from_dict('addr_history')
            remove_from_dict('labels')
            remove_from_dict('payment_requests')
            remove_from_list('frozen_addresses')

        if self.get('wallet_type') == 'imported':
            addresses = self.get('addresses')
            assert isinstance(addresses, dict)
            addresses_new = dict()
            for address, details in addresses.items():
                if not bitcoin.is_address(address):
                    remove_address(address)
                    continue
                if details is None:
                    addresses_new[address] = {}
                else:
                    addresses_new[address] = details
            self.put('addresses', addresses_new)

        self.put('seed_version', 16)
|
|
|
|
|
|
|
|
|
|
    def _convert_version_17(self):
        """Build 'spent_outpoints' from stored transactions (db version 17).

        Deletes the obsolete 'pruned_txo' structure and constructs
        spent_outpoints as {prevout_txid: {prevout_idx(str): spender_txid}}.
        """
        # delete pruned_txo; construct spent_outpoints
        if not self._is_upgrade_method_needed(16, 16):
            return

        self.put('pruned_txo', None)

        transactions = self.get('transactions', {})  # txid -> raw_tx
        spent_outpoints = defaultdict(dict)
        for txid, raw_tx in transactions.items():
            tx = Transaction(raw_tx)
            for txin in tx.inputs():
                # coinbase inputs do not spend a real outpoint
                if txin.is_coinbase_input():
                    continue
                prevout_hash = txin.prevout.txid.hex()
                prevout_n = txin.prevout.out_idx
                spent_outpoints[prevout_hash][str(prevout_n)] = txid
        self.put('spent_outpoints', spent_outpoints)

        self.put('seed_version', 17)
|
|
|
|
|
|
|
|
|
|
def _convert_version_18(self):
|
|
|
|
|
# delete verified_tx3 as its structure changed
|
|
|
|
|
if not self._is_upgrade_method_needed(17, 17):
|
|
|
|
|
return
|
|
|
|
|
self.put('verified_tx3', None)
|
|
|
|
|
self.put('seed_version', 18)
|
|
|
|
|
|
|
|
|
|
def _convert_version_19(self):
|
|
|
|
|
# delete tx_fees as its structure changed
|
|
|
|
|
if not self._is_upgrade_method_needed(18, 18):
|
|
|
|
|
return
|
|
|
|
|
self.put('tx_fees', None)
|
|
|
|
|
self.put('seed_version', 19)
|
|
|
|
|
|
|
|
|
|
    def _convert_version_20(self):
        # store 'derivation' (prefix) and 'root_fingerprint' in all xpub-based keystores.
        # store explicit None values if we cannot retroactively determine them
        if not self._is_upgrade_method_needed(19, 19):
            return

        from .bip32 import BIP32Node, convert_bip32_intpath_to_strpath
        # note: This upgrade method reimplements bip32.root_fp_and_der_prefix_from_xkey.
        # This is done deliberately, to avoid introducing that method as a dependency to this upgrade.
        # 'keystore' is the single-sig keystore; 'x1/'..'x15/' are multisig cosigner keystores.
        for ks_name in ('keystore', *['x{}/'.format(i) for i in range(1, 16)]):
            ks = self.get(ks_name, None)
            if ks is None: continue
            xpub = ks.get('xpub', None)
            if xpub is None: continue  # not an xpub-based keystore
            bip32node = BIP32Node.from_xkey(xpub)
            # derivation prefix: only recoverable for depth 0 ('m') and depth 1
            # (single child number); otherwise stays None.
            derivation_prefix = ks.get('derivation', None)
            if derivation_prefix is None:
                assert bip32node.depth >= 0, bip32node.depth
                if bip32node.depth == 0:
                    derivation_prefix = 'm'
                elif bip32node.depth == 1:
                    child_number_int = int.from_bytes(bip32node.child_number, 'big')
                    derivation_prefix = convert_bip32_intpath_to_strpath([child_number_int])
            ks['derivation'] = derivation_prefix
            # root fingerprint: prefer the Coldcard-specific 'ckcc_xfp' field if present
            # (stored as a little-endian int), else derive from the node when possible.
            root_fingerprint = ks.get('ckcc_xfp', None)
            if root_fingerprint is not None:
                root_fingerprint = root_fingerprint.to_bytes(4, byteorder="little", signed=False).hex().lower()
            if root_fingerprint is None:
                if bip32node.depth == 0:
                    # the node itself is the root
                    root_fingerprint = bip32node.calc_fingerprint_of_this_node().hex().lower()
                elif bip32node.depth == 1:
                    # a depth-1 node carries its parent's (= root's) fingerprint
                    root_fingerprint = bip32node.fingerprint.hex()
            ks['root_fingerprint'] = root_fingerprint
            ks.pop('ckcc_xfp', None)  # superseded by 'root_fingerprint'
            self.put(ks_name, ks)

        self.put('seed_version', 20)
|
|
|
|
|
|
|
|
|
|
def _convert_version_21(self):
|
|
|
|
|
if not self._is_upgrade_method_needed(20, 20):
|
|
|
|
|
return
|
|
|
|
|
channels = self.get('channels')
|
|
|
|
|
if channels:
|
|
|
|
|
for channel in channels:
|
|
|
|
|
channel['state'] = 'OPENING'
|
|
|
|
|
self.put('channels', channels)
|
|
|
|
|
self.put('seed_version', 21)
|
|
|
|
|
|
|
|
|
|
    def _convert_version_22(self):
        # construct prevouts_by_scripthash
        if not self._is_upgrade_method_needed(21, 21):
            return

        from .bitcoin import script_to_scripthash
        # Build the index scripthash -> [("txid:idx", value_in_sats), ...]
        # over all outputs of all stored transactions.
        transactions = self.get('transactions', {})  # txid -> raw_tx
        prevouts_by_scripthash = defaultdict(list)
        for txid, raw_tx in transactions.items():
            tx = Transaction(raw_tx)
            for idx, txout in enumerate(tx.outputs()):
                outpoint = f"{txid}:{idx}"
                scripthash = script_to_scripthash(txout.scriptpubkey.hex())
                prevouts_by_scripthash[scripthash].append((outpoint, txout.value))
        self.put('prevouts_by_scripthash', prevouts_by_scripthash)

        self.put('seed_version', 22)
|
|
|
|
|
|
|
|
|
|
    def _convert_version_23(self):
        # Restructure channel data: hoist the revocation store out of
        # remote_config, and convert per-side fee-update lists into dicts
        # keyed by stringified index.
        if not self._is_upgrade_method_needed(22, 22):
            return
        channels = self.get('channels', [])
        LOCAL = 1
        REMOTE = -1
        for c in channels:
            # move revocation store from remote_config
            r = c['remote_config'].pop('revocation_store')
            c['revocation_store'] = r
            # convert fee updates
            log = c.get('log', {})
            for sub in LOCAL, REMOTE:
                l = log[str(sub)]['fee_updates']
                d = {}
                for i, fu in enumerate(l):
                    # flatten fu['ctns'] into explicit local/remote ctn fields
                    d[str(i)] = {
                        'rate':fu['rate'],
                        'ctn_local':fu['ctns'][str(LOCAL)],
                        'ctn_remote':fu['ctns'][str(REMOTE)]
                    }
                log[str(int(sub))]['fee_updates'] = d
        self.data['channels'] = channels

        self.data['seed_version'] = 23
|
|
|
|
|
|
2020-02-04 12:11:18 +01:00
|
|
|
    def _convert_version_24(self):
        # Convert several list-based structures to dicts:
        # revocation-store buckets, the channels list itself, and txi/txo.
        if not self._is_upgrade_method_needed(23, 23):
            return
        channels = self.get('channels', [])
        for c in channels:
            # convert revocation store to dict
            r = c['revocation_store']
            d = {}
            # buckets were a fixed-size list of 49 slots; keep only non-empty ones
            for i in range(49):
                v = r['buckets'][i]
                if v is not None:
                    d[str(i)] = v
            r['buckets'] = d
            c['revocation_store'] = r
        # convert channels to dict keyed by channel_id
        self.data['channels'] = {x['channel_id']: x for x in channels}
        # convert txi & txo:
        # txi: addr -> list of (ser, v) pairs  =>  addr -> {ser: v}
        txi = self.get('txi', {})
        for tx_hash, d in list(txi.items()):
            d2 = {}
            for addr, l in d.items():
                d2[addr] = {}
                for ser, v in l:
                    d2[addr][ser] = v
            txi[tx_hash] = d2
        self.data['txi'] = txi
        # txo: addr -> list of (n, v, cb) triples  =>  addr -> {str(n): (v, cb)}
        txo = self.get('txo', {})
        for tx_hash, d in list(txo.items()):
            d2 = {}
            for addr, l in d.items():
                d2[addr] = {}
                for n, v, cb in l:
                    d2[addr][str(n)] = (v, cb)
            txo[tx_hash] = d2
        self.data['txo'] = txo

        self.data['seed_version'] = 24
|
2020-02-03 12:35:50 +01:00
|
|
|
|
2020-02-19 11:26:03 +01:00
|
|
|
    def _convert_version_25(self):
        from .crypto import sha256
        if not self._is_upgrade_method_needed(24, 24):
            return
        # add 'type' field to onchain requests
        PR_TYPE_ONCHAIN = 0
        requests = self.data.get('payment_requests', {})
        for k, r in list(requests.items()):
            # on-chain requests are keyed by their address
            if r.get('address') == k:
                requests[k] = {
                    'address': r['address'],
                    'amount': r.get('amount'),
                    'exp': r.get('exp'),
                    'id': r.get('id'),
                    'memo': r.get('memo'),
                    'time': r.get('time'),
                    'type': PR_TYPE_ONCHAIN,
                }
        # delete bip70 invoices
        # note: this upgrade was changed ~2 years after-the-fact to delete instead of converting
        invoices = self.data.get('invoices', {})
        for k, r in list(invoices.items()):
            # bip70 invoices are keyed by a truncated hash of their raw payload;
            # recompute it to identify them.
            # NOTE(review): assumes every invoice entry carries a 'hex' field —
            # a missing field would raise here; confirm against old schemas.
            data = r.get("hex")
            pr_id = sha256(bytes.fromhex(data))[0:16].hex()
            if pr_id != k:
                continue  # not a bip70 invoice; keep it
            del invoices[k]
        self.data['seed_version'] = 25
|
|
|
|
|
|
2020-02-22 18:26:52 +01:00
|
|
|
    def _convert_version_26(self):
        # Merge the separate 'lightning_channel_timestamps' table into each
        # channel as 'funding_height'/'closing_height' triples, then drop it.
        if not self._is_upgrade_method_needed(25, 25):
            return
        channels = self.data.get('channels', {})
        channel_timestamps = self.data.pop('lightning_channel_timestamps', {})
        for channel_id, c in channels.items():
            item = channel_timestamps.get(channel_id)
            if item:
                funding_txid, funding_height, funding_timestamp, closing_txid, closing_height, closing_timestamp = item
                if funding_txid:
                    c['funding_height'] = funding_txid, funding_height, funding_timestamp
                if closing_txid:
                    c['closing_height'] = closing_txid, closing_height, closing_timestamp
        self.data['seed_version'] = 26
|
|
|
|
|
|
2020-03-26 06:32:12 +01:00
|
|
|
def _convert_version_27(self):
|
|
|
|
|
if not self._is_upgrade_method_needed(26, 26):
|
|
|
|
|
return
|
|
|
|
|
channels = self.data.get('channels', {})
|
|
|
|
|
for channel_id, c in channels.items():
|
|
|
|
|
c['local_config']['htlc_minimum_msat'] = 1
|
|
|
|
|
self.data['seed_version'] = 27
|
|
|
|
|
|
2020-03-31 12:43:43 +02:00
|
|
|
def _convert_version_28(self):
|
|
|
|
|
if not self._is_upgrade_method_needed(27, 27):
|
|
|
|
|
return
|
|
|
|
|
channels = self.data.get('channels', {})
|
|
|
|
|
for channel_id, c in channels.items():
|
2020-04-06 16:53:48 +02:00
|
|
|
c['local_config']['channel_seed'] = None
|
2020-03-31 12:43:43 +02:00
|
|
|
self.data['seed_version'] = 28
|
|
|
|
|
|
2020-05-31 12:49:49 +02:00
|
|
|
    def _convert_version_29(self):
        # Normalize invoices and payment requests into a common schema:
        # shared fields (type/message/amount/exp/time) plus type-specific
        # fields for on-chain vs lightning entries.
        if not self._is_upgrade_method_needed(28, 28):
            return
        PR_TYPE_ONCHAIN = 0
        requests = self.data.get('payment_requests', {})
        invoices = self.data.get('invoices', {})
        for d in [invoices, requests]:
            for key, r in list(d.items()):
                _type = r.get('type', 0)
                item = {
                    'type': _type,
                    # older records used 'memo' instead of 'message'
                    'message': r.get('message') or r.get('memo', ''),
                    'amount': r.get('amount'),
                    'exp': r.get('exp') or 0,
                    'time': r.get('time', 0),
                }
                if _type == PR_TYPE_ONCHAIN:
                    address = r.pop('address', None)
                    if address:
                        # single-address record -> synthesize an outputs list
                        outputs = [(0, address, r.get('amount'))]
                    else:
                        outputs = r.get('outputs')
                    item.update({
                        'outputs': outputs,
                        'id': r.get('id'),
                        'bip70': r.get('bip70'),
                        'requestor': r.get('requestor'),
                    })
                else:
                    # lightning entry: these fields are required to exist
                    item.update({
                        'rhash': r['rhash'],
                        'invoice': r['invoice'],
                    })
                d[key] = item
        self.data['seed_version'] = 29
|
|
|
|
|
|
2020-06-22 22:37:58 +02:00
|
|
|
    def _convert_version_30(self):
        # Rename amount fields by unit: on-chain 'amount' -> 'amount_sat',
        # lightning 'amount' -> 'amount_msat' (x1000). For lightning entries,
        # drop fields that are recoverable from the bolt11 invoice itself.
        if not self._is_upgrade_method_needed(29, 29):
            return
        PR_TYPE_ONCHAIN = 0
        PR_TYPE_LN = 2
        requests = self.data.get('payment_requests', {})
        invoices = self.data.get('invoices', {})
        for d in [invoices, requests]:
            for key, item in list(d.items()):
                _type = item['type']
                if _type == PR_TYPE_ONCHAIN:
                    item['amount_sat'] = item.pop('amount')
                elif _type == PR_TYPE_LN:
                    amount_sat = item.pop('amount')
                    item['amount_msat'] = 1000 * amount_sat if amount_sat is not None else None
                    item.pop('exp')
                    item.pop('message')
                    item.pop('rhash')
                    item.pop('time')
                else:
                    raise Exception(f"unknown invoice type: {_type}")
        self.data['seed_version'] = 30
|
|
|
|
|
|
2020-06-27 02:23:46 +02:00
|
|
|
def _convert_version_31(self):
|
|
|
|
|
if not self._is_upgrade_method_needed(30, 30):
|
|
|
|
|
return
|
2021-09-19 17:32:25 +02:00
|
|
|
PR_TYPE_ONCHAIN = 0
|
2020-06-27 02:23:46 +02:00
|
|
|
requests = self.data.get('payment_requests', {})
|
|
|
|
|
invoices = self.data.get('invoices', {})
|
|
|
|
|
for d in [invoices, requests]:
|
|
|
|
|
for key, item in list(d.items()):
|
|
|
|
|
if item['type'] == PR_TYPE_ONCHAIN:
|
|
|
|
|
item['amount_sat'] = item['amount_sat'] or 0
|
|
|
|
|
item['exp'] = item['exp'] or 0
|
|
|
|
|
item['time'] = item['time'] or 0
|
|
|
|
|
self.data['seed_version'] = 31
|
|
|
|
|
|
2020-07-08 00:57:23 +02:00
|
|
|
def _convert_version_32(self):
|
|
|
|
|
if not self._is_upgrade_method_needed(31, 31):
|
|
|
|
|
return
|
2021-01-12 14:57:31 +01:00
|
|
|
PR_TYPE_ONCHAIN = 0
|
2020-07-08 00:57:23 +02:00
|
|
|
invoices_old = self.data.get('invoices', {})
|
|
|
|
|
invoices_new = {k: item for k, item in invoices_old.items()
|
|
|
|
|
if not (item['type'] == PR_TYPE_ONCHAIN and item['outputs'] is None)}
|
|
|
|
|
self.data['invoices'] = invoices_new
|
|
|
|
|
self.data['seed_version'] = 32
|
|
|
|
|
|
wallet_db: impl convert_version_33: put 'height' field into invoices
The 'height' field was added in https://github.com/spesmilo/electrum/commit/cdfaaa260942b807f809c2c0414fb242a03e945a
At the time we thought we could just add it with a default value without a db upgrade;
however the issue is that if old code tries to open a new db, it will fail (due to unexpected new field).
Hence it is better to do an explicit conversion where old code *knows* it cannot open the new db.
E | gui.qt.ElectrumGui |
Traceback (most recent call last):
File "...\electrum\electrum\gui\qt\__init__.py", line 257, in start_new_window
wallet = self.daemon.load_wallet(path, None)
File "...\electrum\electrum\daemon.py", line 488, in load_wallet
db = WalletDB(storage.read(), manual_upgrades=manual_upgrades)
File "...\electrum\electrum\wallet_db.py", line 72, in __init__
self.load_data(raw)
File "...\electrum\electrum\wallet_db.py", line 103, in load_data
self._after_upgrade_tasks()
File "...\electrum\electrum\wallet_db.py", line 189, in _after_upgrade_tasks
self._load_transactions()
File "...\electrum\electrum\util.py", line 408, in <lambda>
return lambda *args, **kw_args: do_profile(args, kw_args)
File "...\electrum\electrum\util.py", line 404, in do_profile
o = func(*args, **kw_args)
File "...\electrum\electrum\wallet_db.py", line 1139, in _load_transactions
self.data = StoredDict(self.data, self, [])
File "...\electrum\electrum\json_db.py", line 79, in __init__
self.__setitem__(k, v)
File "...\electrum\electrum\json_db.py", line 44, in wrapper
return func(self, *args, **kwargs)
File "...\electrum\electrum\json_db.py", line 105, in __setitem__
v = self.db._convert_dict(self.path, key, v)
File "...\electrum\electrum\wallet_db.py", line 1182, in _convert_dict
v = dict((k, Invoice.from_json(x)) for k, x in v.items())
File "...\electrum\electrum\wallet_db.py", line 1182, in <genexpr>
v = dict((k, Invoice.from_json(x)) for k, x in v.items())
File "...\electrum\electrum\invoices.py", line 108, in from_json
return OnchainInvoice(**x)
TypeError: __init__() got an unexpected keyword argument 'height'
2020-12-17 15:17:08 +01:00
|
|
|
    def _convert_version_33(self):
        # Explicitly store a 'height' field on on-chain invoices/requests.
        # The field was originally added without a db upgrade; however old code
        # fails to open a db containing an unexpected new field, so an explicit
        # version bump is needed so that old code *knows* it cannot open the new db.
        if not self._is_upgrade_method_needed(32, 32):
            return
        PR_TYPE_ONCHAIN = 0
        requests = self.data.get('payment_requests', {})
        invoices = self.data.get('invoices', {})
        for d in [invoices, requests]:
            for key, item in list(d.items()):
                if item['type'] == PR_TYPE_ONCHAIN:
                    item['height'] = item.get('height') or 0
        self.data['seed_version'] = 33
|
|
|
|
|
|
2020-12-18 11:05:36 +01:00
|
|
|
def _convert_version_34(self):
|
|
|
|
|
if not self._is_upgrade_method_needed(33, 33):
|
|
|
|
|
return
|
|
|
|
|
channels = self.data.get('channels', {})
|
|
|
|
|
for key, item in channels.items():
|
|
|
|
|
item['local_config']['upfront_shutdown_script'] = \
|
|
|
|
|
item['local_config'].get('upfront_shutdown_script') or ""
|
|
|
|
|
item['remote_config']['upfront_shutdown_script'] = \
|
|
|
|
|
item['remote_config'].get('upfront_shutdown_script') or ""
|
|
|
|
|
self.data['seed_version'] = 34
|
|
|
|
|
|
2021-01-12 14:57:31 +01:00
|
|
|
def _convert_version_35(self):
|
|
|
|
|
# same as 32, but for payment_requests
|
|
|
|
|
if not self._is_upgrade_method_needed(34, 34):
|
|
|
|
|
return
|
|
|
|
|
PR_TYPE_ONCHAIN = 0
|
|
|
|
|
requests_old = self.data.get('payment_requests', {})
|
|
|
|
|
requests_new = {k: item for k, item in requests_old.items()
|
|
|
|
|
if not (item['type'] == PR_TYPE_ONCHAIN and item['outputs'] is None)}
|
|
|
|
|
self.data['payment_requests'] = requests_new
|
|
|
|
|
self.data['seed_version'] = 35
|
|
|
|
|
|
2021-01-22 21:38:32 +01:00
|
|
|
def _convert_version_36(self):
|
|
|
|
|
if not self._is_upgrade_method_needed(35, 35):
|
|
|
|
|
return
|
|
|
|
|
old_frozen_coins = self.data.get('frozen_coins', [])
|
|
|
|
|
new_frozen_coins = {coin: True for coin in old_frozen_coins}
|
|
|
|
|
self.data['frozen_coins'] = new_frozen_coins
|
|
|
|
|
self.data['seed_version'] = 36
|
|
|
|
|
|
2021-02-01 14:17:04 +01:00
|
|
|
def _convert_version_37(self):
|
|
|
|
|
if not self._is_upgrade_method_needed(36, 36):
|
|
|
|
|
return
|
|
|
|
|
payments = self.data.get('lightning_payments', {})
|
|
|
|
|
for k, v in list(payments.items()):
|
|
|
|
|
amount_sat, direction, status = v
|
|
|
|
|
amount_msat = amount_sat * 1000 if amount_sat is not None else None
|
|
|
|
|
payments[k] = amount_msat, direction, status
|
|
|
|
|
self.data['lightning_payments'] = payments
|
|
|
|
|
self.data['seed_version'] = 37
|
|
|
|
|
|
2021-02-18 02:18:09 +01:00
|
|
|
    def _convert_version_38(self):
        # Delete invoices/requests whose amount is not a sane integer
        # within the total-coin-supply bound ('!' meaning "max" is kept).
        if not self._is_upgrade_method_needed(37, 37):
            return
        PR_TYPE_ONCHAIN = 0
        PR_TYPE_LN = 2
        from .bitcoin import TOTAL_COIN_SUPPLY_LIMIT_IN_BTC, COIN
        max_sats = TOTAL_COIN_SUPPLY_LIMIT_IN_BTC * COIN
        requests = self.data.get('payment_requests', {})
        invoices = self.data.get('invoices', {})
        for d in [invoices, requests]:
            for key, item in list(d.items()):
                if item['type'] == PR_TYPE_ONCHAIN:
                    amount_sat = item['amount_sat']
                    if amount_sat == '!':
                        # "max" sentinel is valid
                        continue
                    if not (isinstance(amount_sat, int) and 0 <= amount_sat <= max_sats):
                        del d[key]
                elif item['type'] == PR_TYPE_LN:
                    amount_msat = item['amount_msat']
                    if not amount_msat:
                        # missing/zero amount is allowed for lightning
                        continue
                    if not (isinstance(amount_msat, int) and 0 <= amount_msat <= max_sats * 1000):
                        del d[key]
        self.data['seed_version'] = 38
|
|
|
|
|
|
2021-03-09 09:55:55 +01:00
|
|
|
def _convert_version_39(self):
|
|
|
|
|
# this upgrade prevents initialization of lightning_privkey2 after lightning_xprv has been set
|
|
|
|
|
if not self._is_upgrade_method_needed(38, 38):
|
|
|
|
|
return
|
|
|
|
|
self.data['imported_channel_backups'] = self.data.pop('channel_backups', {})
|
|
|
|
|
self.data['seed_version'] = 39
|
|
|
|
|
|
2021-03-30 21:16:14 +02:00
|
|
|
    def _convert_version_40(self):
        # put 'seed_type' into keystores
        if not self._is_upgrade_method_needed(39, 39):
            return
        # 'keystore' is the single-sig keystore; 'x1/'..'x15/' are multisig cosigners
        for ks_name in ('keystore', *['x{}/'.format(i) for i in range(1, 16)]):
            ks = self.data.get(ks_name, None)
            if ks is None: continue
            seed = ks.get('seed')
            if not seed: continue  # only keystores that actually store a seed
            seed_type = None
            xpub = ks.get('xpub') or None
            if xpub:
                assert isinstance(xpub, str)
                # infer electrum seed type from the xpub version prefix
                if xpub[0:4] in ('xpub', 'tpub'):
                    seed_type = 'standard'
                elif xpub[0:4] in ('zpub', 'Zpub', 'vpub', 'Vpub'):
                    seed_type = 'segwit'
            elif ks.get('type') == 'old':
                seed_type = 'old'
            if seed_type is not None:
                ks['seed_type'] = seed_type
        self.data['seed_version'] = 40
|
|
|
|
|
|
2021-06-08 16:21:43 +02:00
|
|
|
def _convert_version_41(self):
|
|
|
|
|
# this is a repeat of upgrade 39, to fix wallet backup files (see #7339)
|
|
|
|
|
if not self._is_upgrade_method_needed(40, 40):
|
|
|
|
|
return
|
|
|
|
|
imported_channel_backups = self.data.pop('channel_backups', {})
|
|
|
|
|
imported_channel_backups.update(self.data.get('imported_channel_backups', {}))
|
|
|
|
|
self.data['imported_channel_backups'] = imported_channel_backups
|
|
|
|
|
self.data['seed_version'] = 41
|
|
|
|
|
|
2021-09-19 17:35:49 +02:00
|
|
|
def _convert_version_42(self):
|
|
|
|
|
# in OnchainInvoice['outputs'], convert values from None to 0
|
|
|
|
|
if not self._is_upgrade_method_needed(41, 41):
|
|
|
|
|
return
|
|
|
|
|
PR_TYPE_ONCHAIN = 0
|
|
|
|
|
requests = self.data.get('payment_requests', {})
|
|
|
|
|
invoices = self.data.get('invoices', {})
|
|
|
|
|
for d in [invoices, requests]:
|
|
|
|
|
for key, item in list(d.items()):
|
|
|
|
|
if item['type'] == PR_TYPE_ONCHAIN:
|
|
|
|
|
item['outputs'] = [(_type, addr, (val or 0))
|
|
|
|
|
for _type, addr, val in item['outputs']]
|
|
|
|
|
self.data['seed_version'] = 42
|
|
|
|
|
|
2021-09-20 11:57:12 +02:00
|
|
|
    def _convert_version_43(self):
        # Hoist htlc bookkeeping out of the per-channel 'log' structure,
        # and rename 'unacked_local_updates2' to 'unacked_updates'.
        if not self._is_upgrade_method_needed(42, 42):
            return
        channels = self.data.pop('channels', {})
        for k, c in channels.items():
            log = c['log']
            c['fail_htlc_reasons'] = log.pop('fail_htlc_reasons', {})
            c['unfulfilled_htlcs'] = log.pop('unfulfilled_htlcs', {})
            # "1" is the LOCAL side of the log
            log["1"]['unacked_updates'] = log.pop('unacked_local_updates2', {})
        self.data['channels'] = channels
        self.data['seed_version'] = 43
|
|
|
|
|
|
2022-01-18 14:54:43 +01:00
|
|
|
    def _convert_version_44(self):
        # Introduce an explicit 'channel_type' field derived from the old
        # 'static_remotekey_enabled' flag, which is then removed.
        if not self._is_upgrade_method_needed(43, 43):
            return
        channels = self.data.get('channels', {})
        for key, item in channels.items():
            # .get() (not []) because legacy pre-static-remotekey channels
            # may lack the flag entirely (see #7636 follow-up)
            if bool(item.get('static_remotekey_enabled')):
                channel_type = ChannelType.OPTION_STATIC_REMOTEKEY
            else:
                channel_type = ChannelType(0)
            item.pop('static_remotekey_enabled', None)
            item['channel_type'] = channel_type
        self.data['seed_version'] = 44
|
|
|
|
|
|
2022-03-15 13:03:34 +01:00
|
|
|
    def _convert_version_45(self):
        # Rewrite all invoices/requests into a single unified schema,
        # and add 'receive_address' (None) to submarine swaps.
        from .lnaddr import lndecode
        if not self._is_upgrade_method_needed(44, 44):
            return
        swaps = self.data.get('submarine_swaps', {})
        for key, item in swaps.items():
            item['receive_address'] = None
        # note: we set height to zero
        # the new key for all requests is a wallet address, not done here
        for name in ['invoices', 'payment_requests']:
            invoices = self.data.get(name, {})
            for key, item in invoices.items():
                is_lightning = item['type'] == 2  # PR_TYPE_LN
                lightning_invoice = item['invoice'] if is_lightning else None
                outputs = item['outputs'] if not is_lightning else None
                bip70 = item['bip70'] if not is_lightning else None
                if is_lightning:
                    # recover the shared fields from the bolt11 invoice itself
                    lnaddr = lndecode(item['invoice'])
                    amount_msat = lnaddr.get_amount_msat()
                    timestamp = lnaddr.date
                    exp_delay = lnaddr.get_expiry()
                    message = lnaddr.get_description()
                    height = 0
                else:
                    amount_sat = item['amount_sat']
                    # '!' ("max") and None pass through unscaled
                    amount_msat = amount_sat * 1000 if amount_sat not in [None, '!'] else amount_sat
                    message = item['message']
                    timestamp = item['time']
                    exp_delay = item['exp']
                    height = item['height']

                invoices[key] = {
                    'amount_msat':amount_msat,
                    'message':message,
                    'time':timestamp,
                    'exp':exp_delay,
                    'height':height,
                    'outputs':outputs,
                    'bip70':bip70,
                    'lightning_invoice':lightning_invoice,
                }
        self.data['seed_version'] = 45
|
|
|
|
|
|
2022-08-15 14:14:25 +02:00
|
|
|
    def _convert_invoices_keys(self, invoices):
        # recalc keys of outgoing on-chain invoices
        # (shared helper for upgrades 46 and 50; mutates `invoices` in place)
        from .crypto import sha256d
        def get_id_from_onchain_outputs(raw_outputs, timestamp):
            # deterministic 10-hex-char id derived from the outputs and timestamp
            outputs = [PartialTxOutput.from_legacy_tuple(*output) for output in raw_outputs]
            outputs_str = "\n".join(f"{txout.scriptpubkey.hex()}, {txout.value}" for txout in outputs)
            return sha256d(outputs_str + "%d" % timestamp).hex()[0:10]

        for key, item in list(invoices.items()):
            is_lightning = item['lightning_invoice'] is not None
            if is_lightning:
                continue  # lightning entries keep their key
            outputs_raw = item['outputs']
            assert outputs_raw, outputs_raw
            timestamp = item['time']
            newkey = get_id_from_onchain_outputs(outputs_raw, timestamp)
            if newkey != key:
                # re-key the entry under its recalculated id
                invoices[newkey] = item
                del invoices[key]
|
2022-08-15 14:14:25 +02:00
|
|
|
|
|
|
|
|
def _convert_version_46(self):
|
|
|
|
|
if not self._is_upgrade_method_needed(45, 45):
|
|
|
|
|
return
|
|
|
|
|
invoices = self.data.get('invoices', {})
|
|
|
|
|
self._convert_invoices_keys(invoices)
|
2022-04-22 19:53:55 +02:00
|
|
|
self.data['seed_version'] = 46
|
|
|
|
|
|
2022-06-15 16:24:29 +02:00
|
|
|
    def _convert_version_47(self):
        from .lnaddr import lndecode
        if not self._is_upgrade_method_needed(46, 46):
            return
        # recalc keys of requests: lightning requests become keyed by
        # their payment hash (rhash), decoded from the bolt11 invoice
        requests = self.data.get('payment_requests', {})
        for key, item in list(requests.items()):
            lnaddr = item.get('lightning_invoice')
            if lnaddr:
                lnaddr = lndecode(lnaddr)
                rhash = lnaddr.paymenthash.hex()
                if key != rhash:
                    requests[rhash] = item
                    del requests[key]
        self.data['seed_version'] = 47
|
|
|
|
|
|
2022-07-15 18:26:13 +02:00
|
|
|
def _convert_version_48(self):
|
|
|
|
|
# fix possible corruption of invoice amounts, see #7774
|
|
|
|
|
if not self._is_upgrade_method_needed(47, 47):
|
|
|
|
|
return
|
|
|
|
|
invoices = self.data.get('invoices', {})
|
|
|
|
|
for key, item in list(invoices.items()):
|
|
|
|
|
if item['amount_msat'] == 1000 * "!":
|
|
|
|
|
item['amount_msat'] = "!"
|
|
|
|
|
self.data['seed_version'] = 48
|
|
|
|
|
|
2022-08-15 16:03:21 +00:00
|
|
|
    def _convert_version_49(self):
        # Refuse to upgrade wallets that still contain pre-4.0 'LEGACY'
        # lightning channels; those must be handled with Electrum 4.3.0.
        if not self._is_upgrade_method_needed(48, 48):
            return
        channels = self.data.get('channels', {})
        legacy_chans = [chan_dict for chan_dict in channels.values()
                        if chan_dict['channel_type'] == ChannelType.OPTION_LEGACY_CHANNEL]
        if legacy_chans:
            raise WalletFileException(
                f"This wallet contains {len(legacy_chans)} lightning channels of type 'LEGACY'. "
                f"These channels were created using unreleased development versions of Electrum "
                f"before the first lightning-capable release of 4.0, and are not supported anymore. "
                f"Please use Electrum 4.3.0 to open this wallet, close the channels, "
                f"and delete them from the wallet."
            )
        self.data['seed_version'] = 49
|
|
|
|
|
|
2022-08-15 14:14:25 +02:00
|
|
|
def _convert_version_50(self):
|
|
|
|
|
if not self._is_upgrade_method_needed(49, 49):
|
|
|
|
|
return
|
|
|
|
|
requests = self.data.get('payment_requests', {})
|
|
|
|
|
self._convert_invoices_keys(requests)
|
|
|
|
|
self.data['seed_version'] = 50
|
|
|
|
|
|
2023-02-27 10:31:21 +01:00
|
|
|
    def _convert_version_51(self):
        # Replace 'lightning_invoice' on payment requests with an explicit
        # 'payment_hash' field (decoded from the bolt11 invoice, or None).
        from .lnaddr import lndecode
        if not self._is_upgrade_method_needed(50, 50):
            return
        requests = self.data.get('payment_requests', {})
        for key, item in list(requests.items()):
            lightning_invoice = item.pop('lightning_invoice')
            if lightning_invoice is None:
                payment_hash = None
            else:
                lnaddr = lndecode(lightning_invoice)
                payment_hash = lnaddr.paymenthash.hex()
            item['payment_hash'] = payment_hash
        self.data['seed_version'] = 51
|
|
|
|
|
|
wallet_db version 52: break non-homogeneous multisig wallets
- case 1: in version 4.4.1, 4.4.2, the qml GUI wizard allowed creating multisig wallets with an old_mpk as cosigner.
- case 2: in version 4.4.0, 4.4.1, 4.4.2, the qml GUI wizard allowed creating multisig wallets with mixed xpub/Ypub/Zpub.
The corresponding missing input validation was a bug in the wizard, it was unintended behaviour. Validation was added in d2cf21fc2bcf79f07b7e41178cd3e4ca9e3d9f68. Note however that there might be users who created such wallet files.
Re case 1 wallet files: there is no version of Electrum that allows spending from such a wallet. Coins received at addresses are not burned, however it is technically challenging to spend them. (unless the multisig can spend without needing the old_mpk cosigner in the quorum).
Re case 2 wallet files: it is possible to create a corresponding spending wallet for such a multisig, however it is a bit tricky. The script type for the addresses in such a heterogeneous xpub wallet is based on the xpub_type of the first keystore. So e.g. given a wallet file [Yprv1, Zpub2] it will have sh(wsh()) scripts, and the cosigner should create a wallet file [Ypub1, Zprv2] (same order).
Technically case 2 wallet files could be "fixed" automatically by converting the xpub types as part of a wallet_db upgrade. However if the wallet files also contain seeds, those cannot be converted ("standard" vs "segwit" electrum seed).
Case 1 wallet files are not possible to "fix" automatically as the cosigner using the old_mpk is not bip32 based.
It is unclear if there are *any* users out there affected by this. I suspect for case 1 it is very likely there are none (not many people have pre-2.0 electrum seeds which were never supported as part of a multisig who would also now try to create a multisig using them); for case 2 however there might be.
This commit breaks both case 1 and case 2 wallets: these wallet files can no longer be opened in new Electrum, an error message is shown and the crash reporter opens. If any potential users opt to send crash reports, at least we will know they exist and can help them recover.
2023-05-11 13:48:54 +00:00
|
|
|
def _detect_insane_version_51(self) -> int:
|
|
|
|
|
"""Returns 0 if file okay,
|
|
|
|
|
error code 1: multisig wallet has old_mpk
|
|
|
|
|
error code 2: multisig wallet has mixed Ypub/Zpub
|
|
|
|
|
"""
|
|
|
|
|
assert self.get('seed_version') == 51
|
|
|
|
|
xpub_type = None
|
|
|
|
|
for ks_name in ['x{}/'.format(i) for i in range(1, 16)]: # having any such field <=> multisig wallet
|
|
|
|
|
ks = self.data.get(ks_name, None)
|
|
|
|
|
if ks is None: continue
|
|
|
|
|
ks_type = ks.get('type')
|
|
|
|
|
if ks_type == "old":
|
|
|
|
|
return 1 # error
|
|
|
|
|
assert ks_type in ("bip32", "hardware"), f"unexpected {ks_type=}"
|
|
|
|
|
xpub = ks.get('xpub') or None
|
|
|
|
|
assert xpub is not None
|
|
|
|
|
assert isinstance(xpub, str)
|
|
|
|
|
if xpub_type is None: # first iter
|
|
|
|
|
xpub_type = xpub[0:4]
|
|
|
|
|
if xpub[0:4] != xpub_type:
|
|
|
|
|
return 2 # error
|
|
|
|
|
# looks okay
|
|
|
|
|
return 0
|
|
|
|
|
|
|
|
|
|
def _convert_version_52(self):
    """Bump v51 -> v52, refusing files flagged by _detect_insane_version_51."""
    if not self._is_upgrade_method_needed(51, 51):
        return
    error_code = self._detect_insane_version_51()
    if error_code != 0:
        # should not get here; get_seed_version should have caught this
        raise Exception(f'unsupported wallet file: version_51 with error {error_code}')
    self.data['seed_version'] = 52
|
|
|
|
|
|
2023-08-17 14:04:05 +00:00
|
|
|
def _convert_version_53(self):
    """Bump v52 -> v53: ensure imported channel backups have a 'local_payment_pubkey' field."""
    if not self._is_upgrade_method_needed(52, 52):
        return
    for backup in self.data.get('imported_channel_backups', {}).values():
        if 'local_payment_pubkey' not in backup:
            backup['local_payment_pubkey'] = None
    self.data['seed_version'] = 53
|
|
|
|
|
|
2023-08-22 18:10:21 +00:00
|
|
|
def _convert_version_54(self):
    """Bump v53 -> v54: drop invoices/requests with out-of-range amounts.

    note: similar to convert_version_38
    """
    if not self._is_upgrade_method_needed(53, 53):
        return
    from .bitcoin import TOTAL_COIN_SUPPLY_LIMIT_IN_BTC, COIN
    max_msat = TOTAL_COIN_SUPPLY_LIMIT_IN_BTC * COIN * 1000
    tables = (self.data.get('invoices', {}), self.data.get('payment_requests', {}))
    for table in tables:
        for key in list(table.keys()):
            amount_msat = table[key]['amount_msat']
            if amount_msat == '!':
                continue  # "send max" sentinel is always allowed
            if not (isinstance(amount_msat, int) and 0 <= amount_msat <= max_msat):
                del table[key]
    self.data['seed_version'] = 54
|
|
|
|
|
|
2020-02-04 12:45:31 +01:00
|
|
|
def _convert_version_55(self):
    """Bump v54 -> v55: do not use '/' in dict keys (strip trailing slash)."""
    if not self._is_upgrade_method_needed(54, 54):
        return
    slash_keys = [key for key in self.data.keys() if key.endswith('/')]
    for old_key in slash_keys:
        self.data[old_key[:-1]] = self.data.pop(old_key)
    self.data['seed_version'] = 55
|
|
|
|
|
|
2023-10-13 15:49:13 +02:00
|
|
|
def _convert_version_56(self):
    """Bump v55 -> v56: reset channel announcement state fields."""
    if not self._is_upgrade_method_needed(55, 55):
        return
    for chan in self.data.get('channels', {}).values():
        chan['constraints']['flags'] = 0
        for side in ('local_config', 'remote_config'):
            chan[side]['announcement_node_sig'] = ''
            chan[side]['announcement_bitcoin_sig'] = ''
        # bare pop: assumes every v55 channel has 'was_announced' — TODO confirm
        chan['local_config'].pop('was_announced')
    self.data['seed_version'] = 56
|
|
|
|
|
|
wallet db: deduplicate "seed_type" field
In the db, the 'seed_type' field could be present both at the top-level and inside keystores.
Note:
- both fields had usages
- the top-level field was added in 2.8 re "initial segwit support" (3a64ec0f2ed9c845b5c01daa07fafcbfa26c3cb7)
- there was no db upgrade for it, so older files are missing it
- if present, valid values can be electrum types but also
other types supported by the wizard, e.g. "bip39"
- the keystore-level field was added in 4.1 (7b7bba22992eca4536a91249758f178b4a7e044a)
- there was a db upgrade when it was introduced, so old files also have it
- if present, valid values can only be electrum types
- there is not much value in the top-level one having a non-electrum value,
and those values were never used by other parts of the code
- note that when creating a standard wallet from a bip39 seed, the seed is discarded.
Only the derived xprv and the derivation path are kept. If we changed this and also kept the seed,
e.g. to display it to the user, then it would make sense to save the seed type (e.g. "bip39").
However storing that seed_type would make more sense at the keystore level (not top-level).
We delete the top-level 'seed_type' field.
```
{
"keystore": {
"seed_type": "segwit",
...
},
"seed_type": "segwit",
...
}
```
2023-12-01 18:43:37 +00:00
|
|
|
def _convert_version_57(self):
    """Bump v56 -> v57: deduplicate the 'seed_type' field.

    The 'seed_type' field could be present both at the top-level and inside
    keystores. We delete the one that is top-level.
    """
    if not self._is_upgrade_method_needed(56, 56):
        return
    if 'seed_type' in self.data:
        del self.data['seed_type']
    self.data['seed_version'] = 57
|
|
|
|
|
|
2024-01-23 01:38:17 +00:00
|
|
|
def _convert_version_58(self):
    """Bump v57 -> v58: re-construct prevouts_by_scripthash.

    New structure: scripthash -> outpoint -> value
    """
    if not self._is_upgrade_method_needed(57, 57):
        return
    from .bitcoin import script_to_scripthash
    raw_txs = self.get('transactions', {})  # txid -> raw_tx
    by_scripthash = {}
    for txid, raw_tx in raw_txs.items():
        # raw tx may be serialized as a PSBT or as a plain network tx
        try:
            tx = PartialTransaction.from_raw_psbt(raw_tx)
        except BadHeaderMagic:
            tx = Transaction(raw_tx)
        for output_idx, txout in enumerate(tx.outputs()):
            scripthash = script_to_scripthash(txout.scriptpubkey.hex())
            outpoint = f"{txid}:{output_idx}"
            by_scripthash.setdefault(scripthash, {})[outpoint] = txout.value
    self.put('prevouts_by_scripthash', by_scripthash)
    self.data['seed_version'] = 58
|
|
|
|
|
|
2020-02-03 12:35:50 +01:00
|
|
|
def _convert_imported(self):
    """Migrate pre-v13 imported accounts into top-level addresses/keypairs.

    Raises WalletFileException if the imported account mixes watch-only
    addresses with private keys, or contains neither.
    """
    if not self._is_upgrade_method_needed(0, 13):
        return

    # '/x' is the internal ID for imported accounts
    imported = self.get('accounts', {}).get('/x', {}).get('imported', {})
    if not imported:
        return False
    addresses = []
    keypairs = {}
    for address, (pubkey, privkey) in imported.items():
        if privkey:
            keypairs[pubkey] = privkey
        else:
            addresses.append(address)
    if addresses and keypairs:
        raise WalletFileException('mixed addresses and privkeys')
    elif addresses:
        self.put('addresses', addresses)
        self.put('accounts', None)
    elif keypairs:
        self.put('wallet_type', 'standard')
        self.put('key_type', 'imported')
        self.put('keypairs', keypairs)
        self.put('accounts', None)
    else:
        raise WalletFileException('no addresses or privkeys')
|
|
|
|
|
|
|
|
|
|
def _convert_account(self):
    """Drop the legacy 'accounts' dict for pre-v13 files."""
    if self._is_upgrade_method_needed(0, 13):
        self.put('accounts', None)
|
|
|
|
|
|
|
|
|
|
def _is_upgrade_method_needed(self, min_version, max_version):
    """Whether the current db version falls in [min_version, max_version].

    Returns False if the file is already past max_version; raises
    WalletFileException if it is below min_version (upgrade chain broken).
    """
    assert min_version <= max_version
    cur_version = self.get_seed_version()
    if cur_version > max_version:
        return False
    if cur_version < min_version:
        raise WalletFileException(
            'storage upgrade: unexpected version {} (should be {}-{})'
            .format(cur_version, min_version, max_version))
    return True
|
|
|
|
|
|
|
|
|
|
def get_seed_version(self):
    """Determine the storage version of this file, raising for unsupported ones."""
    seed_version = self.get('seed_version')
    if not seed_version:
        # very old files carry no explicit version; infer it from the mpk length
        has_old_mpk = len(self.get('master_public_key', '')) == 128
        seed_version = OLD_SEED_VERSION if has_old_mpk else NEW_SEED_VERSION
    if seed_version > FINAL_SEED_VERSION:
        raise WalletFileException('This version of Electrum is too old to open this wallet.\n'
                                  '(highest supported storage version: {}, version of this file: {})'
                                  .format(FINAL_SEED_VERSION, seed_version))
    if seed_version == 14 and self.get('seed_type') == 'segwit':
        self._raise_unsupported_version(seed_version)
    if seed_version == 51 and self._detect_insane_version_51():
        self._raise_unsupported_version(seed_version)
    if seed_version < 12 and seed_version not in [OLD_SEED_VERSION, NEW_SEED_VERSION]:
        self._raise_unsupported_version(seed_version)
    return seed_version
|
|
|
|
|
|
|
|
|
|
def _raise_unsupported_version(self, seed_version):
    """Raise an exception explaining why `seed_version` cannot be opened.

    Never returns: always raises WalletFileException (or its subclass
    WalletFileExceptionVersion51 for the broken-multisig v51 case).
    """
    msg = f"Your wallet has an unsupported seed version: {seed_version}."
    if seed_version in [5, 7, 8, 9, 10, 14]:
        # these versions have dedicated git branches that can still open them
        msg += "\n\nTo open this wallet, try 'git checkout seed_v%d'"%seed_version
    if seed_version == 6:
        # version 1.9.8 created v6 wallets when an incorrect seed was entered in the restore dialog
        msg += '\n\nThis file was created because of a bug in version 1.9.8.'
        if self.get('master_public_keys') is None and self.get('master_private_keys') is None and self.get('imported_keys') is None:
            # pbkdf2 (at that time an additional dependency) was not included with the binaries, and wallet creation aborted.
            msg += "\nIt does not contain any keys, and can safely be removed."
        else:
            # creation was complete if electrum was run from source
            msg += "\nPlease open this file with Electrum 1.9.8, and move your coins to a new wallet."
    if seed_version == 51:
        # broken multisig created by the 4.4.x qml wizard; see _detect_insane_version_51
        error_code = self._detect_insane_version_51()
        assert error_code != 0
        msg += f" ({error_code=})"
        if error_code == 1:
            msg += "\nThis is a multisig wallet containing an old_mpk (pre-bip32 master public key)."
            msg += "\nPlease contact us to help recover it by opening an issue on GitHub."
        elif error_code == 2:
            msg += ("\nThis is a multisig wallet containing mixed xpub/Ypub/Zpub."
                    "\nThe script type is determined by the type of the first keystore."
                    "\nTo recover, you should re-create the wallet with matching type "
                    "(converted if needed) master keys."
                    "\nOr you can contact us to help recover it by opening an issue on GitHub.")
        else:
            raise Exception(f"unexpected {error_code=}")
        # should_report_crash=True so affected users surface via the crash reporter
        raise WalletFileExceptionVersion51(msg, should_report_crash=True)
    # generic exception
    raise WalletFileException(msg)
|
|
|
|
|
|
2023-08-18 15:13:33 +02:00
|
|
|
|
2024-01-23 02:20:01 +00:00
|
|
|
def upgrade_wallet_db(data: dict, do_upgrade: bool) -> Tuple[dict, bool]:
    """Bring a wallet-db dict up to FINAL_SEED_VERSION.

    Returns (data, was_upgraded). Raises WalletRequiresSplit when the file
    holds multiple accounts, and WalletRequiresUpgrade when an upgrade is
    needed but do_upgrade is False.
    """
    was_upgraded = False

    if not data:
        # brand-new DB: created directly at the latest version
        data['seed_version'] = FINAL_SEED_VERSION
        # store this for debugging purposes
        metadata = DBMetadata(
            creation_timestamp=int(time.time()),
            first_electrum_version_used=ELECTRUM_VERSION,
        )
        assert data.get("db_metadata", None) is None
        data["db_metadata"] = metadata
        was_upgraded = True

    dbu = WalletDBUpgrader(data)
    if dbu.requires_split():
        raise WalletRequiresSplit(dbu.get_split_accounts())
    if do_upgrade and dbu.requires_upgrade():
        dbu.upgrade()
        was_upgraded = True
    if dbu.requires_upgrade():
        raise WalletRequiresUpgrade()
    return dbu.data, was_upgraded
|
|
|
|
|
|
|
|
|
|
|
2023-08-18 15:13:33 +02:00
|
|
|
class WalletDB(JsonDB):
|
|
|
|
|
|
2024-01-23 02:20:01 +00:00
|
|
|
def __init__(
        self,
        s: str,
        *,
        storage: Optional['WalletStorage'] = None,
        upgrade: bool = False,
):
    """Deserialize a wallet db from JSON string `s`, optionally upgrading it."""
    upgrader = partial(upgrade_wallet_db, do_upgrade=upgrade)
    JsonDB.__init__(self, s, storage=storage, encoder=MyEncoder, upgrader=upgrader)
    # create pointers
    self.load_transactions()
    # load plugins that are conditional on wallet type
    self.load_plugins()
|
|
|
|
|
|
|
|
|
|
@locked
def get_seed_version(self):
    """Return the stored 'seed_version' field of this db."""
    seed_version = self.get('seed_version')
    return seed_version
|
2023-03-13 15:15:50 +00:00
|
|
|
|
|
|
|
|
def get_db_metadata(self) -> Optional[DBMetadata]:
    """Return the DBMetadata record, if any.

    The field is only present for wallet files created with ver 4.4.0 or later.
    """
    return self.get("db_metadata")
|
|
|
|
|
|
2020-02-03 12:35:50 +01:00
|
|
|
@locked
def get_txi_addresses(self, tx_hash: str) -> List[str]:
    """Returns list of is_mine addresses that appear as inputs in tx."""
    assert isinstance(tx_hash, str)
    inputs_by_addr = self.txi.get(tx_hash, {})
    return list(inputs_by_addr)
|
|
|
|
|
|
|
|
|
|
@locked
def get_txo_addresses(self, tx_hash: str) -> List[str]:
    """Returns list of is_mine addresses that appear as outputs in tx."""
    assert isinstance(tx_hash, str)
    outputs_by_addr = self.txo.get(tx_hash, {})
    return list(outputs_by_addr)
|
|
|
|
|
|
|
|
|
|
@locked
def get_txi_addr(self, tx_hash: str, address: str) -> Iterable[Tuple[str, int]]:
    """Returns an iterable of (prev_outpoint, value)."""
    assert isinstance(tx_hash, str)
    assert isinstance(address, str)
    outpoint_to_value = self.txi.get(tx_hash, {}).get(address, {})
    return [(ser, v) for (ser, v) in outpoint_to_value.items()]
|
2020-02-03 12:35:50 +01:00
|
|
|
|
|
|
|
|
@locked
def get_txo_addr(self, tx_hash: str, address: str) -> Dict[int, Tuple[int, bool]]:
    """Returns a dict: output_index -> (value, is_coinbase)."""
    assert isinstance(tx_hash, str)
    assert isinstance(address, str)
    by_index = self.txo.get(tx_hash, {}).get(address, {})
    result = {}
    for n, (v, cb) in by_index.items():
        result[int(n)] = (v, cb)  # indices are stored as strings in the db
    return result
|
2020-02-03 12:35:50 +01:00
|
|
|
|
|
|
|
|
@modifier
def add_txi_addr(self, tx_hash: str, addr: str, ser: str, v: int) -> None:
    """Record that tx spends `v` sats from `addr` via prev-outpoint `ser`."""
    assert isinstance(tx_hash, str)
    assert isinstance(addr, str)
    assert isinstance(ser, str)
    assert isinstance(v, int)
    # explicit membership checks (not setdefault) so nested writes go through
    # the containers' own __setitem__
    if tx_hash not in self.txi:
        self.txi[tx_hash] = {}
    addr_map = self.txi[tx_hash]
    if addr not in addr_map:
        addr_map[addr] = {}
    addr_map[addr][ser] = v
|
2020-02-03 12:35:50 +01:00
|
|
|
|
|
|
|
|
@modifier
def add_txo_addr(self, tx_hash: str, addr: str, n: Union[int, str], v: int, is_coinbase: bool) -> None:
    """Record that output `n` of tx pays `v` sats to `addr`."""
    n = str(n)  # indices are stored as strings
    assert isinstance(tx_hash, str)
    assert isinstance(addr, str)
    assert isinstance(n, str)
    assert isinstance(v, int)
    assert isinstance(is_coinbase, bool)
    if tx_hash not in self.txo:
        self.txo[tx_hash] = {}
    addr_map = self.txo[tx_hash]
    if addr not in addr_map:
        addr_map[addr] = {}
    addr_map[addr][n] = (v, is_coinbase)
|
2020-02-03 12:35:50 +01:00
|
|
|
|
|
|
|
|
@locked
def list_txi(self) -> Sequence[str]:
    """Return all txids that have stored inputs."""
    return [tx_hash for tx_hash in self.txi]
|
|
|
|
|
|
|
|
|
|
@locked
def list_txo(self) -> Sequence[str]:
    """Return all txids that have stored outputs."""
    return [tx_hash for tx_hash in self.txo]
|
|
|
|
|
|
|
|
|
|
@modifier
def remove_txi(self, tx_hash: str) -> None:
    """Forget all stored inputs of tx (no-op if absent)."""
    assert isinstance(tx_hash, str)
    self.txi.pop(tx_hash, None)
|
|
|
|
|
|
|
|
|
|
@modifier
def remove_txo(self, tx_hash: str) -> None:
    """Forget all stored outputs of tx (no-op if absent)."""
    assert isinstance(tx_hash, str)
    self.txo.pop(tx_hash, None)
|
|
|
|
|
|
|
|
|
|
@locked
def list_spent_outpoints(self) -> Sequence[Tuple[str, str]]:
    """Return every spent outpoint as (prevout_hash, prevout_n) pairs."""
    result = []
    for prevout_hash in self.spent_outpoints.keys():
        for prevout_n in self.get_spent_outpoints(prevout_hash):
            result.append((prevout_hash, prevout_n))
    return result
|
|
|
|
|
|
|
|
|
|
@locked
def get_spent_outpoints(self, prevout_hash: str) -> Sequence[str]:
    """Return the spent output indices (as strings) of tx `prevout_hash`."""
    assert isinstance(prevout_hash, str)
    spent = self.spent_outpoints.get(prevout_hash, {})
    return list(spent)
|
|
|
|
|
|
|
|
|
|
@locked
def get_spent_outpoint(self, prevout_hash: str, prevout_n: Union[int, str]) -> Optional[str]:
    """Return the txid spending outpoint (prevout_hash, prevout_n), or None."""
    assert isinstance(prevout_hash, str)
    spent = self.spent_outpoints.get(prevout_hash, {})
    return spent.get(str(prevout_n))
|
|
|
|
|
|
|
|
|
|
@modifier
def remove_spent_outpoint(self, prevout_hash: str, prevout_n: Union[int, str]) -> None:
    """Forget the spender of an outpoint; drop the prevout_hash entry when empty.

    Note: assumes prevout_hash has an entry (keyed access) — raises KeyError otherwise.
    """
    assert isinstance(prevout_hash, str)
    spent = self.spent_outpoints[prevout_hash]
    spent.pop(str(prevout_n), None)
    if not spent:
        self.spent_outpoints.pop(prevout_hash)
|
|
|
|
|
|
|
|
|
|
@modifier
def set_spent_outpoint(self, prevout_hash: str, prevout_n: Union[int, str], tx_hash: str) -> None:
    """Record that tx_hash spends the outpoint (prevout_hash, prevout_n)."""
    assert isinstance(prevout_hash, str)
    assert isinstance(tx_hash, str)
    key = str(prevout_n)  # indices stored as strings
    if prevout_hash not in self.spent_outpoints:
        self.spent_outpoints[prevout_hash] = {}
    self.spent_outpoints[prevout_hash][key] = tx_hash
|
|
|
|
|
|
|
|
|
|
@modifier
def add_prevout_by_scripthash(self, scripthash: str, *, prevout: TxOutpoint, value: int) -> None:
    """Index `prevout` (with its value in sats) under `scripthash`."""
    assert isinstance(scripthash, str)
    assert isinstance(prevout, TxOutpoint)
    assert isinstance(value, int)
    if scripthash not in self._prevouts_by_scripthash:
        self._prevouts_by_scripthash[scripthash] = {}
    self._prevouts_by_scripthash[scripthash][prevout.to_str()] = value
|
2020-02-03 12:35:50 +01:00
|
|
|
|
|
|
|
|
@modifier
def remove_prevout_by_scripthash(self, scripthash: str, *, prevout: TxOutpoint, value: int) -> None:
    """Remove `prevout` from the scripthash index; drop the scripthash when empty.

    Note: assumes scripthash has an entry (keyed access) — raises KeyError otherwise.
    `value` is accepted for interface symmetry with add_ but not used for lookup.
    """
    assert isinstance(scripthash, str)
    assert isinstance(prevout, TxOutpoint)
    assert isinstance(value, int)
    outpoints = self._prevouts_by_scripthash[scripthash]
    outpoints.pop(prevout.to_str(), None)
    if not outpoints:
        self._prevouts_by_scripthash.pop(scripthash)
|
|
|
|
|
|
|
|
|
|
@locked
def get_prevouts_by_scripthash(self, scripthash: str) -> Set[Tuple[TxOutpoint, int]]:
    """Return the set of (TxOutpoint, value) indexed under `scripthash`."""
    assert isinstance(scripthash, str)
    stored = self._prevouts_by_scripthash.get(scripthash, {})
    result = set()
    for outpoint_str, value in stored.items():
        result.add((TxOutpoint.from_str(outpoint_str), value))
    return result
|
2020-02-03 12:35:50 +01:00
|
|
|
|
|
|
|
|
@modifier
def add_transaction(self, tx_hash: str, tx: Transaction) -> None:
    """Store `tx` under `tx_hash`; never overwrites a complete tx with a partial one.

    Raises if tx_hash is falsy or inconsistent with tx.txid().
    """
    assert isinstance(tx_hash, str)
    assert isinstance(tx, Transaction), tx
    # note that tx might be a PartialTransaction
    # serialize and de-serialize tx now. this might e.g. convert a complete PartialTx to a Tx
    tx = tx_from_any(str(tx))
    if not tx_hash:
        raise Exception("trying to add tx to db without txid")
    if tx_hash != tx.txid():
        raise Exception(f"trying to add tx to db with inconsistent txid: {tx_hash} != {tx.txid()}")
    # don't allow overwriting complete tx with partial tx
    existing = self.transactions.get(tx_hash, None)
    if existing is None or isinstance(existing, PartialTransaction):
        self.transactions[tx_hash] = tx
|
|
|
|
|
|
|
|
|
|
@modifier
def remove_transaction(self, tx_hash: str) -> Optional[Transaction]:
    """Delete and return the stored tx, or None if absent."""
    assert isinstance(tx_hash, str)
    return self.transactions.pop(tx_hash, None)
|
|
|
|
|
|
|
|
|
|
@locked
def get_transaction(self, tx_hash: Optional[str]) -> Optional[Transaction]:
    """Return the stored tx for `tx_hash`, tolerating tx_hash=None."""
    if tx_hash is None:
        return None
    assert isinstance(tx_hash, str)
    return self.transactions.get(tx_hash)
|
|
|
|
|
|
|
|
|
|
@locked
def list_transactions(self) -> Sequence[str]:
    """Return all stored txids."""
    return [txid for txid in self.transactions]
|
|
|
|
|
|
|
|
|
|
@locked
def get_history(self) -> Sequence[str]:
    """Return all addresses that have a history entry."""
    return [addr for addr in self.history]
|
|
|
|
|
|
2020-02-24 18:26:49 +01:00
|
|
|
def is_addr_in_history(self, addr: str) -> bool:
    """Whether `addr` has a history entry (does not mean history is non-empty!)."""
    assert isinstance(addr, str)
    return addr in self.history
|
|
|
|
|
|
|
|
|
|
@locked
def get_addr_history(self, addr: str) -> Sequence[Tuple[str, int]]:
    """Return the (txid, height) history of `addr` (empty list if unknown)."""
    assert isinstance(addr, str)
    return self.history.get(addr, [])
|
|
|
|
|
|
|
|
|
|
@modifier
def set_addr_history(self, addr: str, hist) -> None:
    """Replace the stored history of `addr` with `hist`."""
    assert isinstance(addr, str)
    self.history[addr] = hist
|
|
|
|
|
|
|
|
|
|
@modifier
def remove_addr_history(self, addr: str) -> None:
    """Forget the stored history of `addr` (no-op if absent)."""
    assert isinstance(addr, str)
    self.history.pop(addr, None)
|
|
|
|
|
|
|
|
|
|
@locked
def list_verified_tx(self) -> Sequence[str]:
    """Return all txids with verified (SPV-confirmed) mining info."""
    return [txid for txid in self.verified_tx]
|
|
|
|
|
|
|
|
|
|
@locked
def get_verified_tx(self, txid: str) -> Optional[TxMinedInfo]:
    """Return the stored TxMinedInfo for `txid` (conf unset), or None."""
    assert isinstance(txid, str)
    if txid not in self.verified_tx:
        return None
    height, timestamp, txpos, header_hash = self.verified_tx[txid]
    return TxMinedInfo(
        height=height,
        conf=None,  # confirmations are derived from chain tip, not stored
        timestamp=timestamp,
        txpos=txpos,
        header_hash=header_hash,
    )
|
|
|
|
|
|
|
|
|
|
@modifier
|
2020-02-24 18:26:49 +01:00
|
|
|
def add_verified_tx(self, txid: str, info: TxMinedInfo):
    """Record *txid* as SPV-verified, persisting (height, timestamp, txpos, header_hash)."""
    assert isinstance(txid, str)
    assert isinstance(info, TxMinedInfo)
    entry = (info.height, info.timestamp, info.txpos, info.header_hash)
    self.verified_tx[txid] = entry
|
|
|
|
|
|
|
|
|
|
@modifier
|
2020-02-24 18:26:49 +01:00
|
|
|
def remove_verified_tx(self, txid: str):
    """Forget the verification info for *txid* (no-op if absent)."""
    assert isinstance(txid, str)
    if txid in self.verified_tx:
        del self.verified_tx[txid]
|
|
|
|
|
|
2020-02-24 18:26:49 +01:00
|
|
|
def is_in_verified_tx(self, txid: str) -> bool:
    """Whether *txid* has stored SPV verification info."""
    assert isinstance(txid, str)
    return txid in self.verified_tx
|
|
|
|
|
|
|
|
|
|
@modifier
|
|
|
|
|
def add_tx_fee_from_server(self, txid: str, fee_sat: Optional[int]) -> None:
    """Record a server-reported fee for *txid*.

    Calling with fee_sat=None removes a previously saved server value.
    A fee we calculated ourselves takes precedence and is never overwritten.
    """
    assert isinstance(txid, str)
    existing = self.tx_fees.get(txid)
    if existing is None:
        existing = self.tx_fees[txid] = TxFeesValue()
    if existing.is_calculated_by_us:
        # our own calculation is more trustworthy than the server's claim
        return
    self.tx_fees[txid] = existing._replace(fee=fee_sat, is_calculated_by_us=False)
|
|
|
|
|
|
|
|
|
|
@modifier
|
|
|
|
|
def add_tx_fee_we_calculated(self, txid: str, fee_sat: Optional[int]) -> None:
    """Record a fee we computed ourselves for *txid*; no-op if fee_sat is None."""
    assert isinstance(txid, str)
    if fee_sat is None:
        return
    assert isinstance(fee_sat, int)
    current = self.tx_fees.get(txid, TxFeesValue())
    self.tx_fees[txid] = current._replace(fee=fee_sat, is_calculated_by_us=True)
|
|
|
|
|
|
|
|
|
|
@locked
|
2020-02-24 18:26:49 +01:00
|
|
|
def get_tx_fee(self, txid: str, *, trust_server: bool = False) -> Optional[int]:
    """Return the fee (in sat) for *txid*, or None if unknown.

    A fee that only came from a server is returned only when *trust_server*
    is set; a fee we calculated ourselves is always returned.
    """
    # fix: the one-line docstring used to sit after the first statement,
    # where it was a no-op string expression rather than the docstring.
    assert isinstance(txid, str)
    tx_fees_value = self.tx_fees.get(txid)
    if tx_fees_value is None:
        return None
    if not trust_server and not tx_fees_value.is_calculated_by_us:
        return None
    return tx_fees_value.fee
|
|
|
|
|
|
|
|
|
|
@modifier
|
|
|
|
|
def add_num_inputs_to_tx(self, txid: str, num_inputs: int) -> None:
    """Store the total number of inputs of *txid*."""
    assert isinstance(txid, str)
    assert isinstance(num_inputs, int)
    current = self.tx_fees.get(txid, TxFeesValue())
    self.tx_fees[txid] = current._replace(num_inputs=num_inputs)
|
|
|
|
|
|
|
|
|
|
@locked
|
|
|
|
|
def get_num_all_inputs_of_tx(self, txid: str) -> Optional[int]:
    """Return the total input count stored for *txid*, or None if unknown."""
    assert isinstance(txid, str)
    entry = self.tx_fees.get(txid)
    return entry.num_inputs if entry is not None else None
|
|
|
|
|
|
|
|
|
|
@locked
|
|
|
|
|
def get_num_ismine_inputs_of_tx(self, txid: str) -> int:
    """Return how many inputs of *txid* spend outputs belonging to us (per self.txi)."""
    assert isinstance(txid, str)
    txins = self.txi.get(txid, {})
    # iterate values directly: the address keys were unused, and a generator
    # avoids building a throwaway intermediate list
    return sum(len(tupls) for tupls in txins.values())
|
|
|
|
|
|
|
|
|
|
@modifier
|
2020-02-24 18:26:49 +01:00
|
|
|
def remove_tx_fee(self, txid: str) -> None:
    """Forget any stored fee info for *txid* (no-op if absent)."""
    assert isinstance(txid, str)
    if txid in self.tx_fees:
        del self.tx_fees[txid]
|
|
|
|
|
|
|
|
|
|
@locked
|
2020-02-24 18:26:49 +01:00
|
|
|
def num_change_addresses(self) -> int:
    """Number of change addresses stored in the db."""
    return len(self.change_addresses)
|
|
|
|
|
|
|
|
|
|
@locked
|
2020-02-24 18:26:49 +01:00
|
|
|
def num_receiving_addresses(self) -> int:
    """Number of receiving addresses stored in the db."""
    return len(self.receiving_addresses)
|
|
|
|
|
|
|
|
|
|
@locked
|
2020-02-24 18:26:49 +01:00
|
|
|
def get_change_addresses(self, *, slice_start=None, slice_stop=None) -> List[str]:
    """Return the change addresses, optionally sliced.

    Slicing produces a shallow copy, so callers cannot mutate the stored list.
    """
    return self.change_addresses[slice(slice_start, slice_stop)]
|
|
|
|
|
|
|
|
|
|
@locked
|
2020-02-24 18:26:49 +01:00
|
|
|
def get_receiving_addresses(self, *, slice_start=None, slice_stop=None) -> List[str]:
    """Return the receiving addresses, optionally sliced.

    Slicing produces a shallow copy, so callers cannot mutate the stored list.
    """
    return self.receiving_addresses[slice(slice_start, slice_stop)]
|
|
|
|
|
|
|
|
|
|
@modifier
|
2020-02-24 18:26:49 +01:00
|
|
|
def add_change_address(self, addr: str) -> None:
    """Append *addr* to the change addresses and index it as (1, position)."""
    assert isinstance(addr, str)
    next_pos = len(self.change_addresses)
    self._addr_to_addr_index[addr] = (1, next_pos)
    self.change_addresses.append(addr)
|
|
|
|
|
|
|
|
|
|
@modifier
|
2020-02-24 18:26:49 +01:00
|
|
|
def add_receiving_address(self, addr: str) -> None:
    """Append *addr* to the receiving addresses and index it as (0, position)."""
    assert isinstance(addr, str)
    next_pos = len(self.receiving_addresses)
    self._addr_to_addr_index[addr] = (0, next_pos)
    self.receiving_addresses.append(addr)
|
|
|
|
|
|
|
|
|
|
@locked
|
2020-02-24 18:26:49 +01:00
|
|
|
def get_address_index(self, address: str) -> Optional[Sequence[int]]:
    """Return the (is_change, index) pair for *address*, or None if unknown."""
    assert isinstance(address, str)
    index = self._addr_to_addr_index.get(address)
    return index
|
|
|
|
|
|
|
|
|
|
@modifier
|
2020-02-24 18:26:49 +01:00
|
|
|
def add_imported_address(self, addr: str, d: dict) -> None:
    """Store metadata dict *d* for imported address *addr*."""
    assert isinstance(addr, str)
    self.imported_addresses[addr] = d
|
|
|
|
|
|
|
|
|
|
@modifier
|
2020-02-24 18:26:49 +01:00
|
|
|
def remove_imported_address(self, addr: str) -> None:
    """Delete imported address *addr*; raises KeyError if it is not present."""
    assert isinstance(addr, str)
    del self.imported_addresses[addr]
|
|
|
|
|
|
|
|
|
|
@locked
|
|
|
|
|
def has_imported_address(self, addr: str) -> bool:
    """Whether *addr* is one of the imported addresses."""
    assert isinstance(addr, str)
    return addr in self.imported_addresses
|
|
|
|
|
|
|
|
|
|
@locked
|
|
|
|
|
def get_imported_addresses(self) -> Sequence[str]:
    """Return all imported addresses, sorted."""
    # sorted() already returns a new list; the extra list() wrapper was redundant
    return sorted(self.imported_addresses)
|
|
|
|
|
|
|
|
|
|
@locked
|
2020-02-24 18:26:49 +01:00
|
|
|
def get_imported_address(self, addr: str) -> Optional[dict]:
    """Return the metadata dict stored for imported address *addr*, or None."""
    assert isinstance(addr, str)
    meta = self.imported_addresses.get(addr)
    return meta
|
|
|
|
|
|
|
|
|
|
def load_addresses(self, wallet_type):
    """ called from Abstract_Wallet.__init__ """
    # Bind the db's 'addresses' section to attributes on self. Note that the
    # lists/dicts are aliased, not copied: mutating self.change_addresses etc.
    # mutates self.data in place.
    if wallet_type == 'imported':
        # imported wallets: flat dict of address -> per-address metadata
        self.imported_addresses = self.get_dict('addresses')  # type: Dict[str, dict]
    else:
        # deterministic wallets: 'addresses' holds two ordered lists
        self.get_dict('addresses')  # ensure the section exists
        for name in ['receiving', 'change']:
            if name not in self.data['addresses']:
                self.data['addresses'][name] = []
        self.change_addresses = self.data['addresses']['change']
        self.receiving_addresses = self.data['addresses']['receiving']
        # in-memory reverse index (not persisted), rebuilt on every load
        self._addr_to_addr_index = {}  # type: Dict[str, Sequence[int]]  # key: address, value: (is_change, index)
        for i, addr in enumerate(self.receiving_addresses):
            self._addr_to_addr_index[addr] = (0, i)
        for i, addr in enumerate(self.change_addresses):
            self._addr_to_addr_index[addr] = (1, i)
|
|
|
|
|
|
|
|
|
|
@profiler
|
2023-08-18 15:13:33 +02:00
|
|
|
def load_transactions(self):
    """Bind the tx-related db sections to attributes and prune dangling entries.

    The attributes below are aliases into self.data (not copies), so mutating
    them mutates the persisted state.
    """
    # references in self.data
    # TODO make all these private
    # txid -> address -> prev_outpoint -> value
    self.txi = self.get_dict('txi')  # type: Dict[str, Dict[str, Dict[str, int]]]
    # txid -> address -> output_index -> (value, is_coinbase)
    self.txo = self.get_dict('txo')  # type: Dict[str, Dict[str, Dict[str, Tuple[int, bool]]]]
    self.transactions = self.get_dict('transactions')  # type: Dict[str, Transaction]
    self.spent_outpoints = self.get_dict('spent_outpoints')  # txid -> output_index -> next_txid
    self.history = self.get_dict('addr_history')  # address -> list of (txid, height)
    self.verified_tx = self.get_dict('verified_tx3')  # txid -> (height, timestamp, txpos, header_hash)
    self.tx_fees = self.get_dict('tx_fees')  # type: Dict[str, TxFeesValue]
    # scripthash -> outpoint -> value
    self._prevouts_by_scripthash = self.get_dict('prevouts_by_scripthash')  # type: Dict[str, Dict[str, int]]
    # remove unreferenced tx
    # iterate over a snapshot of the keys since we pop while iterating
    for tx_hash in list(self.transactions.keys()):
        if not self.get_txi_addresses(tx_hash) and not self.get_txo_addresses(tx_hash):
            self.logger.info(f"removing unreferenced tx: {tx_hash}")
            self.transactions.pop(tx_hash)
    # remove unreferenced outpoints
    for prevout_hash in self.spent_outpoints.keys():
        d = self.spent_outpoints[prevout_hash]
        # snapshot of d.items() for the same reason: d is mutated below
        for prevout_n, spending_txid in list(d.items()):
            if spending_txid not in self.transactions:
                self.logger.info("removing unreferenced spent outpoint")
                d.pop(prevout_n)
|
|
|
|
|
|
|
|
|
|
@modifier
|
|
|
|
|
def clear_history(self):
    """Empty all tx/address history containers (each is cleared in place)."""
    containers = (
        self.txi,
        self.txo,
        self.spent_outpoints,
        self.transactions,
        self.history,
        self.verified_tx,
        self.tx_fees,
        self._prevouts_by_scripthash,
    )
    for container in containers:
        container.clear()
|
2020-02-04 13:35:58 +01:00
|
|
|
|
2020-09-04 16:11:01 +02:00
|
|
|
def _should_convert_to_stored_dict(self, key) -> bool:
|
|
|
|
|
if key == 'keystore':
|
|
|
|
|
return False
|
2020-02-04 12:45:31 +01:00
|
|
|
multisig_keystore_names = [('x%d' % i) for i in range(1, 16)]
|
2020-09-04 16:11:01 +02:00
|
|
|
if key in multisig_keystore_names:
|
|
|
|
|
return False
|
|
|
|
|
return True
|
|
|
|
|
|
2023-08-18 15:13:33 +02:00
|
|
|
@classmethod
|
|
|
|
|
def split_accounts(cls, root_path, split_data):
    """Write each entry of *split_data* to its own wallet file.

    Each file is named root_path + '.' + suffix; returns the list of paths written.
    """
    from .storage import WalletStorage
    created_paths = []
    for account_data in split_data:
        new_path = root_path + '.' + account_data['suffix']
        new_storage = WalletStorage(new_path)
        db = WalletDB(json.dumps(account_data), storage=new_storage, upgrade=True)
        db.write()
        created_paths.append(new_path)
    return created_paths
|
2020-02-05 15:13:37 +01:00
|
|
|
|
|
|
|
|
def get_action(self):
    """Return whatever the 'get_action' plugin hook reports for this db (None if nothing)."""
    return run_hook('get_action', self)
|
|
|
|
|
|
|
|
|
|
def load_plugins(self):
    """Invoke the plugin loader registered for this wallet's type, if any."""
    wtype = self.get('wallet_type')
    if wtype in plugin_loaders:
        plugin_loaders[wtype]()
|
|
|
|
|
|
|
|
|
|
def set_keystore_encryption(self, enable):
    """Persist the 'use_encryption' flag in the db."""
    self.put('use_encryption', enable)
|