2018-10-25 19:34:31 +02:00
|
|
|
# Copyright (C) 2018 The Electrum developers
|
|
|
|
|
# Distributed under the MIT software license, see the accompanying
|
|
|
|
|
# file LICENCE or http://www.opensource.org/licenses/mit-license.php
|
|
|
|
|
|
2025-02-14 14:12:12 +01:00
|
|
|
from typing import TYPE_CHECKING
|
2018-10-30 20:18:26 +01:00
|
|
|
from enum import IntEnum, auto
|
2018-07-16 17:01:18 +02:00
|
|
|
|
2025-02-14 14:12:12 +01:00
|
|
|
from .util import log_exceptions, ignore_exceptions, TxMinedInfo, BelowDustLimit
|
2025-02-24 12:20:44 +01:00
|
|
|
from .util import EventListener, event_listener
|
2022-06-04 16:48:19 +02:00
|
|
|
from .address_synchronizer import AddressSynchronizer, TX_HEIGHT_LOCAL, TX_HEIGHT_UNCONF_PARENT, TX_HEIGHT_UNCONFIRMED, TX_HEIGHT_FUTURE
|
2025-02-14 14:12:12 +01:00
|
|
|
from .transaction import Transaction, TxOutpoint
|
2022-06-01 23:03:35 +02:00
|
|
|
from .logging import Logger
|
2022-04-28 10:21:47 +02:00
|
|
|
|
2018-07-16 17:01:18 +02:00
|
|
|
|
2018-10-22 15:35:57 +02:00
|
|
|
if TYPE_CHECKING:
|
|
|
|
|
from .network import Network
|
2020-02-16 14:45:04 +01:00
|
|
|
from .lnsweep import SweepInfo
|
2020-03-30 03:49:50 +02:00
|
|
|
from .lnworker import LNWallet
|
2025-03-11 18:13:32 +01:00
|
|
|
from .lnchannel import AbstractChannel
|
2018-07-16 17:01:18 +02:00
|
|
|
|
2018-10-30 20:18:26 +01:00
|
|
|
class TxMinedDepth(IntEnum):
    """How deeply a transaction is buried, from our point of view.

    IntEnum because we call min() in get_deepest_tx_mined_depth_for_txids,
    so the numeric ordering (DEEP < SHALLOW < MEMPOOL < FREE) is significant.
    """
    DEEP = 1      # confirmed with many confirmations
    SHALLOW = 2   # confirmed, but only recently
    MEMPOOL = 3   # seen, not yet confirmed
    FREE = 4      # not seen at all / only local
|
2018-06-22 10:57:11 +02:00
|
|
|
|
|
|
|
|
|
2022-06-16 12:05:05 +02:00
|
|
|
class LNWatcher(Logger, EventListener):
|
2022-06-01 23:03:35 +02:00
|
|
|
|
2019-10-30 03:24:26 +01:00
|
|
|
LOGGING_SHORTCUT = 'W'
|
2018-07-09 00:15:55 +02:00
|
|
|
|
2023-03-29 22:09:46 +00:00
|
|
|
    def __init__(self, adb: 'AddressSynchronizer', network: 'Network'):
        """Watch channel funding outpoints via *adb* and react to network events.

        Registers this instance's event listeners as a side effect, so event
        handlers may start firing as soon as construction completes.
        """
        Logger.__init__(self)
        # address DB used for all on-chain queries (tx heights, spenders, ...)
        self.adb = adb
        self.config = network.config
        self.callbacks = {}  # address -> lambda: coroutine
        self.network = network
        # NOTE: registers the @event_listener methods; must come after the
        # attributes above, as handlers read self.adb / self.callbacks
        self.register_callbacks()
        # status gets populated when we run
        self.channel_status = {}  # funding outpoint -> status string
|
|
|
|
|
|
2021-03-09 17:52:36 +01:00
|
|
|
    async def stop(self):
        """Unregister the event listeners; the watcher stops reacting to events."""
        self.unregister_callbacks()
|
2020-05-01 04:39:49 +02:00
|
|
|
|
2019-02-28 06:08:58 +01:00
|
|
|
def get_channel_status(self, outpoint):
|
|
|
|
|
return self.channel_status.get(outpoint, 'unknown')
|
2018-10-12 14:53:22 +02:00
|
|
|
|
2019-10-29 20:39:58 +01:00
|
|
|
def add_channel(self, outpoint: str, address: str) -> None:
|
|
|
|
|
assert isinstance(outpoint, str)
|
|
|
|
|
assert isinstance(address, str)
|
2020-05-20 13:49:44 +02:00
|
|
|
cb = lambda: self.check_onchain_situation(address, outpoint)
|
|
|
|
|
self.add_callback(address, cb)
|
2018-09-12 16:17:10 +02:00
|
|
|
|
2019-06-18 13:49:31 +02:00
|
|
|
    async def unwatch_channel(self, address, funding_outpoint):
        """Stop watching a channel whose on-chain story has fully played out."""
        self.logger.info(f'unwatching {funding_outpoint}')
        self.remove_callback(address)
|
|
|
|
|
|
|
|
|
|
def remove_callback(self, address):
|
|
|
|
|
self.callbacks.pop(address, None)
|
|
|
|
|
|
|
|
|
|
    def add_callback(self, address, callback):
        """Subscribe *address* in the address DB and register *callback* for it."""
        self.adb.add_address(address)
        self.callbacks[address] = callback
|
2018-12-04 20:50:24 +01:00
|
|
|
|
2022-06-16 12:05:05 +02:00
|
|
|
    @event_listener
    async def on_event_blockchain_updated(self, *args):
        """A new block arrived: re-check all watched channels."""
        await self.trigger_callbacks()
|
|
|
|
|
|
2025-02-14 14:12:12 +01:00
|
|
|
@event_listener
|
|
|
|
|
async def on_event_wallet_updated(self, wallet):
|
|
|
|
|
# called if we add local tx
|
|
|
|
|
if wallet.adb != self.adb:
|
|
|
|
|
return
|
|
|
|
|
await self.trigger_callbacks()
|
|
|
|
|
|
2022-06-16 12:05:05 +02:00
|
|
|
@event_listener
|
|
|
|
|
async def on_event_adb_added_verified_tx(self, adb, tx_hash):
|
2022-06-01 23:03:35 +02:00
|
|
|
if adb != self.adb:
|
|
|
|
|
return
|
|
|
|
|
await self.trigger_callbacks()
|
|
|
|
|
|
2022-06-16 12:05:05 +02:00
|
|
|
@event_listener
|
|
|
|
|
async def on_event_adb_set_up_to_date(self, adb):
|
2022-06-01 23:03:35 +02:00
|
|
|
if adb != self.adb:
|
|
|
|
|
return
|
|
|
|
|
await self.trigger_callbacks()
|
|
|
|
|
|
2018-10-12 19:40:12 +02:00
|
|
|
@log_exceptions
|
2022-06-01 23:03:35 +02:00
|
|
|
async def trigger_callbacks(self):
|
|
|
|
|
if not self.adb.synchronizer:
|
2019-05-02 17:59:11 +02:00
|
|
|
self.logger.info("synchronizer not set yet")
|
2018-09-12 16:17:10 +02:00
|
|
|
return
|
2020-05-20 13:49:44 +02:00
|
|
|
for address, callback in list(self.callbacks.items()):
|
|
|
|
|
await callback()
|
2018-07-16 17:01:18 +02:00
|
|
|
|
2018-12-04 20:50:24 +01:00
|
|
|
    async def check_onchain_situation(self, address, funding_outpoint):
        """Inspect the chain state of one channel and push it to update_channel_state.

        Per-channel callback registered by add_channel; runs on every
        trigger_callbacks pass. Stops watching the channel once nothing
        further can happen on-chain (keep_watching is False).
        """
        # early return if address has not been added yet
        if not self.adb.is_mine(address):
            return
        # get_spender (called below on previous passes) may have subscribed new
        # addresses; wait until the DB has caught up before drawing conclusions
        if not self.adb.is_up_to_date():
            return
        funding_txid = funding_outpoint.split(':')[0]
        funding_height = self.adb.get_tx_height(funding_txid)
        # txid of the tx spending the funding output, i.e. the closing tx (or None)
        closing_txid = self.get_spender(funding_outpoint)
        closing_height = self.adb.get_tx_height(closing_txid)
        if closing_txid:
            closing_tx = self.adb.get_transaction(closing_txid)
            if closing_tx:
                # channel is closed: try to sweep our outputs from the commitment tx
                keep_watching = await self.sweep_commitment_transaction(funding_outpoint, closing_tx)
            else:
                # we know the spender txid but don't have the tx body yet
                self.logger.info(f"channel {funding_outpoint} closed by {closing_txid}. still waiting for tx itself...")
                keep_watching = True
        else:
            # channel still open (or funding not spent yet): keep watching
            keep_watching = True
        await self.update_channel_state(
            funding_outpoint=funding_outpoint,
            funding_txid=funding_txid,
            funding_height=funding_height,
            closing_txid=closing_txid,
            closing_height=closing_height,
            keep_watching=keep_watching)
        if not keep_watching:
            await self.unwatch_channel(address, funding_outpoint)
|
2019-09-11 11:58:28 +02:00
|
|
|
|
lightning: change derivation of funding_pubkey
Ideally, given an on-chain backup, after the remote force-closes, we should be able to spend our anchor output,
to CPFP the remote commitment tx (assuming the channel used OPTION_ANCHORS).
To spend the anchor output, we need to be able to sign with the local funding_privkey.
Previously we derived the funding_key from the channel_seed (which comes from os.urandom).
Prior to anchors, there was no use case for signing with the funding_key given a channel backup.
Now with anchors, we should make its derivation deterministic somehow, in a way so that it can
be derived given just an on-chain backup.
- one way would be to put some more data into the existing OP_RETURN
- uses block space
- the OP_RETURNs can be disabled via "use_recoverable_channels"
- only the initiator can use OP_RETURNs (so what if channel is in incoming dir?)
- instead, new scheme for our funding_key:
- we derive the funding_privkey from the lnworker root secret (derived from our bip32 seed)
- for outgoing channels:
- lnworker_root_secret + remote_node_id + funding_tx_nlocktime
- for incoming channels:
- lnworker_root_secret + remote_node_id + remote_funding_pubkey
- a check is added to avoid reusing the same key between channels:
not letting to user open more than one channel with the same peer in a single block
- only the first 16 bytes of the remote_node_id are used, as the onchain backup OP_RETURNs only contain that
- as the funding_privkey cannot be derived from the channel_seed anymore, it is included in the
imported channel backups, which in turn need a new version defined
- a wallet db upgrade is used to update already stored imported cbs
- alternatively we could keep the imported cbs as-is, so no new version, no new funding_privkey field, as it is clearly somewhat redundant given on-chain backups can reconstruct it
- however adding the field seems easier
- otherwise the existing code would try to derive the funding_privkey from the channel_seed
- also note: atm there is no field in the imported backups to distinguish anchor channels vs static-remotekey channels
2025-01-14 16:14:01 +00:00
|
|
|
    async def sweep_commitment_transaction(self, funding_outpoint: str, closing_tx: Transaction) -> bool:
        """Handle a closed channel's commitment tx; return whether to keep watching."""
        raise NotImplementedError()  # implemented by subclasses
|
2020-02-16 14:26:07 +01:00
|
|
|
|
2020-04-13 15:57:53 +02:00
|
|
|
    async def update_channel_state(self, *, funding_outpoint: str, funding_txid: str,
                                   funding_height: TxMinedInfo, closing_txid: str,
                                   closing_height: TxMinedInfo, keep_watching: bool) -> None:
        """Propagate the observed on-chain state of a channel to its owner."""
        raise NotImplementedError()  # implemented by subclasses
|
2018-12-04 20:50:24 +01:00
|
|
|
|
2024-12-10 13:28:10 +01:00
|
|
|
|
|
|
|
|
def get_spender(self, outpoint) -> str:
|
|
|
|
|
"""
|
|
|
|
|
returns txid spending outpoint.
|
|
|
|
|
subscribes to addresses as a side effect.
|
|
|
|
|
"""
|
|
|
|
|
prev_txid, index = outpoint.split(':')
|
|
|
|
|
spender_txid = self.adb.db.get_spent_outpoint(prev_txid, int(index))
|
2025-02-14 14:12:12 +01:00
|
|
|
# discard local spenders
|
|
|
|
|
tx_mined_status = self.adb.get_tx_height(spender_txid)
|
|
|
|
|
if tx_mined_status.height in [TX_HEIGHT_LOCAL, TX_HEIGHT_FUTURE]:
|
|
|
|
|
spender_txid = None
|
2024-12-10 13:28:10 +01:00
|
|
|
if not spender_txid:
|
|
|
|
|
return
|
|
|
|
|
spender_tx = self.adb.get_transaction(spender_txid)
|
|
|
|
|
for i, o in enumerate(spender_tx.outputs()):
|
|
|
|
|
if o.address is None:
|
|
|
|
|
continue
|
|
|
|
|
if not self.adb.is_mine(o.address):
|
|
|
|
|
self.adb.add_address(o.address)
|
|
|
|
|
return spender_txid
|
|
|
|
|
|
2019-07-05 14:42:09 +02:00
|
|
|
def get_tx_mined_depth(self, txid: str):
|
|
|
|
|
if not txid:
|
|
|
|
|
return TxMinedDepth.FREE
|
2022-06-01 23:03:35 +02:00
|
|
|
tx_mined_depth = self.adb.get_tx_height(txid)
|
2019-07-05 14:42:09 +02:00
|
|
|
height, conf = tx_mined_depth.height, tx_mined_depth.conf
|
2025-03-11 18:13:32 +01:00
|
|
|
if conf > 20:
|
2019-07-05 14:42:09 +02:00
|
|
|
return TxMinedDepth.DEEP
|
|
|
|
|
elif conf > 0:
|
|
|
|
|
return TxMinedDepth.SHALLOW
|
|
|
|
|
elif height in (TX_HEIGHT_UNCONFIRMED, TX_HEIGHT_UNCONF_PARENT):
|
|
|
|
|
return TxMinedDepth.MEMPOOL
|
2022-06-04 16:48:19 +02:00
|
|
|
elif height in (TX_HEIGHT_LOCAL, TX_HEIGHT_FUTURE):
|
2019-07-05 14:42:09 +02:00
|
|
|
return TxMinedDepth.FREE
|
|
|
|
|
elif height > 0 and conf == 0:
|
|
|
|
|
# unverified but claimed to be mined
|
|
|
|
|
return TxMinedDepth.MEMPOOL
|
|
|
|
|
else:
|
|
|
|
|
raise NotImplementedError()
|
|
|
|
|
|
2020-02-16 18:54:27 +01:00
|
|
|
    def is_deeply_mined(self, txid):
        """Return True if *txid* has enough confirmations to be considered final."""
        return self.get_tx_mined_depth(txid) == TxMinedDepth.DEEP
|
|
|
|
|
|
2019-07-05 14:42:09 +02:00
|
|
|
|
2020-02-24 12:01:54 +01:00
|
|
|
|
2020-02-16 14:26:07 +01:00
|
|
|
class LNWalletWatcher(LNWatcher):
|
|
|
|
|
|
2020-03-30 03:49:50 +02:00
|
|
|
    def __init__(self, lnworker: 'LNWallet', network: 'Network'):
        """Channel watcher bound to a specific LNWallet's address DB."""
        self.network = network
        self.lnworker = lnworker
        # NOTE(review): base __init__ calls register_callbacks, which may start
        # delivering events — presumably why self.lnworker is set first; confirm
        # before reordering
        LNWatcher.__init__(self, lnworker.wallet.adb, network)
|
2020-09-18 20:54:09 +02:00
|
|
|
|
2024-10-23 15:28:01 +02:00
|
|
|
    @event_listener
    async def on_event_blockchain_updated(self, *args):
        """New block: invalidate per-channel sweep caches, then re-check channels.

        Overloads the parent handler with cache invalidation: we invalidate
        on each new block because some processes affect the list of sweep
        transactions (hold invoice preimage revealed, MPP completed, etc).
        """
        for chan in self.lnworker.channels.values():
            chan._sweep_info.clear()
        await self.trigger_callbacks()
|
|
|
|
|
|
2020-09-18 20:54:09 +02:00
|
|
|
def diagnostic_name(self):
|
|
|
|
|
return f"{self.lnworker.wallet.diagnostic_name()}-LNW"
|
2020-02-16 14:26:07 +01:00
|
|
|
|
|
|
|
|
    @ignore_exceptions
    @log_exceptions
    async def update_channel_state(self, *, funding_outpoint: str, funding_txid: str,
                                   funding_height: TxMinedInfo, closing_txid: str,
                                   closing_height: TxMinedInfo, keep_watching: bool) -> None:
        """Forward the observed on-chain state to the matching channel object.

        Silently ignores outpoints that do not belong to one of our channels.
        """
        chan = self.lnworker.channel_by_txo(funding_outpoint)
        if not chan:
            return
        chan.update_onchain_state(
            funding_txid=funding_txid,
            funding_height=funding_height,
            closing_txid=closing_txid,
            closing_height=closing_height,
            keep_watching=keep_watching)
        # let lnworker react (e.g. update channel lifecycle state)
        await self.lnworker.handle_onchain_state(chan)
|
2020-02-16 14:26:07 +01:00
|
|
|
|
2022-06-04 16:48:19 +02:00
|
|
|
    @log_exceptions
    async def sweep_commitment_transaction(self, funding_outpoint, closing_tx) -> bool:
        """This function is called when a channel was closed. In this case
        we need to check for redeemable outputs of the commitment transaction
        or spenders down the line (HTLC-timeout/success transactions).

        Returns whether we should continue to monitor."""
        chan = self.lnworker.channel_by_txo(funding_outpoint)
        if not chan:
            return False
        # detect who closed and get information about how to claim outputs
        sweep_info_dict = chan.sweep_ctx(closing_tx)
        #self.logger.info(f"do_breach_remedy: {[x.name for x in sweep_info_dict.values()]}")
        # with nothing to sweep, we only keep watching until the closing tx is deep
        keep_watching = False if sweep_info_dict else not self.is_deeply_mined(closing_tx.txid())
        # create and broadcast transactions
        for prevout, sweep_info in sweep_info_dict.items():
            prev_txid, prev_index = prevout.split(':')
            name = sweep_info.name + ' ' + chan.get_id_for_log()
            self.lnworker.wallet.set_default_label(prevout, name)
            if not self.adb.get_transaction(prev_txid):
                # do not keep watching if prevout does not exist
                self.logger.info(f'prevout does not exist for {name}: {prevout}')
                continue
            spender_txid = self.get_spender(prevout)
            spender_tx = self.adb.get_transaction(spender_txid) if spender_txid else None
            if spender_tx:
                # the spender might be the remote, revoked or not
                # check for second-stage HTLC outputs we may be able to claim
                htlc_sweepinfo = chan.maybe_sweep_htlcs(closing_tx, spender_tx)
                for prevout2, htlc_sweep_info in htlc_sweepinfo.items():
                    htlc_tx_spender = self.get_spender(prevout2)
                    self.lnworker.wallet.set_default_label(prevout2, htlc_sweep_info.name)
                    if htlc_tx_spender:
                        # already spent: watch until that spend is deep
                        keep_watching |= not self.is_deeply_mined(htlc_tx_spender)
                    else:
                        keep_watching |= self.maybe_redeem(htlc_sweep_info)
                keep_watching |= not self.is_deeply_mined(spender_txid)
                # the spender's input may reveal an HTLC preimage we need
                self.maybe_extract_preimage(chan, spender_tx, prevout)
            else:
                # unspent: try to sweep it ourselves
                keep_watching |= self.maybe_redeem(sweep_info)
        return keep_watching
|
2020-02-16 14:26:07 +01:00
|
|
|
|
2025-02-14 14:12:12 +01:00
|
|
|
def maybe_redeem(self, sweep_info: 'SweepInfo') -> bool:
|
|
|
|
|
""" returns False if it was dust """
|
|
|
|
|
try:
|
|
|
|
|
self.lnworker.wallet.txbatcher.add_sweep_input('lnwatcher', sweep_info, self.config.FEE_POLICY_LIGHTNING)
|
|
|
|
|
except BelowDustLimit:
|
|
|
|
|
return False
|
|
|
|
|
return True
|
2025-03-11 18:13:32 +01:00
|
|
|
|
|
|
|
|
    def maybe_extract_preimage(self, chan: 'AbstractChannel', spender_tx: Transaction, prevout: str):
        """Let the channel inspect the txin of *spender_tx* that spends *prevout*.

        The spending input's witness may contain an HTLC preimage;
        extraction itself is delegated to the channel object.
        """
        txin_idx = spender_tx.get_input_idx_that_spent_prevout(TxOutpoint.from_str(prevout))
        # caller guarantees spender_tx actually spends prevout
        assert txin_idx is not None
        spender_txin = spender_tx.inputs()[txin_idx]
        chan.extract_preimage_from_htlc_txin(
            spender_txin,
            is_deeply_mined=self.is_deeply_mined(spender_tx.txid()),
        )
|