2018-10-25 19:34:31 +02:00
|
|
|
# Copyright (C) 2018 The Electrum developers
|
|
|
|
|
# Distributed under the MIT software license, see the accompanying
|
|
|
|
|
# file LICENCE or http://www.opensource.org/licenses/mit-license.php
|
|
|
|
|
|
2018-10-22 15:35:57 +02:00
|
|
|
import asyncio
import copy
from enum import IntEnum, auto
from typing import TYPE_CHECKING, Dict, Iterable, NamedTuple, Optional

from . import util
from .util import log_exceptions, ignore_exceptions, TxMinedInfo
from .util import EventListener, event_listener
from .address_synchronizer import AddressSynchronizer, TX_HEIGHT_LOCAL, TX_HEIGHT_UNCONF_PARENT, TX_HEIGHT_UNCONFIRMED, TX_HEIGHT_FUTURE
from .transaction import Transaction, TxOutpoint, PartialTransaction
from .logging import Logger
from .bitcoin import dust_threshold
from .fee_policy import FeePolicy
|
2022-04-28 10:21:47 +02:00
|
|
|
|
2018-07-16 17:01:18 +02:00
|
|
|
|
2018-10-22 15:35:57 +02:00
|
|
|
if TYPE_CHECKING:
|
|
|
|
|
from .network import Network
|
2020-02-16 14:45:04 +01:00
|
|
|
from .lnsweep import SweepInfo
|
2020-03-30 03:49:50 +02:00
|
|
|
from .lnworker import LNWallet
|
2025-03-11 18:13:32 +01:00
|
|
|
from .lnchannel import AbstractChannel
|
2018-07-16 17:01:18 +02:00
|
|
|
|
2018-10-30 20:18:26 +01:00
|
|
|
class TxMinedDepth(IntEnum):
    """Coarse classification of how deeply a transaction is mined.

    IntEnum because we call min() in get_deepest_tx_mined_depth_for_txids:
    smaller values mean more deeply mined.
    """
    DEEP = 1
    SHALLOW = 2
    MEMPOOL = 3
    FREE = 4
|
2018-06-22 10:57:11 +02:00
|
|
|
|
|
|
|
|
|
2022-06-16 12:05:05 +02:00
|
|
|
class LNWatcher(Logger, EventListener):
|
2022-06-01 23:03:35 +02:00
|
|
|
|
2019-10-30 03:24:26 +01:00
|
|
|
LOGGING_SHORTCUT = 'W'
|
2018-07-09 00:15:55 +02:00
|
|
|
|
2023-03-29 22:09:46 +00:00
|
|
|
    def __init__(self, adb: 'AddressSynchronizer', network: 'Network'):
        """Watch addresses related to Lightning channels via *adb*.

        Registers event callbacks on construction; callers must eventually
        call stop() to unregister them.
        """
        Logger.__init__(self)
        self.adb = adb
        self.config = network.config
        self.callbacks = {}  # address -> lambda: coroutine
        self.network = network
        # register after self.network/self.config are set, as event handlers use them
        self.register_callbacks()
        # status gets populated when we run
        self.channel_status = {}
        self.fee_policy = FeePolicy('eta:2')
|
2019-02-28 06:08:58 +01:00
|
|
|
|
2021-03-09 17:52:36 +01:00
|
|
|
    async def stop(self):
        """Unregister our event callbacks from the event system."""
        self.unregister_callbacks()
|
2020-05-01 04:39:49 +02:00
|
|
|
|
2019-02-28 06:08:58 +01:00
|
|
|
def get_channel_status(self, outpoint):
|
|
|
|
|
return self.channel_status.get(outpoint, 'unknown')
|
2018-10-12 14:53:22 +02:00
|
|
|
|
2019-10-29 20:39:58 +01:00
|
|
|
def add_channel(self, outpoint: str, address: str) -> None:
|
|
|
|
|
assert isinstance(outpoint, str)
|
|
|
|
|
assert isinstance(address, str)
|
2020-05-20 13:49:44 +02:00
|
|
|
cb = lambda: self.check_onchain_situation(address, outpoint)
|
|
|
|
|
self.add_callback(address, cb)
|
2018-09-12 16:17:10 +02:00
|
|
|
|
2019-06-18 13:49:31 +02:00
|
|
|
    async def unwatch_channel(self, address, funding_outpoint):
        """Stop watching a channel (called once there is nothing left to sweep)."""
        self.logger.info(f'unwatching {funding_outpoint}')
        self.remove_callback(address)
|
|
|
|
|
|
|
|
|
|
def remove_callback(self, address):
|
|
|
|
|
self.callbacks.pop(address, None)
|
|
|
|
|
|
|
|
|
|
    def add_callback(self, address, callback):
        """Subscribe *address* in the wallet and register *callback* for it.

        *callback* is a nullary function returning a coroutine; it is awaited
        from trigger_callbacks().
        """
        self.adb.add_address(address)
        self.callbacks[address] = callback
|
2018-12-04 20:50:24 +01:00
|
|
|
|
2022-06-16 12:05:05 +02:00
|
|
|
    @event_listener
    async def on_event_blockchain_updated(self, *args):
        # re-check all watched addresses whenever the blockchain tip changes
        await self.trigger_callbacks()
|
|
|
|
|
|
2022-06-16 12:05:05 +02:00
|
|
|
@event_listener
|
|
|
|
|
async def on_event_adb_added_verified_tx(self, adb, tx_hash):
|
2022-06-01 23:03:35 +02:00
|
|
|
if adb != self.adb:
|
|
|
|
|
return
|
|
|
|
|
await self.trigger_callbacks()
|
|
|
|
|
|
2022-06-16 12:05:05 +02:00
|
|
|
@event_listener
|
|
|
|
|
async def on_event_adb_set_up_to_date(self, adb):
|
2022-06-01 23:03:35 +02:00
|
|
|
if adb != self.adb:
|
|
|
|
|
return
|
|
|
|
|
await self.trigger_callbacks()
|
|
|
|
|
|
2018-10-12 19:40:12 +02:00
|
|
|
@log_exceptions
|
2022-06-01 23:03:35 +02:00
|
|
|
async def trigger_callbacks(self):
|
|
|
|
|
if not self.adb.synchronizer:
|
2019-05-02 17:59:11 +02:00
|
|
|
self.logger.info("synchronizer not set yet")
|
2018-09-12 16:17:10 +02:00
|
|
|
return
|
2020-05-20 13:49:44 +02:00
|
|
|
for address, callback in list(self.callbacks.items()):
|
|
|
|
|
await callback()
|
2018-07-16 17:01:18 +02:00
|
|
|
|
2018-12-04 20:50:24 +01:00
|
|
|
    async def check_onchain_situation(self, address, funding_outpoint):
        """Inspect the funding output of a channel and react if it was spent.

        Runs for each watched funding address from trigger_callbacks();
        stops watching the channel once nothing is left to monitor.
        """
        # early return if address has not been added yet
        if not self.adb.is_mine(address):
            return
        # inspect_tx_candidate might have added new addresses, in which case we return early
        if not self.adb.is_up_to_date():
            return
        funding_txid = funding_outpoint.split(':')[0]
        funding_height = self.adb.get_tx_height(funding_txid)
        closing_txid = self.get_spender(funding_outpoint)
        closing_height = self.adb.get_tx_height(closing_txid)
        if closing_txid:
            closing_tx = self.adb.get_transaction(closing_txid)
            if closing_tx:
                # channel is closed; try to redeem its outputs
                keep_watching = await self.sweep_commitment_transaction(funding_outpoint, closing_tx)
            else:
                # we know the funding output is spent, but do not have the spender tx yet
                self.logger.info(f"channel {funding_outpoint} closed by {closing_txid}. still waiting for tx itself...")
                keep_watching = True
        else:
            # funding output unspent: channel still open
            keep_watching = True
        await self.update_channel_state(
            funding_outpoint=funding_outpoint,
            funding_txid=funding_txid,
            funding_height=funding_height,
            closing_txid=closing_txid,
            closing_height=closing_height,
            keep_watching=keep_watching)
        if not keep_watching:
            await self.unwatch_channel(address, funding_outpoint)
|
2019-09-11 11:58:28 +02:00
|
|
|
|
lightning: change derivation of funding_pubkey
Ideally, given an on-chain backup, after the remote force-closes, we should be able to spend our anchor output,
to CPFP the remote commitment tx (assuming the channel used OPTION_ANCHORS).
To spend the anchor output, we need to be able to sign with the local funding_privkey.
Previously we derived the funding_key from the channel_seed (which comes from os.urandom).
Prior to anchors, there was no use case for signing with the funding_key given a channel backup.
Now with anchors, we should make its derivation deterministic somehow, in a way so that it can
be derived given just an on-chain backup.
- one way would be to put some more data into the existing OP_RETURN
- uses block space
- the OP_RETURNs can be disabled via "use_recoverable_channels"
- only the initiator can use OP_RETURNs (so what if channel is in incoming dir?)
- instead, new scheme for our funding_key:
- we derive the funding_privkey from the lnworker root secret (derived from our bip32 seed)
- for outgoing channels:
- lnworker_root_secret + remote_node_id + funding_tx_nlocktime
- for incoming channels:
- lnworker_root_secret + remote_node_id + remote_funding_pubkey
- a check is added to avoid reusing the same key between channels:
not letting to user open more than one channel with the same peer in a single block
- only the first 16 bytes of the remote_node_id are used, as the onchain backup OP_RETURNs only contain that
- as the funding_privkey cannot be derived from the channel_seed anymore, it is included in the
imported channel backups, which in turn need a new version defined
- a wallet db upgrade is used to update already stored imported cbs
- alternatively we could keep the imported cbs as-is, so no new version, no new funding_privkey field, as it is clearly somewhat redundant given on-chain backups can reconstruct it
- however adding the field seems easier
- otherwise the existing code would try to derive the funding_privkey from the channel_seed
- also note: atm there is no field in the imported backups to distinguish anchor channels vs static-remotekey channels
2025-01-14 16:14:01 +00:00
|
|
|
    async def sweep_commitment_transaction(self, funding_outpoint: str, closing_tx: Transaction) -> bool:
        """Try to redeem outputs of a closed channel; return whether to keep watching."""
        raise NotImplementedError() # implemented by subclasses
|
2020-02-16 14:26:07 +01:00
|
|
|
|
2020-04-13 15:57:53 +02:00
|
|
|
    async def update_channel_state(self, *, funding_outpoint: str, funding_txid: str,
                                   funding_height: TxMinedInfo, closing_txid: str,
                                   closing_height: TxMinedInfo, keep_watching: bool) -> None:
        """Persist/propagate the observed on-chain state of a channel."""
        raise NotImplementedError() # implemented by subclasses
|
2018-12-04 20:50:24 +01:00
|
|
|
|
2024-12-10 13:28:10 +01:00
|
|
|
|
|
|
|
|
def get_spender(self, outpoint) -> str:
|
|
|
|
|
"""
|
|
|
|
|
returns txid spending outpoint.
|
|
|
|
|
subscribes to addresses as a side effect.
|
|
|
|
|
"""
|
|
|
|
|
prev_txid, index = outpoint.split(':')
|
|
|
|
|
spender_txid = self.adb.db.get_spent_outpoint(prev_txid, int(index))
|
|
|
|
|
if not spender_txid:
|
|
|
|
|
return
|
|
|
|
|
spender_tx = self.adb.get_transaction(spender_txid)
|
|
|
|
|
for i, o in enumerate(spender_tx.outputs()):
|
|
|
|
|
if o.address is None:
|
|
|
|
|
continue
|
|
|
|
|
if not self.adb.is_mine(o.address):
|
|
|
|
|
self.adb.add_address(o.address)
|
|
|
|
|
return spender_txid
|
|
|
|
|
|
2019-07-05 14:42:09 +02:00
|
|
|
    def get_tx_mined_depth(self, txid: str) -> TxMinedDepth:
        """Classify how deeply mined *txid* is (see TxMinedDepth).

        A falsy txid (None/'') maps to FREE.
        """
        if not txid:
            return TxMinedDepth.FREE
        tx_mined_depth = self.adb.get_tx_height(txid)
        height, conf = tx_mined_depth.height, tx_mined_depth.conf
        if conf > 20:
            return TxMinedDepth.DEEP
        elif conf > 0:
            return TxMinedDepth.SHALLOW
        elif height in (TX_HEIGHT_UNCONFIRMED, TX_HEIGHT_UNCONF_PARENT):
            return TxMinedDepth.MEMPOOL
        elif height in (TX_HEIGHT_LOCAL, TX_HEIGHT_FUTURE):
            return TxMinedDepth.FREE
        elif height > 0 and conf == 0:
            # unverified but claimed to be mined
            return TxMinedDepth.MEMPOOL
        else:
            raise NotImplementedError()
|
|
|
|
|
|
2020-02-16 18:54:27 +01:00
|
|
|
def is_deeply_mined(self, txid):
|
|
|
|
|
return self.get_tx_mined_depth(txid) == TxMinedDepth.DEEP
|
|
|
|
|
|
2019-07-05 14:42:09 +02:00
|
|
|
|
2020-02-24 12:01:54 +01:00
|
|
|
|
2020-02-16 14:26:07 +01:00
|
|
|
class LNWalletWatcher(LNWatcher):
|
|
|
|
|
|
2020-03-30 03:49:50 +02:00
|
|
|
    def __init__(self, lnworker: 'LNWallet', network: 'Network'):
        # NOTE: self.network is set again by LNWatcher.__init__; kept for safety
        self.network = network
        # set before the base __init__, as the event handlers it registers
        # (e.g. our on_event_blockchain_updated override) use self.lnworker
        self.lnworker = lnworker
        LNWatcher.__init__(self, lnworker.wallet.adb, network)
|
2020-09-18 20:54:09 +02:00
|
|
|
|
2024-10-23 15:28:01 +02:00
|
|
|
@event_listener
|
|
|
|
|
async def on_event_blockchain_updated(self, *args):
|
|
|
|
|
# overload parent method with cache invalidation
|
|
|
|
|
# we invalidate the cache on each new block because
|
|
|
|
|
# some processes affect the list of sweep transactions
|
|
|
|
|
# (hold invoice preimage revealed, MPP completed, etc)
|
|
|
|
|
for chan in self.lnworker.channels.values():
|
|
|
|
|
chan._sweep_info.clear()
|
|
|
|
|
await self.trigger_callbacks()
|
|
|
|
|
|
2020-09-18 20:54:09 +02:00
|
|
|
def diagnostic_name(self):
|
|
|
|
|
return f"{self.lnworker.wallet.diagnostic_name()}-LNW"
|
2020-02-16 14:26:07 +01:00
|
|
|
|
|
|
|
|
    @ignore_exceptions
    @log_exceptions
    async def update_channel_state(self, *, funding_outpoint: str, funding_txid: str,
                                   funding_height: TxMinedInfo, closing_txid: str,
                                   closing_height: TxMinedInfo, keep_watching: bool) -> None:
        """Forward the observed on-chain state to the channel object and lnworker."""
        chan = self.lnworker.channel_by_txo(funding_outpoint)
        if not chan:
            # outpoint does not belong to one of our channels
            return
        chan.update_onchain_state(
            funding_txid=funding_txid,
            funding_height=funding_height,
            closing_txid=closing_txid,
            closing_height=closing_height,
            keep_watching=keep_watching)
        await self.lnworker.handle_onchain_state(chan)
|
2020-02-16 14:26:07 +01:00
|
|
|
|
2025-02-24 12:20:44 +01:00
|
|
|
def is_dust(self, sweep_info):
|
|
|
|
|
if sweep_info.name in ['local_anchor', 'remote_anchor']:
|
|
|
|
|
return False
|
|
|
|
|
if sweep_info.txout is not None:
|
|
|
|
|
return False
|
|
|
|
|
value = sweep_info.txin._trusted_value_sats
|
|
|
|
|
witness_size = len(sweep_info.txin.make_witness(71*b'\x00'))
|
|
|
|
|
tx_size_vbytes = 84 + witness_size//4 # assumes no batching, sweep to p2wpkh
|
|
|
|
|
self.logger.info(f'{sweep_info.name} size = {tx_size_vbytes}')
|
|
|
|
|
fee = self.fee_policy.estimate_fee(tx_size_vbytes, network=self.network, allow_fallback_to_static_rates=True)
|
|
|
|
|
return value - fee <= dust_threshold()
|
|
|
|
|
|
2022-06-04 16:48:19 +02:00
|
|
|
    @log_exceptions
    async def sweep_commitment_transaction(self, funding_outpoint, closing_tx) -> bool:
        """This function is called when a channel was closed. In this case
        we need to check for redeemable outputs of the commitment transaction
        or spenders down the line (HTLC-timeout/success transactions).

        Returns whether we should continue to monitor."""
        chan = self.lnworker.channel_by_txo(funding_outpoint)
        if not chan:
            return False
        # detect who closed and get information about how to claim outputs
        sweep_info_dict = chan.sweep_ctx(closing_tx)
        self.logger.info(f"do_breach_remedy: {[x.name for x in sweep_info_dict.values()]}")
        # if there is nothing to sweep, watch only until the closing tx is deeply mined
        keep_watching = False if sweep_info_dict else not self.is_deeply_mined(closing_tx.txid())

        # create and broadcast transactions
        for prevout, sweep_info in sweep_info_dict.items():
            if self.is_dust(sweep_info):
                # not worth sweeping
                continue
            prev_txid, prev_index = prevout.split(':')
            name = sweep_info.name + ' ' + chan.get_id_for_log()
            self.lnworker.wallet.set_default_label(prevout, name)
            if not self.adb.get_transaction(prev_txid):
                # do not keep watching if prevout does not exist
                self.logger.info(f'prevout does not exist for {name}: {prev_txid}')
                continue
            spender_txid = self.get_spender(prevout)
            spender_tx = self.adb.get_transaction(spender_txid) if spender_txid else None
            if spender_tx:
                # the spender might be the remote, revoked or not
                htlc_sweepinfo = chan.maybe_sweep_htlcs(closing_tx, spender_tx)
                for prevout2, htlc_sweep_info in htlc_sweepinfo.items():
                    htlc_tx_spender = self.get_spender(prevout2)
                    self.lnworker.wallet.set_default_label(prevout2, htlc_sweep_info.name)
                    if htlc_tx_spender:
                        # HTLC output already spent: watch until deeply mined
                        keep_watching |= not self.is_deeply_mined(htlc_tx_spender)
                    else:
                        keep_watching = True
                        await self.maybe_redeem(prevout2, htlc_sweep_info, name)
                keep_watching |= not self.is_deeply_mined(spender_txid)
                self.maybe_extract_preimage(chan, spender_tx, prevout)
            else:
                keep_watching = True
                # broadcast or maybe update our own tx
                await self.maybe_redeem(prevout, sweep_info, name)

        return keep_watching
|
2020-02-16 14:26:07 +01:00
|
|
|
|
2024-12-10 13:28:10 +01:00
|
|
|
    def get_redeem_tx(self, prevout: str, sweep_info: 'SweepInfo', name: str):
        """Return a (old_tx, new_tx) pair for redeeming *prevout*.

        new_tx is None when nothing should be (re)broadcast; old_tx is the tx
        currently known to spend the prevout, if any.
        """
        # check if redeem tx needs to be updated
        # if it is in the mempool, we need to check fee rise
        txid = self.get_spender(prevout)
        old_tx = self.adb.get_transaction(txid)
        assert old_tx is not None or txid is None
        tx_depth = self.get_tx_mined_depth(txid) if txid else None
        if txid and tx_depth not in [TxMinedDepth.FREE, TxMinedDepth.MEMPOOL]:
            # spender is already (deeply) mined: nothing new to broadcast
            assert old_tx is not None
            return old_tx, None
        # fixme: deepcopy is needed because tx.serialize() is destructive
        inputs = [copy.deepcopy(sweep_info.txin)]
        outputs = [sweep_info.txout] if sweep_info.txout else []
        if sweep_info.name == 'first-stage-htlc':
            new_tx = PartialTransaction.from_io(inputs, outputs, locktime=sweep_info.cltv_abs, version=2)
            self.lnworker.wallet.sign_transaction(new_tx, password=None, ignore_warnings=True)
        else:
            # password is needed for 1st stage htlc tx with anchors because we add inputs
            password = self.lnworker.wallet.get_unlocked_password()
            new_tx = self.lnworker.wallet.create_transaction(
                fee_policy = self.fee_policy,
                inputs = inputs,
                outputs = outputs,
                password = password,
                locktime = sweep_info.cltv_abs,
                BIP69_sort=False,
            )
        if new_tx is None:
            self.logger.info(f'{name} could not claim output: {prevout}, dust')
            assert old_tx is not None
            return old_tx, None
        if txid is None:
            # nothing spends the prevout yet: broadcast our new tx
            return None, new_tx
        elif tx_depth == TxMinedDepth.MEMPOOL:
            # spender in mempool: replace only if our tx pays a higher fee
            delta = new_tx.get_fee() - self.adb.get_tx_fee(txid)
            if delta > 1:
                self.logger.info(f'increasing fee of mempool tx {name}: {prevout}')
                return old_tx, new_tx
            else:
                assert old_tx is not None
                return old_tx, None
        elif tx_depth == TxMinedDepth.FREE:
            # return new tx, even if it is equal to old_tx,
            # because we need to test if it can be broadcast
            return old_tx, new_tx
        else:
            assert old_tx is not None
            return old_tx, None
|
2022-06-04 16:48:19 +02:00
|
|
|
|
2024-12-10 13:28:10 +01:00
|
|
|
    async def maybe_redeem(self, prevout, sweep_info: 'SweepInfo', name: str) -> None:
        """Broadcast the sweep of *prevout* now, or record it as a future tx
        if CLTV/CSV timelocks do not allow broadcasting yet."""
        old_tx, new_tx = self.get_redeem_tx(prevout, sweep_info, name)
        if new_tx is None:
            return
        prev_txid, prev_index = prevout.split(':')
        can_broadcast = True
        local_height = self.network.get_local_height()
        if sweep_info.cltv_abs:
            # absolute timelock: wait until the chain reaches cltv_abs
            wanted_height = sweep_info.cltv_abs
            if wanted_height - local_height > 0:
                can_broadcast = False
                # self.logger.debug(f"pending redeem for {prevout}. waiting for {name}: CLTV ({local_height=}, {wanted_height=})")
        if sweep_info.csv_delay:
            # relative timelock: counted from the height of the prev tx (if mined)
            prev_height = self.adb.get_tx_height(prev_txid)
            if prev_height.height > 0:
                wanted_height = prev_height.height + sweep_info.csv_delay - 1
            else:
                wanted_height = local_height + sweep_info.csv_delay
            if wanted_height - local_height > 0:
                can_broadcast = False
                # self.logger.debug(
                #     f"pending redeem for {prevout}. waiting for {name}: CSV "
                #     f"({local_height=}, {wanted_height=}, {prev_height.height=}, {sweep_info.csv_delay=})")
        if can_broadcast:
            self.logger.info(f'we can broadcast: {name}')
            if await self.network.try_broadcasting(new_tx, name):
                tx_was_added = self.adb.add_transaction(new_tx, is_new=(old_tx is None))
            else:
                tx_was_added = False
        else:
            # we may have a tx with a different fee, in which case it will be replaced
            if not old_tx or (old_tx and old_tx.txid() != new_tx.txid()):
                try:
                    tx_was_added = self.adb.add_transaction(new_tx, is_new=(old_tx is None))
                except Exception as e:
                    self.logger.info(f'could not add future tx: {name}. prevout: {prevout} {str(e)}')
                    tx_was_added = False
                if tx_was_added:
                    self.logger.info(f'added redeem tx: {name}. prevout: {prevout}')
            else:
                tx_was_added = False
            # set future tx regardless of tx_was_added, because it is not persisted
            # (and wanted_height can change if input of CSV was not mined before)
            self.adb.set_future_tx(new_tx.txid(), wanted_height=wanted_height)
        if tx_was_added:
            self.lnworker.wallet.set_label(new_tx.txid(), name)
            if old_tx and old_tx.txid() != new_tx.txid():
                # the new tx replaced the old one: drop the old label
                self.lnworker.wallet.set_label(old_tx.txid(), None)
            util.trigger_callback('wallet_updated', self.lnworker.wallet)
|
2025-03-11 18:13:32 +01:00
|
|
|
|
|
|
|
|
def maybe_extract_preimage(self, chan: 'AbstractChannel', spender_tx: Transaction, prevout: str):
|
|
|
|
|
txin_idx = spender_tx.get_input_idx_that_spent_prevout(TxOutpoint.from_str(prevout))
|
|
|
|
|
assert txin_idx is not None
|
|
|
|
|
spender_txin = spender_tx.inputs()[txin_idx]
|
|
|
|
|
chan.extract_preimage_from_htlc_txin(
|
|
|
|
|
spender_txin,
|
|
|
|
|
is_deeply_mined=self.is_deeply_mined(spender_tx.txid()),
|
|
|
|
|
)
|