2018-10-25 19:34:31 +02:00
|
|
|
# Copyright (C) 2018 The Electrum developers
|
|
|
|
|
# Distributed under the MIT software license, see the accompanying
|
|
|
|
|
# file LICENCE or http://www.opensource.org/licenses/mit-license.php
|
|
|
|
|
|
2018-10-05 15:37:47 +02:00
|
|
|
from enum import IntFlag, IntEnum
|
2020-03-16 22:07:00 +01:00
|
|
|
import enum
|
2018-06-28 15:50:45 +02:00
|
|
|
import json
|
2020-03-16 22:07:00 +01:00
|
|
|
from collections import namedtuple, defaultdict
|
2019-12-10 03:17:57 +01:00
|
|
|
from typing import NamedTuple, List, Tuple, Mapping, Optional, TYPE_CHECKING, Union, Dict, Set, Sequence
|
2018-09-27 16:43:33 +02:00
|
|
|
import re
|
lnutil.LnFeatures: limit max feature bit to 10_000
closes https://github.com/spesmilo/electrum/issues/8403
> In Python 3.10 that worked fine, however Python 3.11 added a limit on large int<->str conversions (https://github.com/python/cpython/issues/95778), so now this throws an error.
Apparently this change was deemed a security fix and was backported to all supported branches of CPython (going back to 3.7). i.e. it affects ~all versions of python (if sufficiently updated with bugfix patches), not just 3.11
> Some offending node aliases:
> ```
> ergvein-fiatchannels
> test-mainnet
> arakis
> ```
The features bits set by some of these nodes:
```
(1, 7, 8, 11, 13, 14, 17, 19, 23, 27, 45, 32973, 52973)
(1, 7, 8, 11, 13, 14, 17, 19, 23, 27, 39, 45, 55, 32973, 52973)
```
> P.S. I see there are a lot of nodes with 253 bytes in their feature vectors. Any idea why that could happen?
Note that the valid [merged-into-spec features](https://github.com/lightning/bolts/blob/50b2df24a27879e8329712c275db78876fd022fe/09-features.md) currently only go as high as ~51.
However the spec does not specify how to choose feature bits for experimental stuff, so I guess some people are using values in the 50k range. The only limit imposed by the spec on the length of the features bitvector is an implicit one due to the max message size: every msg must be smaller than 65KB, and the features bitvector needs to fit inside the init message, hence it can be up to ~524K bits.
(note that the features are not stored in a sparse representation in the init message and in gossip messages, so if many nodes set such high feature bits, that would noticeably impact the size of the gossip).
-----
Anyway, our current implementation of LnFeatures is subclassing IntFlag, and it looks like it does not work well for such large integers. I've managed to make IntFlags reasonably in python 3.11 by overriding __str__ and __repr__ (note that in cpython it is apparently only the base2<->base10 conversions that are slow, power-of-2 conversions are fast, so we can e.g. use `hex()`). However in python 3.10 and older, enum.py itself seems really slow for bigints, e.g. enum._decompose in python 3.10.
Try e.g. this script, which is instant in py3.11 but takes minutes in py3.10:
```py
from enum import IntFlag
class c(IntFlag):
known_flag_1 = 1 << 0
known_flag_2 = 1 << 1
known_flag_3 = 1 << 2
if hasattr(IntFlag, "_numeric_repr_"): # python 3.11+
_numeric_repr_ = hex
def __repr__(self):
return f"<{self._name_}: {hex(self._value_)}>"
def __str__(self):
return hex(self._value_)
a = c(2**70000-1)
q1 = repr(a)
q2 = str(a)
```
AFAICT we have two options: either we rewrite LnFeatures so that it does not use IntFlag (and enum.py), or, for the short term as workaround, we could just reject very large feature bits.
For now, I've opted to the latter, rejecting feature bits over 10k.
(note that another option is bumping the min required python to 3.11, in which case with the overrides added in this commit the performance looks perfectly fine)
2023-05-08 19:37:33 +00:00
|
|
|
import sys
|
|
|
|
|
|
2024-06-17 13:38:54 +02:00
|
|
|
import electrum_ecc as ecc
|
|
|
|
|
from electrum_ecc import CURVE_ORDER, ecdsa_sig64_from_der_sig, ECPubkey, string_to_number
|
2020-03-24 17:25:49 +01:00
|
|
|
import attr
|
2019-11-25 21:10:53 +01:00
|
|
|
|
2023-02-17 11:35:03 +00:00
|
|
|
from .util import bfh, inv_dict, UserFacingException
|
2020-02-21 10:57:13 +01:00
|
|
|
from .util import list_enabled_bits
|
2023-01-19 20:54:42 +01:00
|
|
|
from .util import ShortID as ShortChannelID
|
|
|
|
|
from .util import format_short_id as format_short_channel_id
|
|
|
|
|
|
2023-03-19 19:15:44 +00:00
|
|
|
from .crypto import sha256, pw_decode_with_version_and_mac
|
2019-10-23 17:09:41 +02:00
|
|
|
from .transaction import (Transaction, PartialTransaction, PartialTxInput, TxOutpoint,
|
2021-09-15 09:47:58 +02:00
|
|
|
PartialTxOutput, opcodes, TxOutput, OPPushDataPubkey)
|
2024-06-17 13:38:54 +02:00
|
|
|
from . import bitcoin, crypto, transaction
|
2023-02-26 12:14:25 +00:00
|
|
|
from . import descriptor
|
2024-04-26 20:09:00 +00:00
|
|
|
from .bitcoin import (redeem_script_to_address, address_to_script,
|
2020-10-24 06:08:34 +02:00
|
|
|
construct_witness, construct_script)
|
2018-07-27 20:59:04 +02:00
|
|
|
from . import segwit_addr
|
2018-09-27 16:43:33 +02:00
|
|
|
from .i18n import _
|
2018-10-04 16:22:15 +02:00
|
|
|
from .lnaddr import lndecode
|
2020-04-06 12:53:57 +02:00
|
|
|
from .bip32 import BIP32Node, BIP32_PRIME
|
2022-04-28 10:21:47 +02:00
|
|
|
from .transaction import BCDataStream, OPPushDataGeneric
|
2023-07-14 14:21:50 +00:00
|
|
|
from .logging import get_logger
|
2022-04-28 10:21:47 +02:00
|
|
|
|
2018-06-28 15:50:45 +02:00
|
|
|
|
2018-10-23 16:44:39 +02:00
|
|
|
if TYPE_CHECKING:
|
2020-04-13 15:57:53 +02:00
|
|
|
from .lnchannel import Channel, AbstractChannel
|
2019-12-10 03:17:57 +01:00
|
|
|
from .lnrouter import LNPaymentRoute
|
2021-03-19 19:13:50 +01:00
|
|
|
from .lnonion import OnionRoutingFailure
|
2023-08-09 15:38:59 +00:00
|
|
|
from .simple_config import SimpleConfig
|
2018-10-23 16:44:39 +02:00
|
|
|
|
2018-10-19 21:47:51 +02:00
|
|
|
|
2023-07-14 14:21:50 +00:00
|
|
|
_logger = get_logger(__name__)  # module-level logger for this module
|
|
|
|
|
|
|
|
|
|
|
2020-03-30 01:53:34 +02:00
|
|
|
# defined in BOLT-03:
# (transaction weights, in weight units; the "_ANCHORS" variants apply to
# channels using anchor outputs)
HTLC_TIMEOUT_WEIGHT = 663
HTLC_TIMEOUT_WEIGHT_ANCHORS = 666
HTLC_SUCCESS_WEIGHT = 703
HTLC_SUCCESS_WEIGHT_ANCHORS = 706
COMMITMENT_TX_WEIGHT = 724
COMMITMENT_TX_WEIGHT_ANCHORS = 1124
HTLC_OUTPUT_WEIGHT = 172
FIXED_ANCHOR_SAT = 330  # amount locked in each anchor output, in sat

# max channel capacity without option_support_large_channel (2^24 - 1 sat)
LN_MAX_FUNDING_SAT_LEGACY = pow(2, 24) - 1
DUST_LIMIT_MAX = 1000  # max dust limit we accept from a peer, in sat

# 2-of-2 multisig script template for the funding output
SCRIPT_TEMPLATE_FUNDING = [opcodes.OP_2, OPPushDataPubkey, OPPushDataPubkey, opcodes.OP_2, opcodes.OP_CHECKMULTISIG]
|
|
|
|
|
|
2019-11-07 18:28:27 +01:00
|
|
|
|
2021-10-05 09:13:01 +02:00
|
|
|
from .json_db import StoredObject, stored_in, stored_as
|
2020-01-30 18:09:32 +01:00
|
|
|
|
2020-03-17 11:04:49 +01:00
|
|
|
|
2020-03-13 11:44:29 +01:00
|
|
|
def channel_id_from_funding_tx(funding_txid: str, funding_index: int) -> Tuple[bytes, bytes]:
    """Derive the BOLT-2 channel_id from the funding outpoint.

    Returns (channel_id, funding_txid_bytes), where funding_txid_bytes is
    the txid in little-endian byte order and channel_id is that 32-byte
    value XOR-ed with the funding output index.
    """
    txid_le = bytes.fromhex(funding_txid)[::-1]
    chan_id_int = funding_index ^ int.from_bytes(txid_le, byteorder='big')
    return chan_id_int.to_bytes(length=32, byteorder='big'), txid_le
|
|
|
|
|
|
2020-03-17 11:04:49 +01:00
|
|
|
def hex_to_bytes(v) -> Optional[bytes]:
    """attrs converter: pass bytes through, decode a hex str, pass through None."""
    if v is None:
        return None
    return v if isinstance(v, bytes) else bytes.fromhex(v)


def bytes_to_hex(v: Optional[bytes]) -> Optional[str]:
    """attrs repr helper: hex-encode bytes; quoted (via repr) so that the
    generated class repr shows the value as a string literal."""
    if v is None:
        return None
    return repr(v.hex())


def json_to_keypair(v) -> 'OnlyPubkeyKeypair':
    """attrs converter: pass keypair objects through; rebuild from a JSON dict.

    A two-entry dict contains both pubkey and privkey -> Keypair;
    otherwise only the pubkey is present -> OnlyPubkeyKeypair.
    """
    if isinstance(v, OnlyPubkeyKeypair):
        return v
    return Keypair(**v) if len(v) == 2 else OnlyPubkeyKeypair(**v)
|
|
|
|
|
|
|
|
|
|
|
2023-11-20 11:34:56 +00:00
|
|
|
def serialize_htlc_key(scid: bytes, htlc_id: int) -> str:
    """Encode a (short_channel_id, htlc_id) pair as "<scid_hex>:<htlc_id>"."""
    return ':'.join((scid.hex(), '%d' % htlc_id))
|
|
|
|
|
|
2023-11-20 11:34:56 +00:00
|
|
|
|
|
|
|
|
def deserialize_htlc_key(htlc_key: str) -> Tuple[bytes, int]:
    """Parse a "<scid_hex>:<htlc_id>" string (see serialize_htlc_key) into (scid, htlc_id)."""
    scid_hex, htlc_id_str = htlc_key.split(':')
    return bytes.fromhex(scid_hex), int(htlc_id_str)
|
|
|
|
|
|
2023-11-20 11:34:56 +00:00
|
|
|
|
2020-01-30 18:09:32 +01:00
|
|
|
@attr.s
class OnlyPubkeyKeypair(StoredObject):
    """A keypair of which only the public key is known (watch-only)."""
    # public key bytes; stored as hex in the wallet db, hence the converter/repr helpers
    pubkey = attr.ib(type=bytes, converter=hex_to_bytes, repr=bytes_to_hex)
|
2020-01-30 18:09:32 +01:00
|
|
|
|
|
|
|
|
@attr.s
class Keypair(OnlyPubkeyKeypair):
    """A full keypair: the pubkey (inherited) plus the corresponding private key."""
    # private key bytes; stored as hex in the wallet db
    privkey = attr.ib(type=bytes, converter=hex_to_bytes, repr=bytes_to_hex)
|
2020-01-30 18:09:32 +01:00
|
|
|
|
|
|
|
|
@attr.s
class ChannelConfig(StoredObject):
    """Channel parameters for one side of a channel.

    Subclassed by LocalConfig (our side) and RemoteConfig (the peer's side);
    the fields here are common to both. Amount fields are in sat or msat as
    suffixed; "SAME ctx"/"OTHER ctx" in the comments refers to whose
    commitment transaction a limit applies to.
    """
    # shared channel config fields
    payment_basepoint = attr.ib(type=OnlyPubkeyKeypair, converter=json_to_keypair)
    multisig_key = attr.ib(type=OnlyPubkeyKeypair, converter=json_to_keypair)  # 2-of-2 funding key
    htlc_basepoint = attr.ib(type=OnlyPubkeyKeypair, converter=json_to_keypair)
    delayed_basepoint = attr.ib(type=OnlyPubkeyKeypair, converter=json_to_keypair)
    revocation_basepoint = attr.ib(type=OnlyPubkeyKeypair, converter=json_to_keypair)
    to_self_delay = attr.ib(type=int)  # applies to OTHER ctx
    dust_limit_sat = attr.ib(type=int)  # applies to SAME ctx
    max_htlc_value_in_flight_msat = attr.ib(type=int)  # max val of INCOMING htlcs
    max_accepted_htlcs = attr.ib(type=int)  # max num of INCOMING htlcs
    initial_msat = attr.ib(type=int)  # balance at channel opening
    reserve_sat = attr.ib(type=int)  # applies to OTHER ctx
    htlc_minimum_msat = attr.ib(type=int)  # smallest value for INCOMING htlc
    upfront_shutdown_script = attr.ib(type=bytes, converter=hex_to_bytes, repr=bytes_to_hex)
    announcement_node_sig = attr.ib(type=bytes, converter=hex_to_bytes, repr=bytes_to_hex)
    announcement_bitcoin_sig = attr.ib(type=bytes, converter=hex_to_bytes, repr=bytes_to_hex)

    def validate_params(self, *, funding_sat: int, config: 'SimpleConfig', peer_features: 'LnFeatures') -> None:
        """Sanity-check this config in isolation; raises Exception on violation.

        These checks must hold for both our own and the peer's config.
        LocalConfig.validate_params layers stricter local-only checks on top,
        and cross_validate_params() checks constraints spanning both configs.
        """
        conf_name = type(self).__name__
        # every basepoint must be a valid 33-byte compressed pubkey
        for key in (
                self.payment_basepoint,
                self.multisig_key,
                self.htlc_basepoint,
                self.delayed_basepoint,
                self.revocation_basepoint
        ):
            if not (len(key.pubkey) == 33 and ecc.ECPubkey.is_pubkey_bytes(key.pubkey)):
                raise Exception(f"{conf_name}. invalid pubkey in channel config")
        if funding_sat < MIN_FUNDING_SAT:
            raise Exception(f"funding_sat too low: {funding_sat} sat < {MIN_FUNDING_SAT}")
        # without option_support_large_channel, BOLT-2 caps the capacity:
        if not peer_features.supports(LnFeatures.OPTION_SUPPORT_LARGE_CHANNEL_OPT):
            # MUST set funding_satoshis to less than 2^24 satoshi
            if funding_sat > LN_MAX_FUNDING_SAT_LEGACY:
                raise Exception(f"funding_sat too high: {funding_sat} sat > {LN_MAX_FUNDING_SAT_LEGACY} (legacy limit)")
        if funding_sat > config.LIGHTNING_MAX_FUNDING_SAT:
            raise Exception(f"funding_sat too high: {funding_sat} sat > {config.LIGHTNING_MAX_FUNDING_SAT} (config setting)")
        # MUST set push_msat to equal or less than 1000 * funding_satoshis
        if not (0 <= self.initial_msat <= 1000 * funding_sat):
            raise Exception(f"{conf_name}. insane initial_msat={self.initial_msat}. (funding_sat={funding_sat})")
        if self.reserve_sat < self.dust_limit_sat:
            raise Exception(f"{conf_name}. MUST set channel_reserve_satoshis greater than or equal to dust_limit_satoshis")
        if self.dust_limit_sat < bitcoin.DUST_LIMIT_UNKNOWN_SEGWIT:
            raise Exception(f"{conf_name}. dust limit too low: {self.dust_limit_sat} sat")
        if self.dust_limit_sat > DUST_LIMIT_MAX:
            raise Exception(f"{conf_name}. dust limit too high: {self.dust_limit_sat} sat")
        # a reserve above 1% of capacity is considered unreasonable
        if self.reserve_sat > funding_sat // 100:
            raise Exception(f"{conf_name}. reserve too high: {self.reserve_sat}, funding_sat: {funding_sat}")
        if self.htlc_minimum_msat > 1_000:
            raise Exception(f"{conf_name}. htlc_minimum_msat too high: {self.htlc_minimum_msat} msat")
        HTLC_MINIMUM_MSAT_MIN = 0  # should be at least 1 really, but apparently some nodes are sending zero...
        if self.htlc_minimum_msat < HTLC_MINIMUM_MSAT_MIN:
            raise Exception(f"{conf_name}. htlc_minimum_msat too low: {self.htlc_minimum_msat} msat < {HTLC_MINIMUM_MSAT_MIN}")
        if self.max_accepted_htlcs < 5:
            raise Exception(f"{conf_name}. max_accepted_htlcs too low: {self.max_accepted_htlcs}")
        # 483 is the BOLT-2 protocol maximum for max_accepted_htlcs
        if self.max_accepted_htlcs > 483:
            raise Exception(f"{conf_name}. max_accepted_htlcs too high: {self.max_accepted_htlcs}")
        if self.to_self_delay > MAXIMUM_REMOTE_TO_SELF_DELAY_ACCEPTED:
            raise Exception(f"{conf_name}. to_self_delay too high: {self.to_self_delay} > {MAXIMUM_REMOTE_TO_SELF_DELAY_ACCEPTED}")
        # require a sane in-flight limit: at least min(capacity, 0.001 BTC in msat)
        if self.max_htlc_value_in_flight_msat < min(1000 * funding_sat, 100_000_000):
            raise Exception(f"{conf_name}. max_htlc_value_in_flight_msat is too small: {self.max_htlc_value_in_flight_msat}")

    @classmethod
    def cross_validate_params(
            cls,
            *,
            local_config: 'LocalConfig',
            remote_config: 'RemoteConfig',
            funding_sat: int,
            is_local_initiator: bool,  # whether we are the funder
            initial_feerate_per_kw: int,
            config: 'SimpleConfig',
            peer_features: 'LnFeatures',
            has_anchors: bool,
    ) -> None:
        """Validate both configs together at channel opening; raises on violation.

        Runs each side's validate_params() first, then the BOLT-2 checks
        that need access to both configs at once.
        """
        # first we validate the configs separately
        local_config.validate_params(funding_sat=funding_sat, config=config, peer_features=peer_features)
        remote_config.validate_params(funding_sat=funding_sat, config=config, peer_features=peer_features)
        # now do tests that need access to both configs
        if is_local_initiator:
            funder, fundee = LOCAL, REMOTE
            funder_config, fundee_config = local_config, remote_config
        else:
            funder, fundee = REMOTE, LOCAL
            funder_config, fundee_config = remote_config, local_config
        # if channel_reserve_satoshis is less than dust_limit_satoshis within the open_channel message:
        #     MUST reject the channel.
        if remote_config.reserve_sat < local_config.dust_limit_sat:
            raise Exception("violated constraint: remote_config.reserve_sat < local_config.dust_limit_sat")
        # if channel_reserve_satoshis from the open_channel message is less than dust_limit_satoshis:
        #     MUST reject the channel.
        if local_config.reserve_sat < remote_config.dust_limit_sat:
            raise Exception("violated constraint: local_config.reserve_sat < remote_config.dust_limit_sat")
        # The receiving node MUST fail the channel if:
        #     the funder's amount for the initial commitment transaction is not
        #     sufficient for full fee payment.
        if funder_config.initial_msat < calc_fees_for_commitment_tx(
                num_htlcs=0,
                feerate=initial_feerate_per_kw,
                is_local_initiator=is_local_initiator,
                has_anchors=has_anchors,
        )[funder]:
            raise Exception(
                "the funder's amount for the initial commitment transaction "
                "is not sufficient for full fee payment")
        # The receiving node MUST fail the channel if:
        #     both to_local and to_remote amounts for the initial commitment transaction are
        #     less than or equal to channel_reserve_satoshis (see BOLT 3).
        if (max(local_config.initial_msat, remote_config.initial_msat)
                <= 1000 * max(local_config.reserve_sat, remote_config.reserve_sat)):
            raise Exception(
                "both to_local and to_remote amounts for the initial commitment "
                "transaction are less than or equal to channel_reserve_satoshis")
        # local import to avoid an import cycle at module load time
        from .simple_config import FEERATE_PER_KW_MIN_RELAY_LIGHTNING
        if initial_feerate_per_kw < FEERATE_PER_KW_MIN_RELAY_LIGHTNING:
            raise Exception(f"feerate lower than min relay fee. {initial_feerate_per_kw} sat/kw.")
|
2021-09-24 19:58:32 +02:00
|
|
|
|
2020-06-08 21:17:23 +02:00
|
|
|
|
2021-10-05 09:13:01 +02:00
|
|
|
@stored_as('local_config')
@attr.s
class LocalConfig(ChannelConfig):
    """Our own side's channel parameters, including secret key material."""
    # per-channel entropy (from os.urandom) that most channel keys are derived from
    channel_seed = attr.ib(type=bytes, converter=hex_to_bytes, repr=bytes_to_hex)  # type: Optional[bytes]
    funding_locked_received = attr.ib(type=bool)  # whether the peer sent us funding_locked
    current_commitment_signature = attr.ib(type=bytes, converter=hex_to_bytes, repr=bytes_to_hex)
    current_htlc_signatures = attr.ib(type=bytes, converter=hex_to_bytes, repr=bytes_to_hex)
    # seed of the per-commitment revocation secret chain
    per_commitment_secret_seed = attr.ib(type=bytes, converter=hex_to_bytes, repr=bytes_to_hex)

    @classmethod
    def from_seed(cls, **kwargs):
        """Alternate constructor: derive the channel keys from kwargs['channel_seed'].

        Consumes the extra kwargs 'static_remotekey' and 'static_payment_key'
        (they are not LocalConfig fields) to decide how payment_basepoint is
        obtained. kwargs['multisig_key'] may be pre-set by the caller; it is
        only derived from the seed if passed as None.
        """
        channel_seed = kwargs['channel_seed']
        node = BIP32Node.from_rootseed(channel_seed, xtype='standard')
        keypair_generator = lambda family: generate_keypair(node, family)
        kwargs['per_commitment_secret_seed'] = keypair_generator(LnKeyFamily.REVOCATION_ROOT).privkey
        if kwargs['multisig_key'] is None:
            kwargs['multisig_key'] = keypair_generator(LnKeyFamily.MULTISIG)
        kwargs['htlc_basepoint'] = keypair_generator(LnKeyFamily.HTLC_BASE)
        kwargs['delayed_basepoint'] = keypair_generator(LnKeyFamily.DELAY_BASE)
        kwargs['revocation_basepoint'] = keypair_generator(LnKeyFamily.REVOCATION_BASE)
        static_remotekey = kwargs.pop('static_remotekey')
        static_payment_key = kwargs.pop('static_payment_key')
        if static_payment_key:
            # We derive the payment_basepoint from a static secret (derived from
            # the wallet seed) and a public nonce that is revealed
            # when the funding transaction is spent. This way we can restore the
            # payment_basepoint, needed for sweeping in the event of a force close.
            kwargs['payment_basepoint'] = derive_payment_basepoint(
                static_payment_secret=static_payment_key.privkey,
                funding_pubkey=kwargs['multisig_key'].pubkey
            )
        elif static_remotekey:  # we automatically sweep to a wallet address
            kwargs['payment_basepoint'] = OnlyPubkeyKeypair(static_remotekey)
        else:
            # we expect all our channels to use option_static_remotekey, so ending up here likely indicates an issue...
            kwargs['payment_basepoint'] = keypair_generator(LnKeyFamily.PAYMENT_BASE)
        return LocalConfig(**kwargs)

    def validate_params(self, *, funding_sat: int, config: 'SimpleConfig', peer_features: 'LnFeatures') -> None:
        """Validate our own config; stricter than the shared base checks."""
        conf_name = type(self).__name__
        # run base checks regardless whether LOCAL/REMOTE config
        super().validate_params(funding_sat=funding_sat, config=config, peer_features=peer_features)
        # run some stricter checks on LOCAL config (make sure we ourselves do the sane thing,
        # even if we are lenient with REMOTE for compatibility reasons)
        HTLC_MINIMUM_MSAT_MIN = 1
        if self.htlc_minimum_msat < HTLC_MINIMUM_MSAT_MIN:
            raise Exception(f"{conf_name}. htlc_minimum_msat too low: {self.htlc_minimum_msat} msat < {HTLC_MINIMUM_MSAT_MIN}")
|
2020-01-30 18:09:32 +01:00
|
|
|
|
2021-10-05 09:13:01 +02:00
|
|
|
@stored_as('remote_config')
@attr.s
class RemoteConfig(ChannelConfig):
    """The remote peer's channel parameters (their side of the channel)."""
    # per-commitment points the peer revealed for its commitment transactions
    next_per_commitment_point = attr.ib(type=bytes, converter=hex_to_bytes, repr=bytes_to_hex)
    # None until the first commitment advance — hence the default
    current_per_commitment_point = attr.ib(default=None, type=bytes, converter=hex_to_bytes, repr=bytes_to_hex)
|
2020-01-30 18:09:32 +01:00
|
|
|
|
2021-10-05 09:13:01 +02:00
|
|
|
@stored_in('fee_updates')
@attr.s
class FeeUpdate(StoredObject):
    """A commitment-feerate update and the commitment numbers it applies from."""
    rate = attr.ib(type=int)  # in sat/kw
    # ctn_local/ctn_remote default to None — presumably until the update is
    # reflected in the respective commitment tx; TODO confirm against callers
    ctn_local = attr.ib(default=None, type=int)
    ctn_remote = attr.ib(default=None, type=int)
|
2019-07-23 19:23:39 +02:00
|
|
|
|
2021-10-05 09:13:01 +02:00
|
|
|
@stored_as('constraints')
@attr.s
class ChannelConstraints(StoredObject):
    """Channel-wide constraints fixed at opening time."""
    flags = attr.ib(type=int, converter=int)
    capacity = attr.ib(type=int)  # in sat
    is_initiator = attr.ib(type=bool)  # note: sometimes also called "funder"
    # number of confirmations required before the funding tx is considered locked in
    funding_txn_minimum_depth = attr.ib(type=int)
|
2018-06-28 15:50:45 +02:00
|
|
|
|
2020-06-16 11:08:52 +02:00
|
|
|
|
lightning: change derivation of funding_pubkey
Ideally, given an on-chain backup, after the remote force-closes, we should be able to spend our anchor output,
to CPFP the remote commitment tx (assuming the channel used OPTION_ANCHORS).
To spend the anchor output, we need to be able to sign with the local funding_privkey.
Previously we derived the funding_key from the channel_seed (which comes from os.urandom).
Prior to anchors, there was no use case for signing with the funding_key given a channel backup.
Now with anchors, we should make its derivation deterministic somehow, in a way so that it can
be derived given just an on-chain backup.
- one way would be to put some more data into the existing OP_RETURN
- uses block space
- the OP_RETURNs can be disabled via "use_recoverable_channels"
- only the initiator can use OP_RETURNs (so what if channel is in incoming dir?)
- instead, new scheme for our funding_key:
- we derive the funding_privkey from the lnworker root secret (derived from our bip32 seed)
- for outgoing channels:
- lnworker_root_secret + remote_node_id + funding_tx_nlocktime
- for incoming channels:
- lnworker_root_secret + remote_node_id + remote_funding_pubkey
- a check is added to avoid reusing the same key between channels:
not letting to user open more than one channel with the same peer in a single block
- only the first 16 bytes of the remote_node_id are used, as the onchain backup OP_RETURNs only contain that
- as the funding_privkey cannot be derived from the channel_seed anymore, it is included in the
imported channel backups, which in turn need a new version defined
- a wallet db upgrade is used to update already stored imported cbs
- alternatively we could keep the imported cbs as-is, so no new version, no new funding_privkey field, as it is clearly somewhat redundant given on-chain backups can reconstruct it
- however adding the field seems easier
- otherwise the existing code would try to derive the funding_privkey from the channel_seed
- also note: atm there is no field in the imported backups to distinguish anchor channels vs static-remotekey channels
2025-01-14 16:14:01 +00:00
|
|
|
# version number written into serialized imported channel backups
# (see ImportedChannelBackupStorage.to_bytes / from_bytes)
CHANNEL_BACKUP_VERSION_LATEST = 2
# all versions we can still deserialize
KNOWN_CHANNEL_BACKUP_VERSIONS = (0, 1, 2, )
# import-time sanity check: the latest version must be parseable
assert CHANNEL_BACKUP_VERSION_LATEST in KNOWN_CHANNEL_BACKUP_VERSIONS
|
2021-03-09 09:55:55 +01:00
|
|
|
|
2020-03-13 11:44:29 +01:00
|
|
|
@attr.s
class ChannelBackupStorage(StoredObject):
    """Base class for channel backups: the data identifying the funding outpoint."""
    funding_txid = attr.ib(type=str)
    funding_index = attr.ib(type=int, converter=int)
    funding_address = attr.ib(type=str)
    is_initiator = attr.ib(type=bool)

    def funding_outpoint(self):
        # the outpoint of the channel's funding output
        return Outpoint(self.funding_txid, self.funding_index)

    def channel_id(self):
        # BOLT-2 channel_id derived from the funding outpoint
        chan_id, _ = channel_id_from_funding_tx(self.funding_txid, self.funding_index)
        return chan_id
|
|
|
|
|
|
2021-10-05 09:13:01 +02:00
|
|
|
@stored_in('onchain_channel_backups')
@attr.s
class OnchainChannelBackupStorage(ChannelBackupStorage):
    """Channel backup recovered from on-chain data; only a node-id prefix is available."""
    node_id_prefix = attr.ib(type=bytes, converter=hex_to_bytes)  # remote node pubkey (prefix only)
|
2021-03-09 09:55:55 +01:00
|
|
|
|
2021-10-05 09:13:01 +02:00
|
|
|
@stored_in('imported_channel_backups')
@attr.s
class ImportedChannelBackupStorage(ChannelBackupStorage):
    """Channel backup imported from a user-supplied blob (see to_bytes/from_bytes)."""
    node_id = attr.ib(type=bytes, converter=hex_to_bytes)  # remote node pubkey
    privkey = attr.ib(type=bytes, converter=hex_to_bytes)  # local node privkey
    host = attr.ib(type=str)  # last known address of the remote peer
    port = attr.ib(type=int, converter=int)
    channel_seed = attr.ib(type=bytes, converter=hex_to_bytes)
    local_delay = attr.ib(type=int, converter=int)
    remote_delay = attr.ib(type=int, converter=int)
    remote_payment_pubkey = attr.ib(type=bytes, converter=hex_to_bytes)
    remote_revocation_pubkey = attr.ib(type=bytes, converter=hex_to_bytes)
    # None for backups older than version 1 (field absent from the serialization)
    local_payment_pubkey = attr.ib(type=bytes, converter=hex_to_bytes)  # type: Optional[bytes]
    # None for backups older than version 2
    multisig_funding_privkey = attr.ib(type=bytes, converter=hex_to_bytes)  # type: Optional[bytes]
|
2021-03-09 09:55:55 +01:00
|
|
|
|
2020-07-02 11:07:10 +02:00
|
|
|
def to_bytes(self) -> bytes:
|
2020-03-13 11:44:29 +01:00
|
|
|
vds = BCDataStream()
|
2023-07-14 14:21:50 +00:00
|
|
|
vds.write_uint16(CHANNEL_BACKUP_VERSION_LATEST)
|
2020-03-13 11:44:29 +01:00
|
|
|
vds.write_boolean(self.is_initiator)
|
|
|
|
|
vds.write_bytes(self.privkey, 32)
|
|
|
|
|
vds.write_bytes(self.channel_seed, 32)
|
|
|
|
|
vds.write_bytes(self.node_id, 33)
|
|
|
|
|
vds.write_bytes(bfh(self.funding_txid), 32)
|
2023-03-19 19:32:09 +00:00
|
|
|
vds.write_uint16(self.funding_index)
|
2020-03-13 11:44:29 +01:00
|
|
|
vds.write_string(self.funding_address)
|
|
|
|
|
vds.write_bytes(self.remote_payment_pubkey, 33)
|
|
|
|
|
vds.write_bytes(self.remote_revocation_pubkey, 33)
|
2023-03-19 19:32:09 +00:00
|
|
|
vds.write_uint16(self.local_delay)
|
|
|
|
|
vds.write_uint16(self.remote_delay)
|
2020-03-13 11:44:29 +01:00
|
|
|
vds.write_string(self.host)
|
2023-03-19 19:32:09 +00:00
|
|
|
vds.write_uint16(self.port)
|
2023-07-14 14:21:50 +00:00
|
|
|
vds.write_bytes(self.local_payment_pubkey, 33)
|
lightning: change derivation of funding_pubkey
Ideally, given an on-chain backup, after the remote force-closes, we should be able to spend our anchor output,
to CPFP the remote commitment tx (assuming the channel used OPTION_ANCHORS).
To spend the anchor output, we need to be able to sign with the local funding_privkey.
Previously we derived the funding_key from the channel_seed (which comes from os.urandom).
Prior to anchors, there was no use case for signing with the funding_key given a channel backup.
Now with anchors, we should make its derivation deterministic somehow, in a way so that it can
be derived given just an on-chain backup.
- one way would be to put some more data into the existing OP_RETURN
- uses block space
- the OP_RETURNs can be disabled via "use_recoverable_channels"
- only the initiator can use OP_RETURNs (so what if channel is in incoming dir?)
- instead, new scheme for our funding_key:
- we derive the funding_privkey from the lnworker root secret (derived from our bip32 seed)
- for outgoing channels:
- lnworker_root_secret + remote_node_id + funding_tx_nlocktime
- for incoming channels:
- lnworker_root_secret + remote_node_id + remote_funding_pubkey
- a check is added to avoid reusing the same key between channels:
not letting to user open more than one channel with the same peer in a single block
- only the first 16 bytes of the remote_node_id are used, as the onchain backup OP_RETURNs only contain that
- as the funding_privkey cannot be derived from the channel_seed anymore, it is included in the
imported channel backups, which in turn need a new version defined
- a wallet db upgrade is used to update already stored imported cbs
- alternatively we could keep the imported cbs as-is, so no new version, no new funding_privkey field, as it is clearly somewhat redundant given on-chain backups can reconstruct it
- however adding the field seems easier
- otherwise the existing code would try to derive the funding_privkey from the channel_seed
- also note: atm there is no field in the imported backups to distinguish anchor channels vs static-remotekey channels
2025-01-14 16:14:01 +00:00
|
|
|
vds.write_bytes(self.multisig_funding_privkey, 32)
|
2020-07-02 11:07:10 +02:00
|
|
|
return bytes(vds.input)
|
2020-03-13 11:44:29 +01:00
|
|
|
|
|
|
|
|
    @staticmethod
    def from_bytes(s: bytes) -> "ImportedChannelBackupStorage":
        """Deserialize an imported channel backup from its binary encoding.

        Fields are read in the exact order the serializer wrote them; the
        order must never change without bumping the backup version.
        Raises a generic Exception if the leading version field is unknown.
        """
        vds = BCDataStream()
        vds.write(s)
        version = vds.read_uint16()
        if version not in KNOWN_CHANNEL_BACKUP_VERSIONS:
            raise Exception(f"unknown version for channel backup: {version}")
        is_initiator = vds.read_boolean()
        privkey = vds.read_bytes(32)  # per-channel node private key
        channel_seed = vds.read_bytes(32)
        node_id = vds.read_bytes(33)  # remote node pubkey (compressed)
        funding_txid = vds.read_bytes(32).hex()
        funding_index = vds.read_uint16()
        funding_address = vds.read_string()
        remote_payment_pubkey = vds.read_bytes(33)
        remote_revocation_pubkey = vds.read_bytes(33)
        local_delay = vds.read_uint16()
        remote_delay = vds.read_uint16()
        host = vds.read_string()
        port = vds.read_uint16()
        # version 1 added the local payment pubkey
        if version >= 1:
            local_payment_pubkey = vds.read_bytes(33)
        else:
            local_payment_pubkey = None
        # version 2 added the multisig funding privkey (needed e.g. for
        # signing with the funding key when restoring from a backup)
        if version >= 2:
            multisig_funding_privkey = vds.read_bytes(32)
        else:
            multisig_funding_privkey = None
        return ImportedChannelBackupStorage(
            is_initiator=is_initiator,
            privkey=privkey,
            channel_seed=channel_seed,
            node_id=node_id,
            funding_txid=funding_txid,
            funding_index=funding_index,
            funding_address=funding_address,
            remote_payment_pubkey=remote_payment_pubkey,
            remote_revocation_pubkey=remote_revocation_pubkey,
            local_delay=local_delay,
            remote_delay=remote_delay,
            host=host,
            port=port,
            local_payment_pubkey=local_payment_pubkey,
            multisig_funding_privkey=multisig_funding_privkey,
        )
|
2020-03-13 11:44:29 +01:00
|
|
|
|
2023-03-19 19:15:44 +00:00
|
|
|
@staticmethod
|
|
|
|
|
def from_encrypted_str(data: str, *, password: str) -> "ImportedChannelBackupStorage":
|
|
|
|
|
if not data.startswith('channel_backup:'):
|
|
|
|
|
raise ValueError("missing or invalid magic bytes")
|
|
|
|
|
encrypted = data[15:]
|
|
|
|
|
decrypted = pw_decode_with_version_and_mac(encrypted, password)
|
|
|
|
|
return ImportedChannelBackupStorage.from_bytes(decrypted)
|
2020-03-13 11:44:29 +01:00
|
|
|
|
2019-08-04 04:55:23 +02:00
|
|
|
|
|
|
|
|
class ScriptHtlc(NamedTuple):
    """Pairing of an HTLC with the witness script locking its ctx output."""
    redeem_script: bytes
    htlc: 'UpdateAddHtlc'
|
|
|
|
|
|
2018-09-25 17:08:46 +02:00
|
|
|
|
2019-10-23 17:09:41 +02:00
|
|
|
# FIXME duplicate of TxOutpoint in transaction.py??
@stored_as('funding_outpoint')
@attr.s
class Outpoint(StoredObject):
    """A transaction outpoint (txid + output index), persisted in the wallet db."""
    txid = attr.ib(type=str)  # hex txid, as displayed (big-endian)
    output_index = attr.ib(type=int)

    def to_str(self):
        # canonical "txid:index" form, as used in lightning channel ids etc.
        return "{}:{}".format(self.txid, self.output_index)
|
|
|
|
|
|
2018-07-09 00:15:55 +02:00
|
|
|
|
2021-01-30 16:10:51 +01:00
|
|
|
class HtlcLog(NamedTuple):
    """Outcome of a single HTLC payment attempt, for logging/GUI display."""
    success: bool
    amount_msat: int  # amount for receiver (e.g. from invoice)
    route: Optional['LNPaymentRoute'] = None
    preimage: Optional[bytes] = None
    error_bytes: Optional[bytes] = None  # raw (encrypted) onion error, if any
    failure_msg: Optional['OnionRoutingFailure'] = None
    sender_idx: Optional[int] = None  # index into route of the hop that reported failure
    trampoline_fee_level: Optional[int] = None

    def formatted_tuple(self):
        """Return (route_length, channel, message) strings for display."""
        route = self.route
        route_str = '%d'%len(route)
        short_channel_id = None
        if not self.success:
            sender_idx = self.sender_idx
            failure_msg = self.failure_msg
            if sender_idx is not None:
                try:
                    # the failing channel is the one *after* the reporting hop
                    short_channel_id = route[sender_idx + 1].short_channel_id
                except IndexError:
                    # payment destination reported error
                    short_channel_id = _("Destination node")
            message = failure_msg.code_name()
        else:
            short_channel_id = route[-1].short_channel_id
            message = _('Success')
        chan_str = str(short_channel_id) if short_channel_id else _("Unknown")
        return route_str, chan_str, message
|
|
|
|
|
|
2019-12-10 03:17:57 +01:00
|
|
|
|
2018-10-15 11:47:42 +02:00
|
|
|
# Lightning-specific exception hierarchy.
class LightningError(Exception): pass
# raised by the shachain code when a secret cannot be derived from stored buckets
class UnableToDeriveSecret(LightningError): pass
# raised when the remote peer violates the protocol
class RemoteMisbehaving(LightningError): pass
# gossip: received a channel_update without a matching channel_announcement
class NotFoundChanAnnouncementForUpdate(Exception): pass
class InvalidGossipMsg(Exception):
    """e.g. signature check failed"""
|
2018-11-05 20:29:44 +01:00
|
|
|
|
2019-11-18 05:12:13 +01:00
|
|
|
# user-facing payment errors; messages are translated for display
class PaymentFailure(UserFacingException): pass
class NoPathFound(PaymentFailure):
    def __str__(self):
        return _('No path found')

class FeeBudgetExceeded(PaymentFailure):
    def __str__(self):
        return _('Fee budget exceeded')
|
2018-10-18 22:56:40 +02:00
|
|
|
|
2022-01-26 16:52:38 +01:00
|
|
|
|
|
|
|
|
class LNProtocolError(Exception):
    """Raised in peer methods to trigger an error message."""


class LNProtocolWarning(Exception):
    """Raised in peer methods to trigger a warning message."""
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
2019-08-14 21:38:02 +02:00
|
|
|
# TODO make some of these values configurable?
REDEEM_AFTER_DOUBLE_SPENT_DELAY = 30  # seconds

CHANNEL_OPENING_TIMEOUT = 24*60*60  # seconds

# Small capacity channels are problematic for many reasons. As the onchain fees start to become
# significant compared to the capacity, things start to break down. e.g. the counterparty
# force-closing the channel costs much of the funds in the channel.
# Closing a channel uses ~200 vbytes onchain, feerates could spike to 100 sat/vbyte or even higher;
# that in itself is already 20_000 sats. This mining fee is reserved and cannot be used for payments.
# The value below is chosen arbitrarily to be one order of magnitude higher than that.
MIN_FUNDING_SAT = 200_000

##### CLTV-expiry-delta-related values
# see https://github.com/lightningnetwork/lightning-rfc/blob/master/02-peer-protocol.md#cltv_expiry_delta-selection

# the minimum cltv_expiry accepted for newly received HTLCs
# note: when changing, consider Blockchain.is_tip_stale()
MIN_FINAL_CLTV_DELTA_ACCEPTED = 144
# set it a tiny bit higher for invoices as blocks could get mined
# during forward path of payment
MIN_FINAL_CLTV_DELTA_FOR_INVOICE = MIN_FINAL_CLTV_DELTA_ACCEPTED + 3

# the deadline for offered HTLCs:
# the deadline after which the channel has to be failed and timed out on-chain
NBLOCK_DEADLINE_DELTA_AFTER_EXPIRY_FOR_OFFERED_HTLCS = 1

# the deadline for received HTLCs this node has fulfilled:
# the deadline after which the channel has to be failed and the HTLC fulfilled on-chain before its cltv_expiry
NBLOCK_DEADLINE_DELTA_BEFORE_EXPIRY_FOR_RECEIVED_HTLCS = 72

# reject HTLCs whose expiry is unreasonably far in the future (in blocks)
NBLOCK_CLTV_DELTA_TOO_FAR_INTO_FUTURE = 28 * 144

# upper bound (in blocks) we accept for the remote's to_self_delay imposed on us
MAXIMUM_REMOTE_TO_SELF_DELAY_ACCEPTED = 2016
|
|
|
|
|
|
2018-06-28 15:50:45 +02:00
|
|
|
class RevocationStore:
    # closely based on code in lightningnetwork/lnd
    """Compact storage for the remote's revoked per-commitment secrets (BOLT-03 shachain).

    Only O(log n) "bucket" secrets are stored; older secrets are re-derived
    on demand via shachain_derive.
    """

    # highest possible commitment index (48-bit counter, counting down)
    START_INDEX = 2 ** 48 - 1

    def __init__(self, storage):
        # `storage` is a dict-like persisted object; initialize it on first use
        if len(storage) == 0:
            storage['index'] = self.START_INDEX
            storage['buckets'] = {}
        self.storage = storage
        self.buckets = storage['buckets']

    def add_next_entry(self, hsh):
        """Insert the next revealed secret; verifies it is consistent with prior buckets."""
        index = self.storage['index']
        new_element = ShachainElement(index=index, secret=hsh)
        bucket = count_trailing_zeros(index)
        # every lower bucket must be derivable from the new element, else the
        # peer gave us an inconsistent secret
        for i in range(0, bucket):
            this_bucket = self.buckets[i]
            e = shachain_derive(new_element, this_bucket.index)
            if e != this_bucket:
                raise Exception("hash is not derivable: {} {} {}".format(e.secret.hex(), this_bucket.secret.hex(), this_bucket.index))
        self.buckets[bucket] = new_element
        self.storage['index'] = index - 1

    def retrieve_secret(self, index: int) -> bytes:
        """Return the secret for commitment `index`, deriving it from a stored bucket.

        Raises UnableToDeriveSecret if no bucket can derive it.
        """
        assert index <= self.START_INDEX, index
        for i in range(0, 49):
            bucket = self.buckets.get(i)
            if bucket is None:
                raise UnableToDeriveSecret()
            try:
                element = shachain_derive(bucket, index)
            except UnableToDeriveSecret:
                continue
            return element.secret
        raise UnableToDeriveSecret()

    def __eq__(self, o):
        return type(o) is RevocationStore and self.serialize() == o.serialize()

    def __hash__(self):
        return hash(json.dumps(self.serialize(), sort_keys=True))
|
|
|
|
|
|
2018-10-10 22:54:30 +02:00
|
|
|
|
2018-06-28 15:50:45 +02:00
|
|
|
def count_trailing_zeros(index):
    """ BOLT-03 (where_to_put_secret) """
    # number of trailing zero bits of `index`; by convention an index of
    # zero maps to 48 (the full width of the 48-bit commitment counter)
    if index == 0:
        return 48
    # isolate the lowest set bit, then its bit position is the answer
    return (index & -index).bit_length() - 1
|
|
|
|
|
|
2018-07-09 00:15:55 +02:00
|
|
|
def shachain_derive(element, to_index):
    """Derive the shachain element at `to_index` from an ancestor `element`.

    Raises UnableToDeriveSecret when `to_index` is not in the subtree
    rooted at element.index.
    """
    def prefix_of(index, pos):
        # clear the lowest `pos` bits of a 64-bit index
        mask = (1 << 64) - 1 - ((1 << pos) - 1)
        return index & mask

    src_index = element.index
    n_zeros = count_trailing_zeros(src_index)
    # derivation is only possible towards descendants, i.e. indexes that
    # share src_index's prefix above its trailing-zero run
    if src_index != prefix_of(to_index, n_zeros):
        raise UnableToDeriveSecret("prefixes are different; index not derivable")
    derived_secret = get_per_commitment_secret_from_seed(element.secret, to_index, n_zeros)
    return ShachainElement(derived_secret, to_index)
|
2018-06-28 15:50:45 +02:00
|
|
|
|
2021-10-05 09:13:01 +02:00
|
|
|
class ShachainElement(NamedTuple):
    """One stored node of the shachain: a secret and its commitment index."""
    secret: bytes
    index: int

    def __str__(self):
        return "ShachainElement(" + self.secret.hex() + "," + str(self.index) + ")"

    @stored_in('buckets', tuple)
    def read(*x):
        # db representation is (hex_secret, index) — reconstruct from that
        return ShachainElement(bfh(x[0]), int(x[1]))
|
|
|
|
|
|
2018-06-28 15:50:45 +02:00
|
|
|
|
|
|
|
|
def get_per_commitment_secret_from_seed(seed: bytes, i: int, bits: int = 48) -> bytes:
    """Generate per commitment secret."""
    # BOLT-03 derivation: walk the index bits from most- to least-significant;
    # for every set bit, flip that bit in the buffer and re-hash.
    secret = bytearray(seed)
    for bit in range(bits - 1, -1, -1):
        if (i >> bit) & 1:
            secret[bit // 8] ^= 1 << (bit % 8)
            secret = bytearray(sha256(secret))
    return bytes(secret)
|
|
|
|
|
|
2018-07-09 00:15:55 +02:00
|
|
|
def secret_to_pubkey(secret: int) -> bytes:
    # `secret` is an integer scalar; returns the compressed 33-byte pubkey
    assert type(secret) is int
    return ecc.ECPrivkey.from_secret_scalar(secret).get_public_key_bytes(compressed=True)
|
2018-06-28 15:50:45 +02:00
|
|
|
|
2018-10-15 11:16:35 +02:00
|
|
|
|
2018-07-09 00:15:55 +02:00
|
|
|
def derive_pubkey(basepoint: bytes, per_commitment_point: bytes) -> bytes:
    # BOLT-03: pubkey = basepoint + SHA256(per_commitment_point || basepoint) * G
    p = ecc.ECPubkey(basepoint) + ecc.GENERATOR * ecc.string_to_number(sha256(per_commitment_point + basepoint))
    return p.get_public_key_bytes()
|
|
|
|
|
|
2018-07-09 00:15:55 +02:00
|
|
|
def derive_privkey(secret: int, per_commitment_point: bytes) -> int:
    # scalar counterpart of derive_pubkey:
    # privkey = secret + SHA256(per_commitment_point || basepoint), mod n
    assert type(secret) is int
    basepoint_bytes = secret_to_pubkey(secret)
    basepoint = secret + ecc.string_to_number(sha256(per_commitment_point + basepoint_bytes))
    basepoint %= CURVE_ORDER
    return basepoint
|
|
|
|
|
|
2018-07-09 00:15:55 +02:00
|
|
|
def derive_blinded_pubkey(basepoint: bytes, per_commitment_point: bytes) -> bytes:
    # BOLT-03 revocationpubkey:
    # revocationpubkey = basepoint * SHA256(basepoint || pcp) + pcp * SHA256(pcp || basepoint)
    k1 = ecc.ECPubkey(basepoint) * ecc.string_to_number(sha256(basepoint + per_commitment_point))
    k2 = ecc.ECPubkey(per_commitment_point) * ecc.string_to_number(sha256(per_commitment_point + basepoint))
    return (k1 + k2).get_public_key_bytes()
|
|
|
|
|
|
2018-07-09 00:15:55 +02:00
|
|
|
def derive_blinded_privkey(basepoint_secret: bytes, per_commitment_secret: bytes) -> bytes:
    """Scalar counterpart of derive_blinded_pubkey (BOLT-03 revocationprivkey).

    revocationprivkey =
        basepoint_secret * SHA256(basepoint || pcp)
        + per_commitment_secret * SHA256(pcp || basepoint), mod n
    Returns the 32-byte big-endian scalar.
    """
    basepoint = ecc.ECPrivkey(basepoint_secret).get_public_key_bytes(compressed=True)
    per_commitment_point = ecc.ECPrivkey(per_commitment_secret).get_public_key_bytes(compressed=True)
    k1 = ecc.string_to_number(basepoint_secret) * ecc.string_to_number(sha256(basepoint + per_commitment_point))
    k2 = ecc.string_to_number(per_commitment_secret) * ecc.string_to_number(sha256(per_commitment_point + basepoint))
    # renamed from `sum`, which shadowed the builtin
    blinded_scalar = (k1 + k2) % ecc.CURVE_ORDER
    return int.to_bytes(blinded_scalar, length=32, byteorder='big', signed=False)
|
2018-07-09 00:15:55 +02:00
|
|
|
|
|
|
|
|
|
2021-09-22 11:12:38 +02:00
|
|
|
def derive_payment_basepoint(static_payment_secret: bytes, funding_pubkey: bytes) -> Keypair:
    """Derive a per-channel payment basepoint keypair.

    The secret is SHA256(static_payment_secret || funding_pubkey), tying the
    basepoint to both our static secret and this channel's funding key.
    """
    assert isinstance(static_payment_secret, bytes)
    assert isinstance(funding_pubkey, bytes)
    payment_basepoint = ecc.ECPrivkey(sha256(static_payment_secret + funding_pubkey))
    return Keypair(
        pubkey=payment_basepoint.get_public_key_bytes(),
        privkey=payment_basepoint.get_secret_bytes()
    )
|
|
|
|
|
|
|
|
|
|
|
lightning: change derivation of funding_pubkey
Ideally, given an on-chain backup, after the remote force-closes, we should be able to spend our anchor output,
to CPFP the remote commitment tx (assuming the channel used OPTION_ANCHORS).
To spend the anchor output, we need to be able to sign with the local funding_privkey.
Previously we derived the funding_key from the channel_seed (which comes from os.urandom).
Prior to anchors, there was no use case for signing with the funding_key given a channel backup.
Now with anchors, we should make its derivation deterministic somehow, in a way so that it can
be derived given just an on-chain backup.
- one way would be to put some more data into the existing OP_RETURN
- uses block space
- the OP_RETURNs can be disabled via "use_recoverable_channels"
- only the initiator can use OP_RETURNs (so what if channel is in incoming dir?)
- instead, new scheme for our funding_key:
- we derive the funding_privkey from the lnworker root secret (derived from our bip32 seed)
- for outgoing channels:
- lnworker_root_secret + remote_node_id + funding_tx_nlocktime
- for incoming channels:
- lnworker_root_secret + remote_node_id + remote_funding_pubkey
- a check is added to avoid reusing the same key between channels:
not letting to user open more than one channel with the same peer in a single block
- only the first 16 bytes of the remote_node_id are used, as the onchain backup OP_RETURNs only contain that
- as the funding_privkey cannot be derived from the channel_seed anymore, it is included in the
imported channel backups, which in turn need a new version defined
- a wallet db upgrade is used to update already stored imported cbs
- alternatively we could keep the imported cbs as-is, so no new version, no new funding_privkey field, as it is clearly somewhat redundant given on-chain backups can reconstruct it
- however adding the field seems easier
- otherwise the existing code would try to derive the funding_privkey from the channel_seed
- also note: atm there is no field in the imported backups to distinguish anchor channels vs static-remotekey channels
2025-01-14 16:14:01 +00:00
|
|
|
def derive_multisig_funding_key_if_we_opened(
        *,
        funding_root_secret: bytes,  # 32-byte root secret from lnworker
        remote_node_id_or_prefix: bytes,  # full 33-byte node id, or its NODE_ID_PREFIX_LEN-byte prefix
        nlocktime: int,  # nLockTime of the funding tx
) -> Keypair:
    """Deterministically derive our funding keypair for channels WE opened.

    Key = H_tag(funding_root_secret || node_id_prefix || nlocktime), so it can
    be re-derived from an on-chain backup (which stores only the node-id
    prefix). Only the prefix of the remote node id is used.
    """
    from .lnworker import NODE_ID_PREFIX_LEN
    assert isinstance(funding_root_secret, bytes)
    assert len(funding_root_secret) == 32
    assert isinstance(remote_node_id_or_prefix, bytes)
    assert len(remote_node_id_or_prefix) in (NODE_ID_PREFIX_LEN, 33)
    assert isinstance(nlocktime, int)
    nlocktime_bytes = int.to_bytes(nlocktime, length=4, byteorder="little", signed=False)
    node_id_prefix = remote_node_id_or_prefix[0:NODE_ID_PREFIX_LEN]
    funding_key = ecc.ECPrivkey(bitcoin.bip340_tagged_hash(
        tag=b"electrum/ln_multisig_funding_key/we_opened",
        msg=funding_root_secret + node_id_prefix + nlocktime_bytes,
    ))
    return Keypair(
        pubkey=funding_key.get_public_key_bytes(),
        privkey=funding_key.get_secret_bytes(),
    )
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def derive_multisig_funding_key_if_they_opened(
        *,
        funding_root_secret: bytes,  # 32-byte root secret from lnworker
        remote_node_id_or_prefix: bytes,  # full 33-byte node id, or its NODE_ID_PREFIX_LEN-byte prefix
        remote_funding_pubkey: bytes,  # remote's 33-byte funding pubkey
) -> Keypair:
    """Deterministically derive our funding keypair for channels THEY opened.

    Key = H_tag(funding_root_secret || node_id_prefix || remote_funding_pubkey);
    for incoming channels we cannot use the funding-tx nlocktime (we do not
    choose it), so the remote funding pubkey is mixed in instead.
    """
    from .lnworker import NODE_ID_PREFIX_LEN
    assert isinstance(funding_root_secret, bytes)
    assert len(funding_root_secret) == 32
    assert isinstance(remote_node_id_or_prefix, bytes)
    assert len(remote_node_id_or_prefix) in (NODE_ID_PREFIX_LEN, 33)
    assert isinstance(remote_funding_pubkey, bytes)
    assert len(remote_funding_pubkey) == 33
    node_id_prefix = remote_node_id_or_prefix[0:NODE_ID_PREFIX_LEN]
    funding_key = ecc.ECPrivkey(bitcoin.bip340_tagged_hash(
        tag=b"electrum/ln_multisig_funding_key/they_opened",
        msg=funding_root_secret + node_id_prefix + remote_funding_pubkey,
    ))
    return Keypair(
        pubkey=funding_key.get_public_key_bytes(),
        privkey=funding_key.get_secret_bytes(),
    )
|
|
|
|
|
|
|
|
|
|
|
2024-04-26 20:09:00 +00:00
|
|
|
def make_htlc_tx_output(
        amount_msat,  # int: HTLC amount in millisatoshi
        local_feerate,  # int: feerate in sat/kw
        revocationpubkey,
        local_delayedpubkey,
        success,  # bool: True for HTLC-success tx, False for HTLC-timeout tx
        to_self_delay,
        has_anchors: bool
) -> Tuple[bytes, PartialTxOutput]:
    """Build the (witness_script, output) pair for an HTLC-success/timeout tx.

    The output pays to the BOLT-03 to_local script (revocable after
    to_self_delay). The fee is computed from the per-tx-type weight and
    deducted from the HTLC amount.
    """
    assert type(amount_msat) is int
    assert type(local_feerate) is int
    script = make_commitment_output_to_local_witness_script(
        revocation_pubkey=revocationpubkey,
        to_self_delay=to_self_delay,
        delayed_pubkey=local_delayedpubkey,
    )

    p2wsh = bitcoin.redeem_script_to_address('p2wsh', script)
    weight = effective_htlc_tx_weight(success=success, has_anchors=has_anchors)
    fee = local_feerate * weight
    # round fee down to a whole number of satoshis (amounts below are msat)
    fee = fee // 1000 * 1000
    final_amount_sat = (amount_msat - fee) // 1000
    assert final_amount_sat > 0, final_amount_sat
    output = PartialTxOutput.from_address_and_value(p2wsh, final_amount_sat)
    return script, output
|
2018-06-28 15:50:45 +02:00
|
|
|
|
2018-10-23 16:44:39 +02:00
|
|
|
def make_htlc_tx_witness(remotehtlcsig: bytes, localhtlcsig: bytes,
                         payment_preimage: bytes, witness_script: bytes) -> bytes:
    """Serialize the witness stack spending an HTLC output into an HTLC tx."""
    for arg in (remotehtlcsig, localhtlcsig, payment_preimage, witness_script):
        assert type(arg) is bytes
    # leading 0 is the dummy element consumed by OP_CHECKMULTISIG
    return construct_witness([0, remotehtlcsig, localhtlcsig, payment_preimage, witness_script])
|
2018-06-28 15:50:45 +02:00
|
|
|
|
2018-10-23 16:44:39 +02:00
|
|
|
def make_htlc_tx_inputs(htlc_output_txid: str, htlc_output_index: int,
                        amount_msat: int, witness_script: bytes) -> List[PartialTxInput]:
    """Build the single input of an HTLC-success/timeout tx.

    Spends the HTLC output of the commitment tx; nsequence=0 as required by
    BOLT-03 for HTLC transactions.
    """
    assert type(htlc_output_txid) is str
    assert type(htlc_output_index) is int
    assert type(amount_msat) is int
    assert type(witness_script) is bytes
    txin = PartialTxInput(prevout=TxOutpoint(txid=bfh(htlc_output_txid), out_idx=htlc_output_index),
                          nsequence=0)
    txin.witness_script = witness_script
    txin.script_sig = b''
    # value is taken on trust from the channel state (no utxo lookup)
    txin._trusted_value_sats = amount_msat // 1000
    c_inputs = [txin]
    return c_inputs
|
|
|
|
|
|
2023-10-19 16:33:04 +00:00
|
|
|
def make_htlc_tx(*, cltv_abs: int, inputs: List[PartialTxInput], output: PartialTxOutput) -> PartialTransaction:
    """Assemble an HTLC-success/timeout transaction with locktime cltv_abs."""
    assert type(cltv_abs) is int
    return PartialTransaction.from_io(inputs, [output], locktime=cltv_abs, version=2)
|
|
|
|
|
|
2023-10-19 16:33:04 +00:00
|
|
|
def make_offered_htlc(
        *,
        revocation_pubkey: bytes,
        remote_htlcpubkey: bytes,
        local_htlcpubkey: bytes,
        payment_hash: bytes,
        has_anchors: bool,
) -> bytes:
    """Build the BOLT-03 witness script for an HTLC we OFFER.

    Spend paths: revocation key; remote with preimage (HTLC-success on their
    side); 2-of-2 multisig for our HTLC-timeout tx. With anchors, the
    non-revocation paths additionally require a 1-block CSV delay.
    """
    assert type(revocation_pubkey) is bytes
    assert type(remote_htlcpubkey) is bytes
    assert type(local_htlcpubkey) is bytes
    assert type(payment_hash) is bytes
    script_opcodes = [
        opcodes.OP_DUP,
        opcodes.OP_HASH160,
        bitcoin.hash_160(revocation_pubkey),
        opcodes.OP_EQUAL,
        opcodes.OP_IF,
        # revocation path
        opcodes.OP_CHECKSIG,
        opcodes.OP_ELSE,
        remote_htlcpubkey,
        opcodes.OP_SWAP,
        opcodes.OP_SIZE,
        32,
        opcodes.OP_EQUAL,
        opcodes.OP_NOTIF,
        # timeout path: 2-of-2 multisig (HTLC-timeout tx)
        opcodes.OP_DROP,
        2,
        opcodes.OP_SWAP,
        local_htlcpubkey,
        2,
        opcodes.OP_CHECKMULTISIG,
        opcodes.OP_ELSE,
        # success path: remote reveals the payment preimage
        opcodes.OP_HASH160,
        crypto.ripemd(payment_hash),
        opcodes.OP_EQUALVERIFY,
        opcodes.OP_CHECKSIG,
        opcodes.OP_ENDIF,
    ]
    if has_anchors:
        # anchors: add "1 OP_CSV OP_DROP" to prevent unconfirmed chains
        script_opcodes.extend([1, opcodes.OP_CHECKSEQUENCEVERIFY, opcodes.OP_DROP])
    script_opcodes.append(opcodes.OP_ENDIF)
    script = construct_script(script_opcodes)
    return script
|
2018-06-28 15:50:45 +02:00
|
|
|
|
2023-10-19 16:33:04 +00:00
|
|
|
def make_received_htlc(
        *,
        revocation_pubkey: bytes,
        remote_htlcpubkey: bytes,
        local_htlcpubkey: bytes,
        payment_hash: bytes,
        cltv_abs: int,  # absolute block height at which the HTLC expires
        has_anchors: bool,
) -> bytes:
    """Build the BOLT-03 witness script for an HTLC we RECEIVE.

    Spend paths: revocation key; 2-of-2 multisig with preimage for our
    HTLC-success tx; remote after cltv_abs (timeout). With anchors, the
    non-revocation paths additionally require a 1-block CSV delay.
    """
    for i in [revocation_pubkey, remote_htlcpubkey, local_htlcpubkey, payment_hash]:
        assert type(i) is bytes
    assert type(cltv_abs) is int

    script_opcodes = [
        opcodes.OP_DUP,
        opcodes.OP_HASH160,
        bitcoin.hash_160(revocation_pubkey),
        opcodes.OP_EQUAL,
        opcodes.OP_IF,
        # revocation path
        opcodes.OP_CHECKSIG,
        opcodes.OP_ELSE,
        remote_htlcpubkey,
        opcodes.OP_SWAP,
        opcodes.OP_SIZE,
        32,
        opcodes.OP_EQUAL,
        opcodes.OP_IF,
        # success path: 2-of-2 multisig with preimage (HTLC-success tx)
        opcodes.OP_HASH160,
        crypto.ripemd(payment_hash),
        opcodes.OP_EQUALVERIFY,
        2,
        opcodes.OP_SWAP,
        local_htlcpubkey,
        2,
        opcodes.OP_CHECKMULTISIG,
        opcodes.OP_ELSE,
        # timeout path: remote spends after expiry
        opcodes.OP_DROP,
        cltv_abs,
        opcodes.OP_CHECKLOCKTIMEVERIFY,
        opcodes.OP_DROP,
        opcodes.OP_CHECKSIG,
        opcodes.OP_ENDIF,
    ]
    if has_anchors:
        # anchors: add "1 OP_CSV OP_DROP" to prevent unconfirmed chains
        script_opcodes.extend([1, opcodes.OP_CHECKSEQUENCEVERIFY, opcodes.OP_DROP])
    script_opcodes.append(opcodes.OP_ENDIF)
    script = construct_script(script_opcodes)
    return script
|
2018-06-28 15:50:45 +02:00
|
|
|
|
2022-04-28 10:21:47 +02:00
|
|
|
# script-matching template for offered-HTLC outputs (see make_offered_htlc);
# OPPushDataGeneric slots match the variable pushes (keys/hashes)
WITNESS_TEMPLATE_OFFERED_HTLC = [
    opcodes.OP_DUP,
    opcodes.OP_HASH160,
    OPPushDataGeneric(None),  # hash160(revocation_pubkey)
    opcodes.OP_EQUAL,
    opcodes.OP_IF,
    opcodes.OP_CHECKSIG,
    opcodes.OP_ELSE,
    OPPushDataGeneric(None),  # remote_htlcpubkey
    opcodes.OP_SWAP,
    opcodes.OP_SIZE,
    OPPushDataGeneric(lambda x: x==1),  # push of the constant 32
    opcodes.OP_EQUAL,
    opcodes.OP_NOTIF,
    opcodes.OP_DROP,
    opcodes.OP_2,
    opcodes.OP_SWAP,
    OPPushDataGeneric(None),  # local_htlcpubkey
    opcodes.OP_2,
    opcodes.OP_CHECKMULTISIG,
    opcodes.OP_ELSE,
    opcodes.OP_HASH160,
    OPPushDataGeneric(None),  # ripemd160(payment_hash)
    opcodes.OP_EQUALVERIFY,
    opcodes.OP_CHECKSIG,
    opcodes.OP_ENDIF,
    opcodes.OP_ENDIF,
]
|
|
|
|
|
|
|
|
|
|
# script-matching template for received-HTLC outputs (see make_received_htlc);
# OPPushDataGeneric slots match the variable pushes (keys/hashes/locktime)
WITNESS_TEMPLATE_RECEIVED_HTLC = [
    opcodes.OP_DUP,
    opcodes.OP_HASH160,
    OPPushDataGeneric(None),  # hash160(revocation_pubkey)
    opcodes.OP_EQUAL,
    opcodes.OP_IF,
    opcodes.OP_CHECKSIG,
    opcodes.OP_ELSE,
    OPPushDataGeneric(None),  # remote_htlcpubkey
    opcodes.OP_SWAP,
    opcodes.OP_SIZE,
    OPPushDataGeneric(lambda x: x==1),  # push of the constant 32
    opcodes.OP_EQUAL,
    opcodes.OP_IF,
    opcodes.OP_HASH160,
    OPPushDataGeneric(None),  # ripemd160(payment_hash)
    opcodes.OP_EQUALVERIFY,
    opcodes.OP_2,
    opcodes.OP_SWAP,
    OPPushDataGeneric(None),  # local_htlcpubkey
    opcodes.OP_2,
    opcodes.OP_CHECKMULTISIG,
    opcodes.OP_ELSE,
    opcodes.OP_DROP,
    OPPushDataGeneric(None),  # cltv_abs
    opcodes.OP_CHECKLOCKTIMEVERIFY,
    opcodes.OP_DROP,
    opcodes.OP_CHECKSIG,
    opcodes.OP_ENDIF,
    opcodes.OP_ENDIF,
]
|
|
|
|
|
|
|
|
|
|
|
2023-10-19 16:33:04 +00:00
|
|
|
def make_htlc_output_witness_script(
        *,
        is_received_htlc: bool,
        remote_revocation_pubkey: bytes,
        remote_htlc_pubkey: bytes,
        local_htlc_pubkey: bytes,
        payment_hash: bytes,
        cltv_abs: Optional[int],
        has_anchors: bool,
) -> bytes:
    """Return the witness script of an HTLC output in a commitment tx.

    Dispatches to the offered/received variant; only the received-HTLC
    script embeds the absolute CLTV expiry (cltv_abs).
    """
    if not is_received_htlc:
        return make_offered_htlc(
            revocation_pubkey=remote_revocation_pubkey,
            remote_htlcpubkey=remote_htlc_pubkey,
            local_htlcpubkey=local_htlc_pubkey,
            payment_hash=payment_hash,
            has_anchors=has_anchors,
        )
    return make_received_htlc(
        revocation_pubkey=remote_revocation_pubkey,
        remote_htlcpubkey=remote_htlc_pubkey,
        local_htlcpubkey=local_htlc_pubkey,
        payment_hash=payment_hash,
        cltv_abs=cltv_abs,
        has_anchors=has_anchors,
    )
|
2018-10-23 16:44:39 +02:00
|
|
|
|
|
|
|
|
|
2020-04-13 15:57:53 +02:00
|
|
|
def get_ordered_channel_configs(chan: 'AbstractChannel', for_us: bool) -> Tuple[Union[LocalConfig, RemoteConfig],
                                                                                Union[LocalConfig, RemoteConfig]]:
    """Return (subject_config, counterparty_config) for *chan*.

    If for_us is True the subject is our side (LOCAL), otherwise the remote side.
    """
    if for_us:
        return chan.config[LOCAL], chan.config[REMOTE]
    return chan.config[REMOTE], chan.config[LOCAL]
|
|
|
|
|
|
|
|
|
|
|
2019-09-07 07:37:13 +02:00
|
|
|
def possible_output_idxs_of_htlc_in_ctx(*, chan: 'Channel', pcp: bytes, subject: 'HTLCOwner',
                                        htlc_direction: 'Direction', ctx: Transaction,
                                        htlc: 'UpdateAddHtlc') -> Set[int]:
    """Return the set of output indices in commitment tx *ctx* that could
    correspond to *htlc*.

    Candidates are found by reconstructing the HTLC witness script / p2wsh
    address for the given per-commitment-point *pcp* and direction, and are
    then filtered by exact output value (htlc amount rounded down to sat).
    """
    # fix: use the unpacked amount_msat consistently (it was previously
    # assigned but the final comparison re-read htlc.amount_msat)
    amount_msat, cltv_abs, payment_hash = htlc.amount_msat, htlc.cltv_abs, htlc.payment_hash
    for_us = subject == LOCAL
    conf, other_conf = get_ordered_channel_configs(chan=chan, for_us=for_us)

    # derive the per-commitment keys needed to rebuild the witness script
    other_revocation_pubkey = derive_blinded_pubkey(other_conf.revocation_basepoint.pubkey, pcp)
    other_htlc_pubkey = derive_pubkey(other_conf.htlc_basepoint.pubkey, pcp)
    htlc_pubkey = derive_pubkey(conf.htlc_basepoint.pubkey, pcp)
    witness_script = make_htlc_output_witness_script(
        is_received_htlc=htlc_direction == RECEIVED,
        remote_revocation_pubkey=other_revocation_pubkey,
        remote_htlc_pubkey=other_htlc_pubkey,
        local_htlc_pubkey=htlc_pubkey,
        payment_hash=payment_hash,
        cltv_abs=cltv_abs,
        has_anchors=chan.has_anchors(),
    )
    htlc_address = redeem_script_to_address('p2wsh', witness_script)
    candidates = ctx.get_output_idxs_from_address(htlc_address)
    return {output_idx for output_idx in candidates
            if ctx.outputs()[output_idx].value == amount_msat // 1000}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def map_htlcs_to_ctx_output_idxs(*, chan: 'Channel', ctx: Transaction, pcp: bytes,
                                 subject: 'HTLCOwner', ctn: int) -> Dict[Tuple['Direction', 'UpdateAddHtlc'], Tuple[int, int]]:
    """Returns a dict from (htlc_dir, htlc) to (ctx_output_idx, htlc_relative_idx)"""
    # greedy matching: each htlc claims the first (lowest-index) candidate
    # output not yet claimed by another htlc
    htlc_to_ctx_output_idx_map = {}  # type: Dict[Tuple[Direction, UpdateAddHtlc], int]
    unclaimed_ctx_output_idxs = set(range(len(ctx.outputs())))
    offered_htlcs = chan.included_htlcs(subject, SENT, ctn=ctn)
    # sort by cltv, mirroring the BOLT-03 tie-break ordering used when the ctx
    # outputs were built, so identical-looking htlcs map deterministically
    offered_htlcs.sort(key=lambda htlc: htlc.cltv_abs)
    received_htlcs = chan.included_htlcs(subject, RECEIVED, ctn=ctn)
    received_htlcs.sort(key=lambda htlc: htlc.cltv_abs)
    for direction, htlcs in zip([SENT, RECEIVED], [offered_htlcs, received_htlcs]):
        for htlc in htlcs:
            cands = sorted(possible_output_idxs_of_htlc_in_ctx(chan=chan,
                                                               pcp=pcp,
                                                               subject=subject,
                                                               htlc_direction=direction,
                                                               ctx=ctx,
                                                               htlc=htlc))
            for ctx_output_idx in cands:
                if ctx_output_idx in unclaimed_ctx_output_idxs:
                    unclaimed_ctx_output_idxs.discard(ctx_output_idx)
                    htlc_to_ctx_output_idx_map[(direction, htlc)] = ctx_output_idx
                    break
            # NOTE(review): an htlc with no unclaimed candidate is silently
            # omitted from the result — presumably trimmed-to-dust; confirm callers
    # calc htlc_relative_idx
    # htlc_relative_idx is the htlc's rank among matched htlc outputs when
    # ordered by ctx output index
    inverse_map = {ctx_output_idx: (direction, htlc)
                   for ((direction, htlc), ctx_output_idx) in htlc_to_ctx_output_idx_map.items()}

    return {inverse_map[ctx_output_idx]: (ctx_output_idx, htlc_relative_idx)
            for htlc_relative_idx, ctx_output_idx in enumerate(sorted(inverse_map))}
|
|
|
|
|
|
|
|
|
|
|
2020-04-30 19:37:06 +02:00
|
|
|
def make_htlc_tx_with_open_channel(*, chan: 'Channel', pcp: bytes, subject: 'HTLCOwner', ctn: int,
                                   htlc_direction: 'Direction', commit: Transaction, ctx_output_idx: int,
                                   htlc: 'UpdateAddHtlc', name: str = None) -> Tuple[bytes, PartialTransaction]:
    """Build the HTLC-success/HTLC-timeout tx spending output *ctx_output_idx*
    of commitment tx *commit*, for the commitment numbered *ctn* of *chan*.

    Returns (witness_script_of_htlc_tx_output, htlc_tx).
    NOTE(review): the *name* parameter is accepted but unused in this body —
    presumably kept for interface compatibility; confirm before removing.
    """
    amount_msat, cltv_abs, payment_hash = htlc.amount_msat, htlc.cltv_abs, htlc.payment_hash
    for_us = subject == LOCAL
    conf, other_conf = get_ordered_channel_configs(chan=chan, for_us=for_us)

    # per-commitment keys for the htlc-tx output and the spent htlc output
    delayedpubkey = derive_pubkey(conf.delayed_basepoint.pubkey, pcp)
    other_revocation_pubkey = derive_blinded_pubkey(other_conf.revocation_basepoint.pubkey, pcp)
    other_htlc_pubkey = derive_pubkey(other_conf.htlc_basepoint.pubkey, pcp)
    htlc_pubkey = derive_pubkey(conf.htlc_basepoint.pubkey, pcp)
    # HTLC-success for the HTLC spending from a received HTLC output
    # if we do not receive, and the commitment tx is not for us, they receive, so it is also an HTLC-success
    is_htlc_success = htlc_direction == RECEIVED
    witness_script_of_htlc_tx_output, htlc_tx_output = make_htlc_tx_output(
        amount_msat=amount_msat,
        local_feerate=chan.get_feerate(subject, ctn=ctn),
        revocationpubkey=other_revocation_pubkey,
        local_delayedpubkey=delayedpubkey,
        success=is_htlc_success,
        to_self_delay=other_conf.to_self_delay,
        has_anchors=chan.has_anchors(),
    )
    # witness script of the commitment-tx output being spent
    witness_script_in = make_htlc_output_witness_script(
        is_received_htlc=is_htlc_success,
        remote_revocation_pubkey=other_revocation_pubkey,
        remote_htlc_pubkey=other_htlc_pubkey,
        local_htlc_pubkey=htlc_pubkey,
        payment_hash=payment_hash,
        cltv_abs=cltv_abs,
        has_anchors=chan.has_anchors(),
    )
    htlc_tx_inputs = make_htlc_tx_inputs(
        commit.txid(), ctx_output_idx,
        amount_msat=amount_msat,
        witness_script=witness_script_in)
    if chan.has_anchors():
        # anchor channels: 1-block CSV on the htlc-tx input
        htlc_tx_inputs[0].nsequence = 1
    if is_htlc_success:
        # HTLC-success txs are not timelocked; only HTLC-timeout carries the cltv
        cltv_abs = 0
    htlc_tx = make_htlc_tx(cltv_abs=cltv_abs, inputs=htlc_tx_inputs, output=htlc_tx_output)
    return witness_script_of_htlc_tx_output, htlc_tx
|
2018-06-28 15:50:45 +02:00
|
|
|
|
2018-09-21 19:18:34 +02:00
|
|
|
def make_funding_input(local_funding_pubkey: bytes, remote_funding_pubkey: bytes,
                       funding_pos: int, funding_txid: str, funding_sat: int) -> PartialTxInput:
    """Build the (partial) commitment-tx input that spends the 2-of-2
    funding output, with a sorted-multisig WSH descriptor attached.
    """
    sorted_pubkeys = sorted([local_funding_pubkey.hex(), remote_funding_pubkey.hex()])
    funding_outpoint = TxOutpoint(txid=bfh(funding_txid), out_idx=funding_pos)
    txin = PartialTxInput(prevout=funding_outpoint)
    providers = [descriptor.PubkeyProvider.parse(pk) for pk in sorted_pubkeys]
    multisig_desc = descriptor.MultisigDescriptor(pubkeys=providers, thresh=2, is_sorted=True)
    txin.script_descriptor = descriptor.WSHDescriptor(subdescriptor=multisig_desc)
    # value is trusted (not fetched from a prev-tx) as we created the funding tx
    txin._trusted_value_sats = funding_sat
    return txin
|
2018-09-21 19:18:34 +02:00
|
|
|
|
2022-06-29 18:07:03 +02:00
|
|
|
|
|
|
|
|
class HTLCOwner(IntEnum):
    """Identifies one side of a channel; negation flips to the other side."""
    LOCAL = 1
    REMOTE = -LOCAL

    def inverted(self) -> 'HTLCOwner':
        """Return the counterparty of this owner."""
        return -self

    def __neg__(self) -> 'HTLCOwner':
        # negate as a plain int, then re-wrap in the enum
        return HTLCOwner(-int(self))
|
|
|
|
|
|
2019-01-21 21:27:27 +01:00
|
|
|
|
2022-06-29 18:07:03 +02:00
|
|
|
class Direction(IntEnum):
    """Direction of an HTLC relative to the subject party."""
    SENT = -1     # in the context of HTLCs: "offered" HTLCs
    RECEIVED = 1  # in the context of HTLCs: "received" HTLCs
|
2019-01-21 21:27:27 +01:00
|
|
|
|
|
|
|
|
# Module-level shorthands for the enum members; used pervasively in this file.
SENT = Direction.SENT
RECEIVED = Direction.RECEIVED

LOCAL = HTLCOwner.LOCAL
REMOTE = HTLCOwner.REMOTE
|
|
|
|
|
|
2020-06-15 15:42:56 +02:00
|
|
|
|
2021-09-13 13:41:01 +02:00
|
|
|
def make_commitment_outputs(
        *,
        fees_per_participant: Mapping[HTLCOwner, int],
        local_amount_msat: int,
        remote_amount_msat: int,
        local_script: bytes,
        remote_script: bytes,
        htlcs: List[ScriptHtlc],
        dust_limit_sat: int,
        has_anchors: bool,
        local_anchor_script: Optional[str],
        remote_anchor_script: Optional[str]
) -> Tuple[List[PartialTxOutput], List[PartialTxOutput]]:
    """Build the outputs of a commitment tx.

    Amounts are in msat; output values in sat. Returns
    (htlc_outputs, all_outputs), where all_outputs is the dust-filtered
    to_local/to_remote + HTLC outputs, plus any anchor outputs.
    NOTE(review): anchor scripts are typed Optional[str] but passed straight
    through as scriptpubkey — confirm str/bytes consistency with PartialTxOutput.
    """
    # determine HTLC outputs and trim below dust to know if anchors need to be included
    htlc_outputs = []
    for script, htlc in htlcs:
        addr = bitcoin.redeem_script_to_address('p2wsh', script)
        if htlc.amount_msat // 1000 > dust_limit_sat:
            htlc_outputs.append(
                PartialTxOutput(
                    scriptpubkey=address_to_script(addr),
                    value=htlc.amount_msat // 1000
                ))

    # BOLT-03: "Base commitment transaction fees are extracted from the funder's amount;
    # if that amount is insufficient, the entire amount of the funder's output is used."
    non_htlc_outputs = []
    to_local_amt_msat = local_amount_msat - fees_per_participant[LOCAL]
    to_remote_amt_msat = remote_amount_msat - fees_per_participant[REMOTE]

    anchor_outputs = []
    # if no anchor scripts are set, we ignore anchor outputs, useful when this
    # function is used to determine outputs for a collaborative close
    if has_anchors and local_anchor_script and remote_anchor_script:
        # the funder (the side that pays fees) also pays for the anchors
        local_pays_anchors = bool(fees_per_participant[LOCAL])
        # we always allocate for two anchor outputs even if they are not added
        if local_pays_anchors:
            to_local_amt_msat -= 2 * FIXED_ANCHOR_SAT * 1000
        else:
            to_remote_amt_msat -= 2 * FIXED_ANCHOR_SAT * 1000

        # include anchors for outputs that materialize, include both if there are HTLCs present
        if to_local_amt_msat // 1000 >= dust_limit_sat or htlc_outputs:
            anchor_outputs.append(PartialTxOutput(scriptpubkey=local_anchor_script, value=FIXED_ANCHOR_SAT))
        if to_remote_amt_msat // 1000 >= dust_limit_sat or htlc_outputs:
            anchor_outputs.append(PartialTxOutput(scriptpubkey=remote_anchor_script, value=FIXED_ANCHOR_SAT))

    # if funder cannot afford feerate, their output might go negative, so take max(0, x) here
    to_local_amt_msat = max(0, to_local_amt_msat)
    to_remote_amt_msat = max(0, to_remote_amt_msat)
    non_htlc_outputs.append(PartialTxOutput(scriptpubkey=local_script, value=to_local_amt_msat // 1000))
    non_htlc_outputs.append(PartialTxOutput(scriptpubkey=remote_script, value=to_remote_amt_msat // 1000))

    # drop sub-dust to_local/to_remote/htlc outputs; anchors are exempt from the filter
    c_outputs_filtered = list(filter(lambda x: x.value >= dust_limit_sat, non_htlc_outputs + htlc_outputs))
    c_outputs = c_outputs_filtered + anchor_outputs
    return htlc_outputs, c_outputs
|
2018-09-21 19:18:34 +02:00
|
|
|
|
2020-03-26 05:43:26 +01:00
|
|
|
|
2021-09-13 13:51:51 +02:00
|
|
|
def effective_htlc_tx_weight(success: bool, has_anchors: bool):
    """Weight of the HTLC-success/HTLC-timeout tx used for fee/trim math.

    For anchors (zero-fee-htlc) the effective weight is zero: HTLC-tx fees
    are not subtracted from the HTLC output but paid via extra attached
    inputs, so only genuinely sub-dust HTLCs get trimmed.
    """
    if not has_anchors:
        return HTLC_SUCCESS_WEIGHT if success else HTLC_TIMEOUT_WEIGHT
    # multiplied by zero on purpose — see docstring
    return 0 * (HTLC_SUCCESS_WEIGHT_ANCHORS if success else HTLC_TIMEOUT_WEIGHT_ANCHORS)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def offered_htlc_trim_threshold_sat(*, dust_limit_sat: int, feerate: int, has_anchors: bool) -> int:
    """Offered HTLCs strictly below this amount (in sat) are trimmed from the ctx.

    *feerate* is in sat/kw: threshold = dust limit + fee of the HTLC-timeout tx.
    """
    timeout_tx_weight = effective_htlc_tx_weight(success=False, has_anchors=has_anchors)
    return dust_limit_sat + timeout_tx_weight * feerate // 1000
|
|
|
|
|
|
|
|
|
|
|
2021-09-13 13:51:51 +02:00
|
|
|
def received_htlc_trim_threshold_sat(*, dust_limit_sat: int, feerate: int, has_anchors: bool) -> int:
    """Received HTLCs strictly below this amount (in sat) are trimmed from the ctx.

    *feerate* is in sat/kw: threshold = dust limit + fee of the HTLC-success tx.
    """
    success_tx_weight = effective_htlc_tx_weight(success=True, has_anchors=has_anchors)
    return dust_limit_sat + success_tx_weight * feerate // 1000
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def fee_for_htlc_output(*, feerate: int) -> int:
    """Fee (in msat) that a single HTLC output adds to the commitment tx.

    *feerate* is in sat/kw.
    """
    return HTLC_OUTPUT_WEIGHT * feerate
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def calc_fees_for_commitment_tx(*, num_htlcs: int, feerate: int,
                                is_local_initiator: bool, round_to_sat: bool = True, has_anchors: bool) -> Dict['HTLCOwner', int]:
    """Fees (in msat) each party pays for the commitment tx; only the channel
    initiator (funder) pays.

    *feerate* is in sat/kw.
    Note: BOLT-02 specifies that msat fees need to be rounded down to sat.
    However, the rounding needs to happen for the total fees, so if the return
    value is used as part of additional fee calculation, rounding should be
    done after that (pass round_to_sat=False).
    """
    base_weight = COMMITMENT_TX_WEIGHT_ANCHORS if has_anchors else COMMITMENT_TX_WEIGHT
    total_weight = base_weight + num_htlcs * HTLC_OUTPUT_WEIGHT
    fee_msat = feerate * total_weight
    if round_to_sat:
        fee_msat = (fee_msat // 1000) * 1000
    return {
        LOCAL: fee_msat if is_local_initiator else 0,
        REMOTE: 0 if is_local_initiator else fee_msat,
    }
|
2018-09-21 19:18:34 +02:00
|
|
|
|
2020-03-30 03:49:50 +02:00
|
|
|
|
|
|
|
|
def make_commitment(
        *,
        ctn: int,
        local_funding_pubkey: bytes,
        remote_funding_pubkey: bytes,
        remote_payment_pubkey: bytes,
        funder_payment_basepoint: bytes,
        fundee_payment_basepoint: bytes,
        revocation_pubkey: bytes,
        delayed_pubkey: bytes,
        to_self_delay: int,
        funding_txid: str,
        funding_pos: int,
        funding_sat: int,
        local_amount: int,
        remote_amount: int,
        dust_limit_sat: int,
        fees_per_participant: Mapping[HTLCOwner, int],
        htlcs: List[ScriptHtlc],
        has_anchors: bool
) -> PartialTransaction:
    """Build a commitment transaction numbered *ctn* spending the funding output.

    local_amount/remote_amount are in msat; *htlcs* must already be trimmed of
    dust. Returns an unsigned PartialTransaction.
    """
    c_input = make_funding_input(local_funding_pubkey, remote_funding_pubkey,
                                 funding_pos, funding_txid, funding_sat)
    # BOLT-03: the obscured ctn is spread over locktime (lower 24 bits) and
    # the input's nsequence (upper 24 bits); top bytes fixed to 0x20 / 0x80
    obs = get_obscured_ctn(ctn, funder_payment_basepoint, fundee_payment_basepoint)
    locktime = (0x20 << 24) + (obs & 0xffffff)
    sequence = (0x80 << 24) + (obs >> 24)
    c_input.nsequence = sequence

    c_inputs = [c_input]

    # commitment tx outputs
    local_address = make_commitment_output_to_local_address(revocation_pubkey, to_self_delay, delayed_pubkey)
    remote_address = make_commitment_output_to_remote_address(remote_payment_pubkey, has_anchors)
    local_anchor_address = None
    remote_anchor_address = None
    if has_anchors:
        local_anchor_address = make_commitment_output_to_anchor_address(local_funding_pubkey)
        remote_anchor_address = make_commitment_output_to_anchor_address(remote_funding_pubkey)
    # note: it is assumed that the given 'htlcs' are all non-dust (dust htlcs already trimmed)

    # BOLT-03: "Transaction Input and Output Ordering
    # Lexicographic ordering: see BIP69. In the case of identical HTLC outputs,
    # the outputs are ordered in increasing cltv_expiry order."
    # so we sort by cltv_expiry now; and the later BIP69-sort is assumed to be *stable*
    htlcs = list(htlcs)
    htlcs.sort(key=lambda x: x.htlc.cltv_abs)

    htlc_outputs, c_outputs_filtered = make_commitment_outputs(
        fees_per_participant=fees_per_participant,
        local_amount_msat=local_amount,
        remote_amount_msat=remote_amount,
        local_script=address_to_script(local_address),
        remote_script=address_to_script(remote_address),
        htlcs=htlcs,
        dust_limit_sat=dust_limit_sat,
        has_anchors=has_anchors,
        local_anchor_script=address_to_script(local_anchor_address) if local_anchor_address else None,
        remote_anchor_script=address_to_script(remote_anchor_address) if remote_anchor_address else None
    )

    # sanity check: outputs must not exceed the funding amount
    assert sum(x.value for x in c_outputs_filtered) <= funding_sat, (c_outputs_filtered, funding_sat)

    # create commitment tx
    tx = PartialTransaction.from_io(c_inputs, c_outputs_filtered, locktime=locktime, version=2)
    return tx
|
|
|
|
|
|
2018-07-09 00:15:55 +02:00
|
|
|
def make_commitment_output_to_local_witness_script(
        revocation_pubkey: bytes, to_self_delay: int, delayed_pubkey: bytes,
) -> bytes:
    """BOLT-03 to_local witness script: immediate revocation (penalty) path,
    or local spend after a CSV delay of *to_self_delay* blocks.
    """
    # use isinstance for consistency with the sibling
    # make_commitment_output_to_*_witness_script helpers in this file
    assert isinstance(revocation_pubkey, bytes)
    assert isinstance(to_self_delay, int)
    assert isinstance(delayed_pubkey, bytes)
    script = construct_script([
        opcodes.OP_IF,
        revocation_pubkey,  # penalty path
        opcodes.OP_ELSE,
        to_self_delay,
        opcodes.OP_CHECKSEQUENCEVERIFY,  # enforce the CSV delay on local spends
        opcodes.OP_DROP,
        delayed_pubkey,
        opcodes.OP_ENDIF,
        opcodes.OP_CHECKSIG,
    ])
    return script
|
2018-07-09 00:15:55 +02:00
|
|
|
|
|
|
|
|
def make_commitment_output_to_local_address(
        revocation_pubkey: bytes, to_self_delay: int, delayed_pubkey: bytes) -> str:
    """P2WSH address for the to_local commitment output."""
    witness = make_commitment_output_to_local_witness_script(revocation_pubkey, to_self_delay, delayed_pubkey)
    return bitcoin.redeem_script_to_address('p2wsh', witness)
|
2018-07-09 00:15:55 +02:00
|
|
|
|
2021-09-13 13:41:01 +02:00
|
|
|
def make_commitment_output_to_remote_witness_script(remote_payment_pubkey: bytes) -> bytes:
    """to_remote witness script for anchor channels: key spend gated by a
    1-block CSV.
    """
    assert isinstance(remote_payment_pubkey, bytes)
    return construct_script([
        remote_payment_pubkey,
        opcodes.OP_CHECKSIGVERIFY,
        opcodes.OP_1,
        opcodes.OP_CHECKSEQUENCEVERIFY,
    ])
|
|
|
|
|
|
|
|
|
|
def make_commitment_output_to_remote_address(remote_payment_pubkey: bytes, has_anchors: bool) -> str:
    """Address for the to_remote output: p2wsh (CSV-gated) with anchors,
    plain p2wpkh otherwise.
    """
    if not has_anchors:
        return bitcoin.pubkey_to_address('p2wpkh', remote_payment_pubkey.hex())
    witness = make_commitment_output_to_remote_witness_script(remote_payment_pubkey)
    return bitcoin.redeem_script_to_address('p2wsh', witness)
|
|
|
|
|
|
|
|
|
|
def make_commitment_output_to_anchor_witness_script(funding_pubkey: bytes) -> bytes:
    """Anchor-output witness script: spendable by the funding key, or by
    anyone after 16 blocks (CSV).
    """
    assert isinstance(funding_pubkey, bytes)
    return construct_script([
        funding_pubkey,
        opcodes.OP_CHECKSIG,
        opcodes.OP_IFDUP,
        opcodes.OP_NOTIF,
        opcodes.OP_16,
        opcodes.OP_CHECKSEQUENCEVERIFY,
        opcodes.OP_ENDIF,
    ])
|
|
|
|
|
|
|
|
|
|
def make_commitment_output_to_anchor_address(funding_pubkey: bytes) -> str:
    """P2WSH address of the anchor output keyed to *funding_pubkey*."""
    witness = make_commitment_output_to_anchor_witness_script(funding_pubkey)
    return bitcoin.redeem_script_to_address('p2wsh', witness)
|
2018-07-09 00:15:55 +02:00
|
|
|
|
2019-10-23 17:09:41 +02:00
|
|
|
def sign_and_get_sig_string(tx: PartialTransaction, local_config, remote_config):
    """Sign input 0 of *tx* with our multisig funding key and return the
    signature in compact 64-byte (r||s) form, as used on the LN wire.
    """
    tx.sign({local_config.multisig_key.pubkey: local_config.multisig_key.privkey})
    sig = tx.inputs()[0].sigs_ecdsa[local_config.multisig_key.pubkey]
    # strip the trailing byte (presumably the sighash flag appended during
    # signing) before converting the DER signature to compact form
    sig_64 = ecdsa_sig64_from_der_sig(sig[:-1])
    return sig_64
|
|
|
|
|
|
2024-04-26 20:09:00 +00:00
|
|
|
def funding_output_script(local_config: 'LocalConfig', remote_config: 'RemoteConfig') -> bytes:
    """2-of-2 multisig script locking the channel funding output."""
    return funding_output_script_from_keys(
        local_config.multisig_key.pubkey,
        remote_config.multisig_key.pubkey,
    )
|
|
|
|
|
|
2024-04-26 20:09:00 +00:00
|
|
|
def funding_output_script_from_keys(pubkey1: bytes, pubkey2: bytes) -> bytes:
    """2-of-2 multisig script over the two funding pubkeys, sorted
    lexicographically by hex (BOLT-03 ordering).
    """
    hex_keys = sorted((pubkey1.hex(), pubkey2.hex()))
    return transaction.multisig_script(hex_keys, 2)
|
|
|
|
|
|
2018-07-23 20:49:44 +02:00
|
|
|
|
2018-10-04 16:38:59 +02:00
|
|
|
def get_obscured_ctn(ctn: int, funder: bytes, fundee: bytes) -> int:
    """XOR-obscure (or de-obscure — the operation is its own inverse) a
    commitment number with the mask derived from both payment basepoints.
    """
    obscuring_mask = int.from_bytes(sha256(funder + fundee)[-6:], byteorder='big')
    return ctn ^ obscuring_mask
|
|
|
|
|
|
2019-10-23 17:09:41 +02:00
|
|
|
def extract_ctn_from_tx(tx: Transaction, txin_index: int, funder_payment_basepoint: bytes,
                        fundee_payment_basepoint: bytes) -> int:
    """Recover the commitment number encoded in commitment tx *tx*.

    The obscured ctn is split across the input's nsequence (upper 24 bits)
    and the locktime (lower 24 bits); XOR-ing with the basepoint-derived
    mask yields the plain ctn.
    """
    tx.deserialize()
    nsequence = tx.inputs()[txin_index].nsequence
    nlocktime = tx.locktime
    obscured_ctn = ((nsequence & 0xffffff) << 24) + (nlocktime & 0xffffff)
    return get_obscured_ctn(obscured_ctn, funder_payment_basepoint, fundee_payment_basepoint)
|
2018-07-09 00:15:55 +02:00
|
|
|
|
2020-04-13 15:57:53 +02:00
|
|
|
def extract_ctn_from_tx_and_chan(tx: Transaction, chan: 'AbstractChannel') -> int:
    """Recover the commitment number of *tx*, resolving funder/fundee
    basepoints from the channel's configs.
    """
    if chan.is_initiator():
        funder_conf, fundee_conf = chan.config[LOCAL], chan.config[REMOTE]
    else:
        funder_conf, fundee_conf = chan.config[REMOTE], chan.config[LOCAL]
    return extract_ctn_from_tx(
        tx, txin_index=0,
        funder_payment_basepoint=funder_conf.payment_basepoint.pubkey,
        fundee_payment_basepoint=fundee_conf.payment_basepoint.pubkey)
|
2018-09-12 16:17:10 +02:00
|
|
|
|
2021-09-15 09:47:58 +02:00
|
|
|
def ctx_has_anchors(tx: Transaction) -> bool:
    """Heuristically detect whether commitment tx *tx* uses anchor outputs,
    by checking for an output of exactly FIXED_ANCHOR_SAT.
    """
    # return the condition directly instead of if/True/else/False,
    # and avoid materializing the intermediate list of values
    return any(txout.value == FIXED_ANCHOR_SAT for txout in tx.outputs())
|
|
|
|
|
|
|
|
|
|
|
2018-07-26 21:08:25 +02:00
|
|
|
|
2020-03-16 22:07:00 +01:00
|
|
|
class LnFeatureContexts(enum.Flag):
    """Contexts in which a given LN feature bit may be advertised
    (see the LnFeatures.for_* filter methods below).
    """
    INIT = enum.auto()                  # `init` message
    NODE_ANN = enum.auto()              # node announcement
    CHAN_ANN_AS_IS = enum.auto()        # channel announcement, bit copied as-is
    CHAN_ANN_ALWAYS_ODD = enum.auto()   # channel announcement, forced to the odd bit
    CHAN_ANN_ALWAYS_EVEN = enum.auto()  # channel announcement, forced to the even bit
    INVOICE = enum.auto()               # BOLT-11 invoice
|
|
|
|
|
|
|
|
|
|
# Short alias used in the per-feature context tables populated below.
LNFC = LnFeatureContexts

# feature bit -> set of feature bits it directly depends on
# (checked by LnFeatures.validate_transitive_dependencies)
_ln_feature_direct_dependencies = defaultdict(set)  # type: Dict[LnFeatures, Set[LnFeatures]]
# feature bit -> contexts in which that bit may appear
_ln_feature_contexts = {}  # type: Dict[LnFeatures, LnFeatureContexts]
|
|
|
|
|
|
|
|
|
|
class LnFeatures(IntFlag):
|
2018-10-09 20:10:26 +02:00
|
|
|
OPTION_DATA_LOSS_PROTECT_REQ = 1 << 0
|
|
|
|
|
OPTION_DATA_LOSS_PROTECT_OPT = 1 << 1
|
2020-03-16 22:07:00 +01:00
|
|
|
_ln_feature_contexts[OPTION_DATA_LOSS_PROTECT_OPT] = (LNFC.INIT | LnFeatureContexts.NODE_ANN)
|
|
|
|
|
_ln_feature_contexts[OPTION_DATA_LOSS_PROTECT_REQ] = (LNFC.INIT | LnFeatureContexts.NODE_ANN)
|
|
|
|
|
|
2018-10-09 20:10:26 +02:00
|
|
|
INITIAL_ROUTING_SYNC = 1 << 3
|
2020-03-16 22:07:00 +01:00
|
|
|
_ln_feature_contexts[INITIAL_ROUTING_SYNC] = LNFC.INIT
|
|
|
|
|
|
2018-10-09 20:10:26 +02:00
|
|
|
OPTION_UPFRONT_SHUTDOWN_SCRIPT_REQ = 1 << 4
|
|
|
|
|
OPTION_UPFRONT_SHUTDOWN_SCRIPT_OPT = 1 << 5
|
2020-03-16 22:07:00 +01:00
|
|
|
_ln_feature_contexts[OPTION_UPFRONT_SHUTDOWN_SCRIPT_OPT] = (LNFC.INIT | LNFC.NODE_ANN)
|
|
|
|
|
_ln_feature_contexts[OPTION_UPFRONT_SHUTDOWN_SCRIPT_REQ] = (LNFC.INIT | LNFC.NODE_ANN)
|
|
|
|
|
|
2018-10-09 20:10:26 +02:00
|
|
|
GOSSIP_QUERIES_REQ = 1 << 6
|
|
|
|
|
GOSSIP_QUERIES_OPT = 1 << 7
|
2020-03-16 22:07:00 +01:00
|
|
|
_ln_feature_contexts[GOSSIP_QUERIES_OPT] = (LNFC.INIT | LNFC.NODE_ANN)
|
|
|
|
|
_ln_feature_contexts[GOSSIP_QUERIES_REQ] = (LNFC.INIT | LNFC.NODE_ANN)
|
|
|
|
|
|
|
|
|
|
VAR_ONION_REQ = 1 << 8
|
|
|
|
|
VAR_ONION_OPT = 1 << 9
|
|
|
|
|
_ln_feature_contexts[VAR_ONION_OPT] = (LNFC.INIT | LNFC.NODE_ANN | LNFC.INVOICE)
|
|
|
|
|
_ln_feature_contexts[VAR_ONION_REQ] = (LNFC.INIT | LNFC.NODE_ANN | LNFC.INVOICE)
|
|
|
|
|
|
|
|
|
|
GOSSIP_QUERIES_EX_REQ = 1 << 10
|
|
|
|
|
GOSSIP_QUERIES_EX_OPT = 1 << 11
|
|
|
|
|
_ln_feature_direct_dependencies[GOSSIP_QUERIES_EX_OPT] = {GOSSIP_QUERIES_OPT}
|
|
|
|
|
_ln_feature_contexts[GOSSIP_QUERIES_EX_OPT] = (LNFC.INIT | LNFC.NODE_ANN)
|
|
|
|
|
_ln_feature_contexts[GOSSIP_QUERIES_EX_REQ] = (LNFC.INIT | LNFC.NODE_ANN)
|
|
|
|
|
|
2019-12-13 14:07:11 +01:00
|
|
|
OPTION_STATIC_REMOTEKEY_REQ = 1 << 12
|
|
|
|
|
OPTION_STATIC_REMOTEKEY_OPT = 1 << 13
|
2020-03-16 22:07:00 +01:00
|
|
|
_ln_feature_contexts[OPTION_STATIC_REMOTEKEY_OPT] = (LNFC.INIT | LNFC.NODE_ANN)
|
|
|
|
|
_ln_feature_contexts[OPTION_STATIC_REMOTEKEY_REQ] = (LNFC.INIT | LNFC.NODE_ANN)
|
|
|
|
|
|
|
|
|
|
PAYMENT_SECRET_REQ = 1 << 14
|
|
|
|
|
PAYMENT_SECRET_OPT = 1 << 15
|
|
|
|
|
_ln_feature_direct_dependencies[PAYMENT_SECRET_OPT] = {VAR_ONION_OPT}
|
|
|
|
|
_ln_feature_contexts[PAYMENT_SECRET_OPT] = (LNFC.INIT | LNFC.NODE_ANN | LNFC.INVOICE)
|
|
|
|
|
_ln_feature_contexts[PAYMENT_SECRET_REQ] = (LNFC.INIT | LNFC.NODE_ANN | LNFC.INVOICE)
|
|
|
|
|
|
|
|
|
|
BASIC_MPP_REQ = 1 << 16
|
|
|
|
|
BASIC_MPP_OPT = 1 << 17
|
|
|
|
|
_ln_feature_direct_dependencies[BASIC_MPP_OPT] = {PAYMENT_SECRET_OPT}
|
|
|
|
|
_ln_feature_contexts[BASIC_MPP_OPT] = (LNFC.INIT | LNFC.NODE_ANN | LNFC.INVOICE)
|
|
|
|
|
_ln_feature_contexts[BASIC_MPP_REQ] = (LNFC.INIT | LNFC.NODE_ANN | LNFC.INVOICE)
|
|
|
|
|
|
|
|
|
|
OPTION_SUPPORT_LARGE_CHANNEL_REQ = 1 << 18
|
|
|
|
|
OPTION_SUPPORT_LARGE_CHANNEL_OPT = 1 << 19
|
2021-02-22 20:01:45 +01:00
|
|
|
_ln_feature_contexts[OPTION_SUPPORT_LARGE_CHANNEL_OPT] = (LNFC.INIT | LNFC.NODE_ANN)
|
|
|
|
|
_ln_feature_contexts[OPTION_SUPPORT_LARGE_CHANNEL_REQ] = (LNFC.INIT | LNFC.NODE_ANN)
|
2020-03-16 22:07:00 +01:00
|
|
|
|
2021-09-13 11:00:12 +02:00
|
|
|
OPTION_ANCHOR_OUTPUTS_REQ = 1 << 20
|
|
|
|
|
OPTION_ANCHOR_OUTPUTS_OPT = 1 << 21
|
|
|
|
|
_ln_feature_direct_dependencies[OPTION_ANCHOR_OUTPUTS_OPT] = {OPTION_STATIC_REMOTEKEY_OPT}
|
|
|
|
|
_ln_feature_contexts[OPTION_ANCHOR_OUTPUTS_REQ] = (LNFC.INIT | LNFC.NODE_ANN)
|
|
|
|
|
_ln_feature_contexts[OPTION_ANCHOR_OUTPUTS_OPT] = (LNFC.INIT | LNFC.NODE_ANN)
|
|
|
|
|
|
|
|
|
|
OPTION_ANCHORS_ZERO_FEE_HTLC_REQ = 1 << 22
|
|
|
|
|
OPTION_ANCHORS_ZERO_FEE_HTLC_OPT = 1 << 23
|
|
|
|
|
_ln_feature_direct_dependencies[OPTION_ANCHORS_ZERO_FEE_HTLC_OPT] = {OPTION_STATIC_REMOTEKEY_OPT}
|
|
|
|
|
_ln_feature_contexts[OPTION_ANCHORS_ZERO_FEE_HTLC_REQ] = (LNFC.INIT | LNFC.NODE_ANN)
|
|
|
|
|
_ln_feature_contexts[OPTION_ANCHORS_ZERO_FEE_HTLC_OPT] = (LNFC.INIT | LNFC.NODE_ANN)
|
|
|
|
|
|
2023-01-13 12:46:55 +01:00
|
|
|
# Temporary number.
|
|
|
|
|
OPTION_TRAMPOLINE_ROUTING_REQ_ECLAIR = 1 << 148
|
|
|
|
|
OPTION_TRAMPOLINE_ROUTING_OPT_ECLAIR = 1 << 149
|
2021-03-08 18:49:52 +01:00
|
|
|
|
2023-01-13 12:46:55 +01:00
|
|
|
_ln_feature_contexts[OPTION_TRAMPOLINE_ROUTING_REQ_ECLAIR] = (LNFC.INIT | LNFC.NODE_ANN | LNFC.INVOICE)
|
|
|
|
|
_ln_feature_contexts[OPTION_TRAMPOLINE_ROUTING_OPT_ECLAIR] = (LNFC.INIT | LNFC.NODE_ANN | LNFC.INVOICE)
|
|
|
|
|
|
|
|
|
|
# We use a different bit because Phoenix cannot do end-to-end multi-trampoline routes
|
|
|
|
|
OPTION_TRAMPOLINE_ROUTING_REQ_ELECTRUM = 1 << 150
|
|
|
|
|
OPTION_TRAMPOLINE_ROUTING_OPT_ELECTRUM = 1 << 151
|
|
|
|
|
|
|
|
|
|
_ln_feature_contexts[OPTION_TRAMPOLINE_ROUTING_REQ_ELECTRUM] = (LNFC.INIT | LNFC.NODE_ANN | LNFC.INVOICE)
|
|
|
|
|
_ln_feature_contexts[OPTION_TRAMPOLINE_ROUTING_OPT_ELECTRUM] = (LNFC.INIT | LNFC.NODE_ANN | LNFC.INVOICE)
|
2021-03-08 18:49:52 +01:00
|
|
|
|
2021-10-22 11:59:44 +02:00
|
|
|
OPTION_SHUTDOWN_ANYSEGWIT_REQ = 1 << 26
|
|
|
|
|
OPTION_SHUTDOWN_ANYSEGWIT_OPT = 1 << 27
|
|
|
|
|
|
|
|
|
|
_ln_feature_contexts[OPTION_SHUTDOWN_ANYSEGWIT_REQ] = (LNFC.INIT | LNFC.NODE_ANN)
|
|
|
|
|
_ln_feature_contexts[OPTION_SHUTDOWN_ANYSEGWIT_OPT] = (LNFC.INIT | LNFC.NODE_ANN)
|
|
|
|
|
|
2022-01-18 14:40:16 +01:00
|
|
|
OPTION_CHANNEL_TYPE_REQ = 1 << 44
|
|
|
|
|
OPTION_CHANNEL_TYPE_OPT = 1 << 45
|
|
|
|
|
|
|
|
|
|
_ln_feature_contexts[OPTION_CHANNEL_TYPE_REQ] = (LNFC.INIT | LNFC.NODE_ANN)
|
|
|
|
|
_ln_feature_contexts[OPTION_CHANNEL_TYPE_OPT] = (LNFC.INIT | LNFC.NODE_ANN)
|
|
|
|
|
|
2023-01-07 12:20:03 +01:00
|
|
|
OPTION_SCID_ALIAS_REQ = 1 << 46
|
|
|
|
|
OPTION_SCID_ALIAS_OPT = 1 << 47
|
|
|
|
|
|
|
|
|
|
_ln_feature_contexts[OPTION_SCID_ALIAS_REQ] = (LNFC.INIT | LNFC.NODE_ANN)
|
|
|
|
|
_ln_feature_contexts[OPTION_SCID_ALIAS_OPT] = (LNFC.INIT | LNFC.NODE_ANN)
|
|
|
|
|
|
2023-08-08 05:09:58 +02:00
|
|
|
OPTION_ZEROCONF_REQ = 1 << 50
|
|
|
|
|
OPTION_ZEROCONF_OPT = 1 << 51
|
|
|
|
|
|
|
|
|
|
_ln_feature_direct_dependencies[OPTION_ZEROCONF_OPT] = {OPTION_SCID_ALIAS_OPT}
|
|
|
|
|
_ln_feature_contexts[OPTION_ZEROCONF_REQ] = (LNFC.INIT | LNFC.NODE_ANN)
|
|
|
|
|
_ln_feature_contexts[OPTION_ZEROCONF_OPT] = (LNFC.INIT | LNFC.NODE_ANN)
|
|
|
|
|
|
2020-11-20 08:51:01 +01:00
|
|
|
def validate_transitive_dependencies(self) -> bool:
    """Return True iff all feature dependencies are satisfied.

    Only direct dependencies of every set flag are tested: that is
    sufficient, since satisfying them transitively satisfies the rest.
    """
    fs = self
    # Normalize: mirror every even (required) bit onto its odd (optional) pair,
    # so dependency checks only need to look at one parity.
    for bit in list_enabled_bits(fs):
        if bit % 2 == 0:
            fs |= 1 << get_ln_flag_pair_of_bit(bit)
    return all(
        dependency & fs
        for bit in list_enabled_bits(fs)
        for dependency in _ln_feature_direct_dependencies[1 << bit]
    )
def for_init_message(self) -> 'LnFeatures':
    """Return the subset of these features allowed in the init message."""
    result = LnFeatures(0)
    for bit in list_enabled_bits(self):
        ctx = _ln_feature_contexts[1 << bit]
        if ctx & LnFeatureContexts.INIT:
            result |= 1 << bit
    return result
def for_node_announcement(self) -> 'LnFeatures':
    """Return the subset of these features allowed in node_announcement."""
    result = LnFeatures(0)
    for bit in list_enabled_bits(self):
        ctx = _ln_feature_contexts[1 << bit]
        if ctx & LnFeatureContexts.NODE_ANN:
            result |= 1 << bit
    return result
def for_invoice(self) -> 'LnFeatures':
    """Return the subset of these features allowed in BOLT-11 invoices."""
    result = LnFeatures(0)
    for bit in list_enabled_bits(self):
        ctx = _ln_feature_contexts[1 << bit]
        if ctx & LnFeatureContexts.INVOICE:
            result |= 1 << bit
    return result
def for_channel_announcement(self) -> 'LnFeatures':
    """Return the subset of these features allowed in channel_announcement.

    Depending on the flag's context, a bit is either copied as-is, kept only
    in its even form, or coerced to its odd form.
    """
    result = LnFeatures(0)
    for bit in list_enabled_bits(self):
        ctx = _ln_feature_contexts[1 << bit]
        if ctx & LnFeatureContexts.CHAN_ANN_AS_IS:
            result |= 1 << bit
        elif ctx & LnFeatureContexts.CHAN_ANN_ALWAYS_EVEN:
            # only the even (required) form may appear
            if bit % 2 == 0:
                result |= 1 << bit
        elif ctx & LnFeatureContexts.CHAN_ANN_ALWAYS_ODD:
            # always advertise the odd (optional) form
            odd_bit = bit if bit % 2 == 1 else get_ln_flag_pair_of_bit(bit)
            result |= 1 << odd_bit
    return result
2021-02-22 19:53:01 +01:00
|
|
|
def supports(self, feature: 'LnFeatures') -> bool:
    """Returns whether given feature is enabled.

    Helper function that tries to hide the complexity of even/odd bits.
    For example, instead of:
      bool(myfeatures & LnFeatures.VAR_ONION_OPT or myfeatures & LnFeatures.VAR_ONION_REQ)
    you can do:
      myfeatures.supports(LnFeatures.VAR_ONION_OPT)
    """
    # the argument must be a single bit, not a combination of features
    if feature != (1 << (feature.bit_length() - 1)):
        raise ValueError(f"'feature' cannot be a combination of features: {feature}")
    # an even bit length means the set bit is at an odd index, i.e. the OPT bit
    is_opt = feature.bit_length() % 2 == 0
    paired = feature >> 1 if is_opt else feature << 1
    return bool(self & feature) or bool(self & paired)
2022-07-21 17:06:20 +02:00
|
|
|
def get_names(self) -> Sequence[str]:
    """Return a readable name for every enabled bit, known or unknown."""
    return [
        (LnFeatures(1 << flag).name or f"bit_{flag}")
        for flag in list_enabled_bits(self)
    ]
|
lnutil.LnFeatures: limit max feature bit to 10_000
closes https://github.com/spesmilo/electrum/issues/8403
> In Python 3.10 that worked fine, however in Python 3.11 large integer check https://github.com/python/cpython/issues/95778, so now this throws an error.
Apparently this change was deemed a security fix and was backported to all supported branches of CPython (going back to 3.7). i.e. it affects ~all versions of python (if sufficiently updated with bugfix patches), not just 3.11
> Some offending node aliases:
> ```
> ergvein-fiatchannels
> test-mainnet
> arakis
> ```
The features bits set by some of these nodes:
```
(1, 7, 8, 11, 13, 14, 17, 19, 23, 27, 45, 32973, 52973)
(1, 7, 8, 11, 13, 14, 17, 19, 23, 27, 39, 45, 55, 32973, 52973)
```
> P.S. I see there are a lot of nodes with 253 bytes in their feature vectors. Any idea why that could happen?
Note that the valid [merged-into-spec features](https://github.com/lightning/bolts/blob/50b2df24a27879e8329712c275db78876fd022fe/09-features.md) currently only go as high as ~51.
However the spec does not specify how to choose feature bits for experimental stuff, so I guess some people are using values in the 50k range. The only limit imposed by the spec on the length of the features bitvector is an implicit one due to the max message size: every msg must be smaller than 65KB, and the features bitvector needs to fit inside the init message, hence it can be up to ~524K bits.
(note that the features are not stored in a sparse representation in the init message and in gossip messages, so if many nodes set such high feature bits, that would noticably impact the size of the gossip).
-----
Anyway, our current implementation of LnFeatures is subclassing IntFlag, and it looks like it does not work well for such large integers. I've managed to make IntFlags work reasonably in python 3.11 by overriding __str__ and __repr__ (note that in cpython it is apparently only the base2<->base10 conversions that are slow; power-of-2 conversions are fast, so we can e.g. use `hex()`). However in python 3.10 and older, enum.py itself seems really slow for bigints, e.g. enum._decompose in python 3.10.
Try e.g. this script, which is instant in py3.11 but takes minutes in py3.10:
```py
from enum import IntFlag
class c(IntFlag):
known_flag_1 = 1 << 0
known_flag_2 = 1 << 1
known_flag_3 = 1 << 2
if hasattr(IntFlag, "_numeric_repr_"): # python 3.11+
_numeric_repr_ = hex
def __repr__(self):
return f"<{self._name_}: {hex(self._value_)}>"
def __str__(self):
return hex(self._value_)
a = c(2**70000-1)
q1 = repr(a)
q2 = str(a)
```
AFAICT we have two options: either we rewrite LnFeatures so that it does not use IntFlag (and enum.py), or, for the short term as workaround, we could just reject very large feature bits.
For now, I've opted to the latter, rejecting feature bits over 10k.
(note that another option is bumping the min required python to 3.11, in which case with the overrides added in this commit the performance looks perfectly fine)
2023-05-08 19:37:33 +00:00
|
|
|
if hasattr(IntFlag, "_numeric_repr_"): # python 3.11+
|
|
|
|
|
# performance improvement (avoid base2<->base10), see #8403
|
|
|
|
|
_numeric_repr_ = hex
|
|
|
|
|
|
|
|
|
|
def __repr__(self):
|
|
|
|
|
# performance improvement (avoid base2<->base10), see #8403
|
|
|
|
|
return f"<{self._name_}: {hex(self._value_)}>"
|
|
|
|
|
|
|
|
|
|
def __str__(self):
|
|
|
|
|
# performance improvement (avoid base2<->base10), see #8403
|
|
|
|
|
return hex(self._value_)
|
|
|
|
|
|
2020-03-16 22:07:00 +01:00
|
|
|
|
2021-10-05 09:13:01 +02:00
|
|
|
@stored_as('channel_type', _type=None)
class ChannelType(IntFlag):
    """Negotiated channel type as a set of feature-like flags.

    Bit positions mirror the corresponding LnFeatures bits (e.g. the
    module-level OPTION_SCID_ALIAS_REQ = 1 << 46, OPTION_ZEROCONF_REQ = 1 << 50).
    """
    OPTION_LEGACY_CHANNEL = 0
    OPTION_STATIC_REMOTEKEY = 1 << 12
    OPTION_ANCHOR_OUTPUTS = 1 << 20
    OPTION_ANCHORS_ZERO_FEE_HTLC_TX = 1 << 22
    OPTION_SCID_ALIAS = 1 << 46
    OPTION_ZEROCONF = 1 << 50

    def discard_unknown_and_check(self):
        """Discards unknown flags and checks flag combination.

        Returns the resulting ChannelType. Raises ValueError (from
        check_combinations) if the remaining flags are not a valid combination.
        """
        flags = list_enabled_bits(self)
        known_channel_types = []
        for flag in flags:
            channel_type = ChannelType(1 << flag)
            # unknown bits produce a nameless pseudo-member -> discard them
            if channel_type.name:
                known_channel_types.append(channel_type)
        # NOTE(review): raises IndexError if no known bit is set at all --
        # presumably callers guarantee at least one; confirm before hardening.
        final_channel_type = known_channel_types[0]
        for channel_type in known_channel_types[1:]:
            final_channel_type |= channel_type

        final_channel_type.check_combinations()
        return final_channel_type

    def check_combinations(self):
        """Raise ValueError unless, ignoring SCID_ALIAS/ZEROCONF, the type is
        one of the supported base combinations."""
        basic_type = self & ~(ChannelType.OPTION_SCID_ALIAS | ChannelType.OPTION_ZEROCONF)
        if basic_type not in [
                ChannelType.OPTION_STATIC_REMOTEKEY,
                ChannelType.OPTION_ANCHOR_OUTPUTS | ChannelType.OPTION_STATIC_REMOTEKEY,
                ChannelType.OPTION_ANCHORS_ZERO_FEE_HTLC_TX | ChannelType.OPTION_STATIC_REMOTEKEY
        ]:
            raise ValueError("Channel type is not a valid flag combination.")

    def complies_with_features(self, features: LnFeatures) -> bool:
        """Return True iff every flag of this channel type is supported
        (in either OPT or REQ form) by *features*."""
        flags = list_enabled_bits(self)
        complies = True
        for flag in flags:
            feature = LnFeatures(1 << flag)
            complies &= features.supports(feature)
        return complies

    def to_bytes_minimal(self):
        # MUST use the smallest bitmap possible to represent the channel type.
        bit_length = self.value.bit_length()
        # number of bytes needed to hold bit_length bits, rounded up
        byte_length = bit_length // 8 + int(bool(bit_length % 8))
        return self.to_bytes(byte_length, byteorder='big')

    @property
    def name_minimal(self):
        """Short display name: the member name without the OPTION_ prefix."""
        if self.name:
            return self.name.replace('OPTION_', '')
        else:
            return str(self)
2020-03-16 22:07:00 +01:00
|
|
|
del LNFC  # name is ambiguous without context

# features that are actually implemented and understood in our codebase:
# (note: this is not what we send in e.g. init!)
# (note: specify both OPT and REQ here)
LN_FEATURES_IMPLEMENTED = (
    LnFeatures(0)
    | LnFeatures.OPTION_DATA_LOSS_PROTECT_OPT | LnFeatures.OPTION_DATA_LOSS_PROTECT_REQ
    | LnFeatures.GOSSIP_QUERIES_OPT | LnFeatures.GOSSIP_QUERIES_REQ
    | LnFeatures.OPTION_STATIC_REMOTEKEY_OPT | LnFeatures.OPTION_STATIC_REMOTEKEY_REQ
    | LnFeatures.VAR_ONION_OPT | LnFeatures.VAR_ONION_REQ
    | LnFeatures.PAYMENT_SECRET_OPT | LnFeatures.PAYMENT_SECRET_REQ
    | LnFeatures.BASIC_MPP_OPT | LnFeatures.BASIC_MPP_REQ
    | LnFeatures.OPTION_TRAMPOLINE_ROUTING_OPT_ELECTRUM | LnFeatures.OPTION_TRAMPOLINE_ROUTING_REQ_ELECTRUM
    | LnFeatures.OPTION_SHUTDOWN_ANYSEGWIT_OPT | LnFeatures.OPTION_SHUTDOWN_ANYSEGWIT_REQ
    | LnFeatures.OPTION_CHANNEL_TYPE_OPT | LnFeatures.OPTION_CHANNEL_TYPE_REQ
    | LnFeatures.OPTION_SCID_ALIAS_OPT | LnFeatures.OPTION_SCID_ALIAS_REQ
    | LnFeatures.OPTION_ANCHORS_ZERO_FEE_HTLC_OPT | LnFeatures.OPTION_ANCHORS_ZERO_FEE_HTLC_REQ
)
2018-10-09 20:10:26 +02:00
|
|
|
|
|
|
|
|
|
2019-08-02 17:58:45 +02:00
|
|
|
def get_ln_flag_pair_of_bit(flag_bit: int) -> int:
    """Ln Feature flags are assigned in pairs, one even, one odd. See BOLT-09.

    Return the other flag from the pair.

    e.g. 6 -> 7
    e.g. 7 -> 6
    """
    # Flipping the lowest bit maps an even index to the next odd index and back.
    return flag_bit ^ 1
|
2020-03-25 13:44:39 +01:00
|
|
|
|
|
|
|
|
class IncompatibleOrInsaneFeatures(Exception):
    """Base class for LN feature-vector validation/negotiation errors."""


class UnknownEvenFeatureBits(IncompatibleOrInsaneFeatures):
    """A mandatory (even) feature bit is set that we do not recognise."""


class IncompatibleLightningFeatures(IncompatibleOrInsaneFeatures):
    """Our and the remote peer's feature sets cannot be reconciled."""
|
|
|
|
|
|
2020-02-26 21:10:33 +01:00
|
|
|
|
2020-03-16 22:07:00 +01:00
|
|
|
def ln_compare_features(our_features: 'LnFeatures', their_features: int) -> 'LnFeatures':
    """Returns negotiated features.
    Raises IncompatibleLightningFeatures if incompatible.

    Starting from our_features, optional features the remote lacks are
    disabled, and for mutually-supported even bits the odd pair bit is set.
    """
    our_flags = set(list_enabled_bits(our_features))
    their_flags = set(list_enabled_bits(their_features))
    # check that they have our required features, and disable the optional features they don't have
    # note: we iterate over the pre-computed our_flags set while mutating
    # our_features in place; the set must not be recomputed mid-loop.
    for flag in our_flags:
        if flag not in their_flags and get_ln_flag_pair_of_bit(flag) not in their_flags:
            # they don't have this feature we wanted :(
            if flag % 2 == 0:  # even flags are compulsory
                raise IncompatibleLightningFeatures(f"remote does not support {LnFeatures(1 << flag)!r}")
            our_features ^= 1 << flag  # disable flag
        else:
            # They too have this flag.
            # For easier feature-bit-testing, if this is an even flag, we also
            # set the corresponding odd flag now.
            if flag % 2 == 0 and our_features & (1 << flag):
                our_features |= 1 << get_ln_flag_pair_of_bit(flag)
    # check that we have their required features
    for flag in their_flags:
        if flag not in our_flags and get_ln_flag_pair_of_bit(flag) not in our_flags:
            # we don't have this feature they wanted :(
            if flag % 2 == 0:  # even flags are compulsory
                raise IncompatibleLightningFeatures(f"remote wanted feature we don't have: {LnFeatures(1 << flag)!r}")
    return our_features
|
|
|
|
|
|
2018-07-27 20:59:04 +02:00
|
|
|
|
lnutil.LnFeatures: limit max feature bit to 10_000
closes https://github.com/spesmilo/electrum/issues/8403
> In Python 3.10 that worked fine, however in Python 3.11 large integer check https://github.com/python/cpython/issues/95778, so now this throws an error.
Apparently this change was deemed a security fix and was backported to all supported branches of CPython (going back to 3.7). i.e. it affects ~all versions of python (if sufficiently updated with bugfix patches), not just 3.11
> Some offending node aliases:
> ```
> ergvein-fiatchannels
> test-mainnet
> arakis
> ```
The features bits set by some of these nodes:
```
(1, 7, 8, 11, 13, 14, 17, 19, 23, 27, 45, 32973, 52973)
(1, 7, 8, 11, 13, 14, 17, 19, 23, 27, 39, 45, 55, 32973, 52973)
```
> P.S. I see there are a lot of nodes with 253 bytes in their feature vectors. Any idea why that could happen?
Note that the valid [merged-into-spec features](https://github.com/lightning/bolts/blob/50b2df24a27879e8329712c275db78876fd022fe/09-features.md) currently only go as high as ~51.
However the spec does not specify how to choose feature bits for experimental stuff, so I guess some people are using values in the 50k range. The only limit imposed by the spec on the length of the features bitvector is an implicit one due to the max message size: every msg must be smaller than 65KB, and the features bitvector needs to fit inside the init message, hence it can be up to ~524K bits.
(note that the features are not stored in a sparse representation in the init message and in gossip messages, so if many nodes set such high feature bits, that would noticeably impact the size of the gossip).
-----
Anyway, our current implementation of LnFeatures is subclassing IntFlag, and it looks like it does not work well for such large integers. I've managed to make IntFlags reasonably in python 3.11 by overriding __str__ and __repr__ (note that in cpython it is apparently only the base2<->base10 conversions that are slow, power-of-2 conversions are fast, so we can e.g. use `hex()`). However in python 3.10 and older, enum.py itself seems really slow for bigints, e.g. enum._decompose in python 3.10.
Try e.g. this script, which is instant in py3.11 but takes minutes in py3.10:
```py
from enum import IntFlag
class c(IntFlag):
known_flag_1 = 1 << 0
known_flag_2 = 1 << 1
known_flag_3 = 1 << 2
if hasattr(IntFlag, "_numeric_repr_"): # python 3.11+
_numeric_repr_ = hex
def __repr__(self):
return f"<{self._name_}: {hex(self._value_)}>"
def __str__(self):
return hex(self._value_)
a = c(2**70000-1)
q1 = repr(a)
q2 = str(a)
```
AFAICT we have two options: either we rewrite LnFeatures so that it does not use IntFlag (and enum.py), or, for the short term as workaround, we could just reject very large feature bits.
For now, I've opted to the latter, rejecting feature bits over 10k.
(note that another option is bumping the min required python to 3.11, in which case with the overrides added in this commit the performance looks perfectly fine)
2023-05-08 19:37:33 +00:00
|
|
|
# CPython (3.11+, backported to older bugfix releases) limits int<->decimal-str
# conversions; parts of this module format large ints, so fail fast if some
# other code lowered the limit below the 4300-digit default.
if hasattr(sys, "get_int_max_str_digits"):
    # check that the user or other library has not lowered the limit (from default)
    assert sys.get_int_max_str_digits() >= 4300, f"sys.get_int_max_str_digits() too low: {sys.get_int_max_str_digits()}"
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def validate_features(features: int) -> LnFeatures:
    """Raises IncompatibleOrInsaneFeatures if
    - a mandatory feature is listed that we don't recognize, or
    - the features are inconsistent
    For convenience, returns the parsed features.
    """
    # Implementation-specific cap on how high a feature bit we accept.
    # Needed as LnFeatures subclasses IntFlag, and uses ints internally.
    # See https://docs.python.org/3/library/stdtypes.html#integer-string-conversion-length-limitation
    if features.bit_length() > 10_000:
        raise IncompatibleOrInsaneFeatures(f"features bitvector too large: {features.bit_length()=} > 10_000")
    features = LnFeatures(features)
    for fbit in list_enabled_bits(features):
        is_even = fbit % 2 == 0
        is_known = bool((1 << fbit) & LN_FEATURES_IMPLEMENTED)
        # even (mandatory) bits we do not implement are fatal; odd bits are ignorable
        if is_even and not is_known:
            raise UnknownEvenFeatureBits(fbit)
    if not features.validate_transitive_dependencies():
        raise IncompatibleOrInsaneFeatures(f"not all transitive dependencies are set. "
                                           f"features={features}")
    return features
|
2020-03-24 15:31:19 +01:00
|
|
|
|
|
|
|
|
|
2020-03-24 20:07:00 +01:00
|
|
|
def derive_payment_secret_from_payment_preimage(payment_preimage: bytes) -> bytes:
    """Returns secret to be put into invoice.
    Derivation is deterministic, based on the preimage.
    Crucially the payment_hash must be derived in an independent way from this.
    """
    # This could be random data too, but then we would need to store it.
    # We derive it identically to clightning, so that we cannot be distinguished:
    # https://github.com/ElementsProject/lightning/blob/faac4b28adee5221e83787d64cd5d30b16b62097/lightningd/invoice.c#L115
    flipped = bytes([payment_preimage[0] ^ 1]) + payment_preimage[1:]
    return sha256(flipped)
|
|
|
|
|
|
|
|
|
|
|
2019-11-29 00:53:18 +01:00
|
|
|
|
2018-07-27 20:59:04 +02:00
|
|
|
|
|
|
|
|
def get_compressed_pubkey_from_bech32(bech32_pubkey: str) -> bytes:
    """Decode a bech32-encoded ('ln' hrp) pubkey into 33 compressed-key bytes."""
    dec = segwit_addr.bech32_decode(bech32_pubkey)
    if dec.encoding is None:
        raise ValueError("Bad bech32 checksum")
    if dec.encoding != segwit_addr.Encoding.BECH32:
        raise ValueError("Bad bech32 encoding: must be using vanilla BECH32")
    hrp = dec.hrp
    if hrp != 'ln':
        raise Exception('unexpected hrp: {}'.format(hrp))
    data_8bits = segwit_addr.convertbits(dec.data, 5, 8, False)
    # right-pad with zeroes up to the compressed-pubkey length
    COMPRESSED_PUBKEY_LENGTH = 33
    padding = (COMPRESSED_PUBKEY_LENGTH - len(data_8bits)) * [0]
    return bytes(data_8bits + padding)
|
2018-08-01 18:30:35 +02:00
|
|
|
|
|
|
|
|
|
2018-09-21 19:18:34 +02:00
|
|
|
def make_closing_tx(local_funding_pubkey: bytes, remote_funding_pubkey: bytes,
                    funding_txid: str, funding_pos: int, funding_sat: int,
                    outputs: List[PartialTxOutput]) -> PartialTransaction:
    """Build the (unsigned) cooperative-close tx spending the funding output."""
    closing_input = make_funding_input(
        local_funding_pubkey, remote_funding_pubkey,
        funding_pos, funding_txid, funding_sat)
    closing_input.nsequence = 0xFFFF_FFFF  # final sequence number
    return PartialTransaction.from_io([closing_input], outputs, locktime=0, version=2)
|
2018-09-27 16:43:33 +02:00
|
|
|
|
|
|
|
|
|
2024-10-21 15:04:37 +02:00
|
|
|
|
|
|
|
|
|
2018-10-05 15:37:47 +02:00
|
|
|
|
|
|
|
|
|
|
|
|
|
# key derivation
# originally based on lnd/keychain/derivation.go
# notes:
# - Add a new path for each use case. Do not reuse existing paths.
#   (to avoid having to carefully consider if reuse would be safe)
# - Always prefer to use hardened derivation for new paths you add.
#   (to avoid having to carefully consider if unhardened would be safe)
class LnKeyFamily(IntEnum):
    # Key-family index: used as the first component of the BIP32 derivation
    # path (see generate_keypair below). All families are hardened except
    # NODE_KEY.
    MULTISIG = 0 | BIP32_PRIME
    REVOCATION_BASE = 1 | BIP32_PRIME
    HTLC_BASE = 2 | BIP32_PRIME
    PAYMENT_BASE = 3 | BIP32_PRIME
    DELAY_BASE = 4 | BIP32_PRIME
    REVOCATION_ROOT = 5 | BIP32_PRIME
    NODE_KEY = 6  # note: the only unhardened family here
    BACKUP_CIPHER = 7 | BIP32_PRIME
    PAYMENT_SECRET_KEY = 8 | BIP32_PRIME
    NOSTR_KEY = 9 | BIP32_PRIME
    FUNDING_ROOT_KEY = 10 | BIP32_PRIME
|
|
|
|
|
|
|
|
|
2020-02-21 12:18:28 +01:00
|
|
|
def generate_keypair(node: BIP32Node, key_family: LnKeyFamily) -> Keypair:
    """Deterministically derive a Keypair for *key_family* from the BIP32 node.

    The child key is taken at the private derivation path [key_family, 0, 0]
    below *node*, so the same (node, key_family) pair always yields the same keys.
    """
    child = node.subkey_at_private_derivation([key_family, 0, 0])
    privkey = child.eckey.get_secret_bytes()
    pubkey = ecc.ECPrivkey(privkey).get_public_key_bytes()
    return Keypair(pubkey, privkey)
|
|
|
|
|
|
2024-10-10 12:30:27 +02:00
|
|
|
def generate_random_keypair() -> Keypair:
    """Return a fresh Keypair backed by a cryptographically random 32-byte privkey."""
    import secrets  # CSPRNG; suitable for key material (unlike `random`)
    privkey = secrets.token_bytes(32)
    pubkey = ecc.ECPrivkey(privkey).get_public_key_bytes()
    return Keypair(pubkey, privkey)
|
2018-10-05 19:37:55 +02:00
|
|
|
|
|
|
|
|
|
2018-10-19 21:47:51 +02:00
|
|
|
# Maximum number of hops in a payment route (BOLT-04 onion packets carry at most 20 frames).
NUM_MAX_HOPS_IN_PAYMENT_PATH = 20
# Edge count equals hop count here; kept as a separate name for call sites that reason about edges.
NUM_MAX_EDGES_IN_PAYMENT_PATH = NUM_MAX_HOPS_IN_PAYMENT_PATH
|
2018-10-19 21:47:51 +02:00
|
|
|
|
2019-09-06 18:09:05 +02:00
|
|
|
|
|
|
|
|
|
2019-01-21 21:27:27 +01:00
|
|
|
|
2019-02-14 17:53:09 +01:00
|
|
|
|
2020-03-17 20:28:59 +01:00
|
|
|
@attr.s(frozen=True)
class UpdateAddHtlc:
    # Immutable in-memory representation of an HTLC (cf. BOLT-02 update_add_htlc).
    # frozen=True makes instances hashable, so they can be stored in sets/dicts.
    amount_msat = attr.ib(type=int, kw_only=True)  # HTLC amount, in millisatoshi
    # payment_hash: the converter accepts a hex string and normalises it to bytes;
    # repr shows hex for readability in logs.
    payment_hash = attr.ib(type=bytes, kw_only=True, converter=hex_to_bytes, repr=lambda val: val.hex())
    cltv_abs = attr.ib(type=int, kw_only=True)  # absolute CLTV expiry (block height)
    timestamp = attr.ib(type=int, kw_only=True)  # local timestamp when the HTLC was seen/created
    # htlc_id: channel-scoped counter; default None presumably means "not yet assigned" -- TODO confirm
    htlc_id = attr.ib(type=int, kw_only=True, default=None)

    @stored_in('adds', tuple)
    def from_tuple(amount_msat, payment_hash, cltv_abs, htlc_id, timestamp) -> 'UpdateAddHtlc':
        # Alternate constructor used by the storage layer (the 'adds' db key);
        # tuple field order must stay in sync with to_json() below.
        return UpdateAddHtlc(
            amount_msat=amount_msat,
            payment_hash=payment_hash,
            cltv_abs=cltv_abs,
            htlc_id=htlc_id,
            timestamp=timestamp)

    def to_json(self):
        # Serialise to the same positional tuple shape that from_tuple() consumes.
        return (self.amount_msat, self.payment_hash, self.cltv_abs, self.htlc_id, self.timestamp)
|
2020-03-24 12:12:36 +01:00
|
|
|
|
|
|
|
|
|
|
|
|
|
class OnionFailureCodeMetaFlag(IntFlag):
    """Meta flag bits set in the top bits of BOLT-04 onion failure codes."""
    BADONION = 1 << 15  # 0x8000
    PERM     = 1 << 14  # 0x4000
    NODE     = 1 << 13  # 0x2000
    UPDATE   = 1 << 12  # 0x1000
|
|
|
|
|
|
2021-01-11 15:19:50 +01:00
|
|
|
|
2023-10-27 16:01:23 +00:00
|
|
|
class PaymentFeeBudget(NamedTuple):
    """Limits (fees and cltv) that a single payment attempt may consume."""
    fee_msat: int  # maximum total routing fees, in millisatoshi

    # The cltv budget covers the cost of route to get to the destination, but excluding the
    # cltv-delta the destination wants for itself. (e.g. "min_final_cltv_delta" is excluded)
    cltv: int  # this is cltv-delta-like, no absolute heights here!

    #num_htlc: int

    @classmethod
    def default(cls, *, invoice_amount_msat: int, config: 'SimpleConfig') -> 'PaymentFeeBudget':
        """Construct a budget from user config, clamping out-of-range values.

        The fee budget is percentage-based for large payments; for small payments
        a constant cutoff acts as a floor so tiny amounts can still be routed.
        """
        millionths_orig = config.LIGHTNING_PAYMENT_FEE_MAX_MILLIONTHS
        cutoff_orig = config.LIGHTNING_PAYMENT_FEE_CUTOFF_MSAT
        millionths = min(max(0, millionths_orig), 250_000)  # clamp into [0, 25%]
        cutoff = min(max(0, cutoff_orig), 10_000_000)  # clamp into [0, 10k sat]
        if millionths != millionths_orig:
            _logger.warning(
                f"PaymentFeeBudget. found insane fee millionths in config. "
                f"clamped: {millionths_orig}->{millionths}")
        if cutoff != cutoff_orig:
            _logger.warning(
                f"PaymentFeeBudget. found insane fee cutoff in config. "
                f"clamped: {cutoff_orig}->{cutoff}")
        # for small payments, fees <= constant cutoff are fine
        # for large payments, the max fee is percentage-based
        fee_msat = max(invoice_amount_msat * millionths // 1_000_000, cutoff)
        return PaymentFeeBudget(
            fee_msat=fee_msat,
            cltv=NBLOCK_CLTV_DELTA_TOO_FAR_INTO_FUTURE,
        )
|