Files
palladum-lightning/tools/reckless
Tatiana Moroz 0173610527 Fix Reckless search command not finding partial matches
The Reckless search command was only returning a result if you
searched for an exact match, which is not very helpful.  This updates the
command so that partial search matches also return results.

Before:
reckless search bolt
Search exhausted all sources
reckless search bol
Search exhausted all sources
reckless search bolt12-pris
Search exhausted all sources

After:
reckless search bolt
Plugins matching 'bolt':
  bolt12-prism (https://github.com/lightningd/plugins)
reckless search bol
Plugins matching 'bol':
  bolt12-prism (https://github.com/lightningd/plugins)
reckless search bolt12-pris
Plugins matching 'bolt12-pris':
  bolt12-prism (https://github.com/lightningd/plugins)

Changelog-Fixed: reckless search now returns partial matches instead of requiring exact plugin names.
2026-02-02 13:01:00 -06:00

2146 lines
83 KiB
Python
Executable File

#!/usr/bin/env python3
import sys
import argparse
import copy
import datetime
from enum import Enum
import json
import logging
import os
from pathlib import Path, PosixPath
import shutil
from subprocess import Popen, PIPE, TimeoutExpired, run
import tempfile
import time
import types
from typing import Union
from urllib.parse import urlparse
from urllib.request import urlopen
from urllib.error import HTTPError
import venv
__VERSION__ = 'v25.12'
# Route log records to stdout so humans and JSON consumers read one stream.
logging.basicConfig(
    level=logging.INFO,
    format='[%(asctime)s] %(levelname)s: %(message)s',
    handlers=[logging.StreamHandler(stream=sys.stdout)],
)
# NOTE(review): not referenced in this chunk — presumably caches the last
# search result for later commands; confirm against the rest of the file.
LAST_FOUND = None
def chunk_string(string: str, size: int):
    """Yield consecutive slices of *string*, each at most *size* chars."""
    for offset in range(0, len(string), size):
        yield string[offset:offset + size]
def ratelimit_output(output: str):
    """Write *output* to stdout in 1 KiB chunks with a short sleep between
    chunks, throttling large JSON replies for slow consumers."""
    # Make sure multi-byte characters are encoded consistently.
    sys.stdout.reconfigure(encoding='utf-8')
    for i in chunk_string(output, 1024):
        sys.stdout.write(i)
        sys.stdout.flush()
        # ~100 KiB/s ceiling; the exact value looks empirical.
        time.sleep(0.01)
class Logger:
    """Redirect logging output to a json object or stdout as appropriate.

    With ``capture=True`` log lines and command results accumulate in
    ``json_output`` so ``reply_json`` can emit a single JSON blob;
    otherwise messages go straight to stdout / the logging module.
    """
    def __init__(self, capture: bool = False):
        # "result" collects command output; "log" collects captured lines.
        self.json_output = {"result": [],
                            "log": []}
        self.capture = capture

    def str_esc(self, raw_string: str) -> str:
        """JSON-escape *raw_string* (without the surrounding quotes)."""
        assert isinstance(raw_string, str)
        # json.dumps adds surrounding double quotes; strip them off.
        return json.dumps(raw_string)[1:-1]

    def debug(self, to_log: str):
        """Log at DEBUG level (captured or sent to the logging module)."""
        assert isinstance(to_log, str) or hasattr(to_log, "__repr__")
        if logging.root.level > logging.DEBUG:
            return
        if self.capture:
            self.json_output['log'].append(self.str_esc(f"DEBUG: {to_log}"))
        else:
            logging.debug(to_log)

    def info(self, to_log: str):
        """Log at INFO level; uncaptured INFO goes to plain stdout."""
        assert isinstance(to_log, str) or hasattr(to_log, "__repr__")
        if logging.root.level > logging.INFO:
            return
        if self.capture:
            self.json_output['log'].append(self.str_esc(f"INFO: {to_log}"))
        else:
            print(to_log)

    def warning(self, to_log: str):
        """Log at WARNING level (captured or sent to the logging module)."""
        assert isinstance(to_log, str) or hasattr(to_log, "__repr__")
        if logging.root.level > logging.WARNING:
            return
        if self.capture:
            self.json_output['log'].append(self.str_esc(f"WARNING: {to_log}"))
        else:
            logging.warning(to_log)

    def error(self, to_log: str):
        """Log at ERROR level (captured or sent to the logging module)."""
        assert isinstance(to_log, str) or hasattr(to_log, "__repr__")
        if logging.root.level > logging.ERROR:
            return
        if self.capture:
            self.json_output['log'].append(self.str_esc(f"ERROR: {to_log}"))
        else:
            logging.error(to_log)

    def add_result(self, result: Union[str, None]):
        """Append a command result for later emission by reply_json."""
        assert json.dumps(result), "result must be json serializable"
        self.json_output["result"].append(result)

    def reply_json(self):
        """json output to stdout with accumulated result."""
        # FIX: operate on this instance rather than the module-level `log`
        # object, so any Logger instance can emit its own buffer.
        if len(self.json_output["result"]) == 1 and \
           isinstance(self.json_output["result"][0], list):
            # unpack sources output
            self.json_output["result"] = self.json_output["result"][0]
        output = json.dumps(self.json_output, indent=3) + '\n'
        ratelimit_output(output)
# Module-wide logger instance used throughout this file.
log = Logger()
# Default plugin source repositories to search.
repos = ['https://github.com/lightningd/plugins']
def reckless_abort(err: str):
    """Log *err*, emit the accumulated JSON reply with a null result, and
    exit with status 1.  Does not return."""
    log.error(err)
    log.add_result(None)
    log.reply_json()
    sys.exit(1)
def py_entry_guesses(name) -> list:
    """Return candidate python entrypoint filenames for plugin *name*."""
    guesses = [name, f'{name}.py']
    guesses.append('__init__.py')
    return guesses
def unsupported_entry(name) -> list:
    """Entrypoint filenames reckless recognizes but cannot install."""
    return [f'{name}.{ext}' for ext in ('go', 'sh')]
def entry_guesses(name: str) -> list:
    """Collect every entrypoint filename any known installer would accept
    for a plugin named *name*."""
    return [entry.format(name=name)
            for inst in INSTALLERS
            for entry in inst.entries]
class Installer:
    '''
    The identification of a plugin language, compiler or interpreter
    availability, and the install procedures.
    '''
    def __init__(self, name: str,
                 exe: Union[str, None] = None,
                 compiler: Union[str, None] = None,
                 manager: Union[str, None] = None,
                 entry: Union[str, None] = None):
        self.name = name
        # Entrypoint filename templates, e.g. '{name}.py'.
        self.entries = [entry] if entry else []
        self.exe = exe            # interpreter (if required)
        self.compiler = compiler  # compiler bin (if required)
        self.manager = manager    # dependency manager (if required)
        self.dependency_file = None
        self.dependency_call = None

    def __repr__(self):
        return (f'<Installer {self.name}: '
                f'exe: {self.exe}, manager: {self.manager}>')

    def executable(self) -> bool:
        '''Validate the necessary bins are available to execute the plugin.'''
        if not self.exe:
            return True
        if not shutil.which(self.exe):
            return False
        # This should arguably not be checked here.
        if self.manager:
            return shutil.which(self.manager) is not None
        return True

    def installable(self) -> bool:
        '''Validate the necessary compiler and package manager executables are
        available to install. If these are defined, they are considered
        mandatory even though the user may have the requisite packages already
        installed.'''
        required = (self.compiler, self.manager)
        return all(shutil.which(tool) for tool in required if tool)

    def add_entrypoint(self, entry: str):
        '''Register an additional entrypoint filename template.'''
        assert isinstance(entry, str)
        self.entries.append(entry)

    def get_entrypoints(self, name: str):
        '''Expand each entrypoint template for the plugin *name*.'''
        return [entry.format(name=name) for entry in self.entries]

    def add_dependency_file(self, dep: str):
        '''Record the filename that lists this language's dependencies.'''
        assert isinstance(dep, str)
        self.dependency_file = dep

    def add_dependency_call(self, call: list):
        '''Append a command (argv list) used to install dependencies.'''
        if self.dependency_call is None:
            self.dependency_call = []
        self.dependency_call.append(call)

    def copy(self):
        '''Deep copy so a cloned installer can be customized safely.'''
        return copy.deepcopy(self)
class InstInfo:
    """Everything needed to install one plugin: its name, source location,
    and — once located — entrypoint, dependency file, subdir and commit."""
    def __init__(self, name: str, location: str, git_url: str):
        self.name = name
        self.source_loc = str(location)  # Used for 'git clone'
        self.git_url: str = git_url      # API access for github repos
        self.srctype: Source = Source.get_type(location)
        self.entry: SourceFile = None    # relative to source_loc or subdir
        self.deps: str = None
        self.subdir: str = None
        self.commit: str = None

    def __repr__(self):
        return (f'InstInfo({self.name}, {self.source_loc}, {self.git_url}, '
                f'{self.entry}, {self.deps}, {self.subdir})')

    def get_repo_commit(self) -> Union[str, None]:
        """The latest commit from a remote repo or the HEAD of a local repo.

        Returns None when the commit cannot be determined (git failure,
        non-github URL, unexpected API response, or other source types)."""
        if self.srctype in [Source.LOCAL_REPO, Source.GIT_LOCAL_CLONE]:
            git = run(['git', 'rev-parse', 'HEAD'], cwd=str(self.source_loc),
                      stdout=PIPE, stderr=PIPE, text=True, check=False,
                      timeout=10)
            if git.returncode != 0:
                return None
            return git.stdout.splitlines()[0]
        if self.srctype == Source.GITHUB_REPO:
            parsed_url = urlparse(self.source_loc)
            if 'github.com' not in parsed_url.netloc:
                return None
            if len(parsed_url.path.split('/')) < 2:
                return None
            start = 1
            # Maybe we were passed an api.github.com/repo/<user> url
            if 'api' in parsed_url.netloc:
                start += 1
            repo_user = parsed_url.path.split('/')[start]
            repo_name = parsed_url.path.split('/')[start + 1]
            api_url = (f'{API_GITHUB_COM}/repos/{repo_user}/{repo_name}'
                       '/commits?ref=HEAD')
            r = urlopen(api_url, timeout=5)
            if r.status != 200:
                return None
            try:
                # FIX: the commits endpoint returns a JSON *array*; indexing
                # it with the string '0' always raised into the (previously
                # bare) except, so this returned None for every github repo.
                return json.loads(r.read().decode())[0]['sha']
            except (LookupError, ValueError):
                # Unexpected/empty API payload.
                return None
        # Other source types have no notion of a commit.
        return None

    def get_inst_details(self) -> bool:
        """Search the source_loc for plugin install details.
        This may be necessary if a contents api is unavailable.
        Extracts entrypoint and dependencies if searchable, otherwise
        matches a directory to the plugin name and stops."""
        if self.srctype == Source.DIRECTORY:
            assert Path(self.source_loc).exists()
            assert os.path.isdir(self.source_loc)
        target = SourceDir(self.source_loc, srctype=self.srctype)
        # Set recursion for how many directories deep we should search
        depth = 0
        if self.srctype in [Source.DIRECTORY, Source.LOCAL_REPO,
                            Source.GIT_LOCAL_CLONE]:
            depth = 5
        elif self.srctype == Source.GITHUB_REPO:
            depth = 1

        def search_dir(self, sub: SourceDir, subdir: bool,
                       recursion: int) -> Union[SourceDir, None]:
            assert isinstance(recursion, int)
            # carveout for archived plugins in lightningd/plugins. Other repos
            # are only searched by API at the top level.
            if recursion == 0 and 'archive' in sub.name.lower():
                pass
            # If unable to search deeper, resort to matching directory name
            elif recursion < 1:
                if sub.name.lower() == self.name.lower():
                    # Partial success (can't check for entrypoint)
                    self.name = sub.name
                    return sub
                return None
            sub.populate()
            if sub.name.lower() == self.name.lower():
                # Directory matches the name we're trying to install, so check
                # for entrypoint and dependencies.
                for inst in INSTALLERS:
                    for g in inst.get_entrypoints(self.name):
                        found_entry = sub.find(g, ftype=SourceFile)
                        if found_entry:
                            break
                    # FIXME: handle a list of dependencies
                    found_dep = sub.find(inst.dependency_file,
                                         ftype=SourceFile)
                    if found_entry:
                        # Success!
                        if found_dep:
                            self.name = sub.name
                            self.entry = found_entry.name
                            self.deps = found_dep.name
                            return sub
                        log.debug(f"missing dependency for {self}")
                        # Reset so the next installer is evaluated cleanly.
                        found_entry = None
            for file in sub.contents:
                if isinstance(file, SourceDir):
                    assert file.relative
                    success = search_dir(self, file, True, recursion - 1)
                    if success:
                        return success
            return None

        try:
            result = search_dir(self, target, False, depth)
        # Using the rest API of github.com may result in a
        # "Error 403: rate limit exceeded" or other access issues.
        # Fall back to cloning and searching the local copy instead.
        except HTTPError:
            result = None
            if self.srctype == Source.GITHUB_REPO:
                # clone source to reckless dir
                target = copy_remote_git_source(self)
                if not target:
                    log.warning(f"could not clone github source {self}")
                    return False
                log.debug(f"falling back to cloning remote repo {self}")
                # Update to reflect use of a local clone
                self.source_loc = str(target.location)
                self.srctype = target.srctype
                result = search_dir(self, target, False, 5)
            if not result:
                return False
        if result:
            if result != target:
                if result.relative:
                    self.subdir = result.relative
                else:
                    # populate() should always assign a relative path
                    # if not in the top-level source directory
                    assert self.subdir == result.name
            return True
        return False
def create_dir(directory: PosixPath) -> bool:
    """Create *directory* (non-recursively).  Returns False only when the
    parent directory does not exist."""
    try:
        Path(directory).mkdir(parents=False, exist_ok=True)
    except FileExistsError:
        # Okay if directory already exists
        return True
    except FileNotFoundError:
        # Parent directory missing
        return False
    return True
def remove_dir(directory: str) -> bool:
    """Recursively delete *directory*.  Returns True on success; logs a
    warning and returns False when it is missing, not a directory, or not
    removable."""
    try:
        shutil.rmtree(directory)
        return True
    # FIX: rmtree raises FileNotFoundError for a missing path; only
    # NotADirectoryError was caught before, despite the message below, so a
    # genuinely absent directory crashed instead of warning.
    except (FileNotFoundError, NotADirectoryError):
        log.warning(f"Tried to remove directory {directory} that "
                    "does not exist.")
    except PermissionError:
        log.warning(f"Permission denied removing dir: {directory}")
    return False
class Source(Enum):
    """Classification of a plugin source location."""
    DIRECTORY = 1
    LOCAL_REPO = 2
    GITHUB_REPO = 3
    OTHER_URL = 4
    UNKNOWN = 5
    # Cloned from remote source before searching (rather than github API)
    GIT_LOCAL_CLONE = 6

    @classmethod
    def get_type(cls, source: str):
        """Infer the Source type of *source* (a local path or URL)."""
        if Path(os.path.realpath(source)).exists():
            if os.path.isdir(os.path.realpath(source)):
                # returns 0 if git repository
                proc = run(['git', '-C', source, 'rev-parse'],
                           cwd=os.path.realpath(source), stdout=PIPE,
                           stderr=PIPE, text=True, timeout=5)
                if proc.returncode == 0:
                    return cls(2)  # LOCAL_REPO
                return cls(1)      # DIRECTORY
            # NOTE(review): an existing non-directory path falls through to
            # the URL checks below — confirm this is intended.
        if 'github.com' in source.lower():
            return cls(3)          # GITHUB_REPO
        if 'http://' in source.lower() or 'https://' in source.lower():
            return cls(4)          # OTHER_URL
        return cls(5)              # UNKNOWN

    @classmethod
    def get_github_user_repo(cls, source: str) -> (str, str):
        'extract a github username and repository name'
        if 'github.com/' not in source.lower():
            return None, None
        trailing = Path(source.lower().partition('github.com/')[2]).parts
        if len(trailing) < 2:
            return None, None
        # Returns the lowercased user/repo (the URL was lowercased above).
        return trailing[0], trailing[1]
class SourceDir():
    """Structure to search source contents.

    Represents one directory of a plugin source tree; contents are
    lazily populated from a local dir, a git repo, or the github API."""
    def __init__(self, location: str, srctype: Source = None, name: str = None,
                 relative: str = None):
        self.location = str(location)
        if name:
            self.name = name
        else:
            self.name = Path(location).name
        self.contents = []
        self.srctype = srctype
        self.prepopulated = False
        self.relative = relative  # location relative to source

    def populate(self):
        """populates contents of the directory at least one level"""
        if self.prepopulated:
            return
        if not self.srctype:
            self.srctype = Source.get_type(self.location)
        if self.srctype == Source.DIRECTORY:
            self.contents = populate_local_dir(self.location)
        elif self.srctype in [Source.LOCAL_REPO, Source.GIT_LOCAL_CLONE]:
            self.contents = populate_local_repo(self.location)
        elif self.srctype == Source.GITHUB_REPO:
            self.contents = populate_github_repo(self.location)
        else:
            # FIX: the message was missing its f-prefix, so it printed the
            # literal text "{self.srctype}" instead of the actual type.
            raise Exception(f"populate method undefined for {self.srctype}")
        # Ensure the relative path of the contents is inherited.
        for c in self.contents:
            if self.relative is None:
                c.relative = c.name
            else:
                c.relative = str(Path(self.relative) / c.name)

    def find(self, name: str, ftype: type = None):
        """Match a SourceFile or SourceDir to the provided name
        (case insensitive) and return it, or None.

        FIX: the previous '-> str' annotation was wrong — this returns the
        matching content object, not its filename."""
        assert isinstance(name, str)
        if len(self.contents) == 0:
            return None
        for c in self.contents:
            if ftype and not isinstance(c, ftype):
                continue
            if c.name.lower() == name.lower():
                return c
        return None

    def __repr__(self):
        return f"<SourceDir: {self.name}, {self.location}, {self.relative}>"

    def __eq__(self, compared):
        # Allow comparison against a bare name string or another SourceDir.
        if isinstance(compared, str):
            return self.name == compared
        if isinstance(compared, SourceDir):
            return (self.name == compared.name and
                    self.location == compared.location)
        return False
class SourceFile():
    """A single file within a plugin source tree."""
    def __init__(self, location: str):
        self.location = str(location)
        self.name = Path(location).name

    def __repr__(self):
        return f"<SourceFile: {self.name} ({self.location})>"

    def __eq__(self, compared):
        # Comparable against a bare filename string or another SourceFile.
        if isinstance(compared, str):
            return self.name == compared
        if isinstance(compared, SourceFile):
            return (self.name == compared.name
                    and self.location == compared.location)
        return False
def populate_local_dir(path: str) -> list:
    """One level of *path*'s contents as SourceDir/SourceFile objects."""
    assert Path(os.path.realpath(path)).exists()
    contents = []
    for entry in os.listdir(path):
        fullpath = Path(path) / entry
        if os.path.isdir(fullpath):
            # Inheriting type saves a call to test if it's a git repo
            contents.append(SourceDir(fullpath, srctype=Source.DIRECTORY))
        else:
            contents.append(SourceFile(fullpath))
    return contents
def populate_local_repo(path: str, parent=None) -> list:
    """Build a SourceDir tree for the git repo at *path* from
    `git ls-tree`; submodules are recursed into separately.  Returns the
    top-level contents list, or None when the git calls fail."""
    assert Path(os.path.realpath(path)).exists()
    if parent is None:
        basedir = SourceDir('base')
    else:
        assert isinstance(parent, SourceDir)
        basedir = parent

    def populate_source_path(parent: SourceDir, mypath: PosixPath,
                             relative: str = None):
        """`git ls-tree` lists all files with their full path.
        This populates all intermediate directories and the file."""
        parentdir = parent
        # NOTE(review): mypath is a Path, so comparing to the string '.'
        # is always False — confirm whether this guard can ever trigger.
        if mypath == '.':
            log.debug(' asked to populate root dir')
            return
        # reverse the parents
        pdirs = mypath
        revpath = []
        child = parentdir
        while pdirs.parent.name != '':
            revpath.append(pdirs.parent.name)
            pdirs = pdirs.parent
        for p in reversed(revpath):
            child = parentdir.find(p)
            if child:
                parentdir = child
            else:
                # Create the missing intermediate directory.
                if p == revpath[-1]:
                    relative_path = None
                    if parentdir.relative:
                        relative_path = parentdir.relative
                # NOTE(review): this condition repeats the inner check
                # above; it only runs when p != revpath[-1] — confirm the
                # intended precedence.
                elif parentdir.relative:
                    relative_path = str(Path(parentdir.relative) /
                                        parentdir.name)
                else:
                    relative_path = parentdir.name
                child = SourceDir(p, srctype=Source.LOCAL_REPO,
                                  relative=relative_path)
                # ls-tree lists every file in the repo with full path.
                # No need to populate each directory individually.
                child.prepopulated = True
                parentdir.contents.append(child)
                parentdir = child
        newfile = SourceFile(mypath.name)
        child.contents.append(newfile)

    # Submodules contents are populated separately
    proc = run(['git', '-C', path, 'submodule', 'status'],
               stdout=PIPE, stderr=PIPE, text=True, timeout=5)
    if proc.returncode != 0:
        log.debug(f"'git submodule status' of repo {path} failed")
        return None
    submodules = []
    for sub in proc.stdout.splitlines():
        submodules.append(sub.split()[1])
    # FIXME: Pass in tag or commit hash
    ver = 'HEAD'
    git_call = ['git', '-C', path, 'ls-tree', '--full-tree', '-r',
                '--name-only', ver]
    proc = run(git_call, stdout=PIPE, stderr=PIPE, text=True, timeout=5)
    if proc.returncode != 0:
        log.debug(f'ls-tree of repo {path} failed')
        return None
    for filepath in proc.stdout.splitlines():
        if filepath in submodules:
            if parent is None:
                relative_path = filepath
            elif basedir.relative:
                relative_path = str(Path(basedir.relative) / filepath)
            # NOTE(review): when parent is set but basedir.relative is
            # falsy, relative_path is unbound here and this raises
            # NameError — confirm callers always pass a relative parent.
            assert relative_path
            submodule_dir = SourceDir(filepath, srctype=Source.LOCAL_REPO,
                                      relative=relative_path)
            populate_local_repo(Path(path) / filepath, parent=submodule_dir)
            submodule_dir.prepopulated = True
            basedir.contents.append(submodule_dir)
        else:
            populate_source_path(basedir, Path(filepath))
    return basedir.contents
def source_element_from_repo_api(member: dict):
    """Convert one member of a github API directory listing into a
    SourceDir or SourceFile; returns None for unrecognized members."""
    # api accessed via <repo>/contents/
    if 'type' in member and 'name' in member and 'git_url' in member:
        if member['type'] == 'dir':
            return SourceDir(member['git_url'], srctype=Source.GITHUB_REPO,
                             name=member['name'])
        elif member['type'] == 'file':
            # Likely a submodule
            if member['size'] == 0:
                return SourceDir(None, srctype=Source.GITHUB_REPO,
                                 name=member['name'])
            return SourceFile(member['name'])
        elif member['type'] == 'commit':
            # No path is given by the api here
            return SourceDir(None, srctype=Source.GITHUB_REPO,
                             name=member['name'])
    # git_url with <repo>/tree/ presents results a little differently
    elif 'type' in member and 'path' in member and 'url' in member:
        if member['type'] not in ['tree', 'blob']:
            log.debug(f' skipping {member["path"]} type={member["type"]}')
        if member['type'] == 'tree':
            return SourceDir(member['url'], srctype=Source.GITHUB_REPO,
                             name=member['path'])
        elif member['type'] == 'blob':
            # This can be a submodule
            if member['size'] == 0:
                # NOTE(review): tree-API members carry 'url'/'path', not
                # 'git_url'/'name' — these lookups look like they would
                # raise KeyError; confirm against the API response shape.
                return SourceDir(member['git_url'], srctype=Source.GITHUB_REPO,
                                 name=member['name'])
            return SourceFile(member['path'])
        elif member['type'] == 'commit':
            # No path is given by the api here
            return SourceDir(None, srctype=Source.GITHUB_REPO,
                             name=member['name'])
    return None
def populate_github_repo(url: str) -> list:
    """populate one level of a github repository via REST API

    Returns a list of SourceDir/SourceFile elements, None for a non-github
    or malformed URL, or False on an HTTP non-200 response."""
    # Forces search to clone remote repos (for blackbox testing)
    if GITHUB_API_FALLBACK:
        with tempfile.NamedTemporaryFile() as tmp:
            raise HTTPError(url, 403, 'simulated ratelimit', {}, tmp)
    # FIXME: This probably contains leftover cruft.
    repo = url.split('/')
    while '' in repo:
        repo.remove('')
    repo_name = None
    parsed_url = urlparse(url)
    if 'github.com' not in parsed_url.netloc:
        return None
    if len(parsed_url.path.split('/')) < 2:
        return None
    start = 1
    # Maybe we were passed an api.github.com/repo/<user> url
    if 'api' in parsed_url.netloc:
        start += 1
    repo_user = parsed_url.path.split('/')[start]
    repo_name = parsed_url.path.split('/')[start + 1]
    # Get details from the github API.
    if API_GITHUB_COM in url:
        api_url = url
    else:
        api_url = f'{API_GITHUB_COM}/repos/{repo_user}/{repo_name}/contents/'
    git_url = api_url
    if "api.github.com" in git_url:
        # This lets us redirect to handle blackbox testing
        log.debug(f'fetching from gh API: {git_url}')
        git_url = (API_GITHUB_COM + git_url.split("api.github.com")[-1])
    # Ratelimiting occurs for non-authenticated GH API calls at 60 in 1 hour.
    r = urlopen(git_url, timeout=5)
    if r.status != 200:
        return False
    if 'git/tree' in git_url:
        tree = json.loads(r.read().decode())['tree']
    else:
        tree = json.loads(r.read().decode())
    contents = []
    for sub in tree:
        # FIX: convert each member once instead of calling the conversion
        # helper twice per element.
        element = source_element_from_repo_api(sub)
        if element:
            contents.append(element)
    return contents
def copy_remote_git_source(github_source: InstInfo):
    """clone or fetch & checkout a local copy of a remote git repo

    Returns a SourceDir for the local clone, or None on failure."""
    user, repo = Source.get_github_user_repo(github_source.source_loc)
    if not user or not repo:
        log.warning('could not extract github user and repo '
                    f'name for {github_source.source_loc}')
        return None
    # Remote sources are cached under <reckless dir>/.remote_sources/<user>.
    local_path = RECKLESS_DIR / '.remote_sources' / user
    create_dir(RECKLESS_DIR / '.remote_sources')
    if not create_dir(local_path):
        log.warning(f'could not provision dir {local_path} to '
                    f'clone remote source {github_source.source_loc}')
        return None
    local_path = local_path / repo
    if local_path.exists():
        # Fetch the latest
        # NOTE(review): assert used for control flow — under `python -O`
        # this check disappears; confirm failure handling is acceptable.
        assert _git_update(github_source, local_path)
    else:
        # NOTE(review): clone failure is not checked here — presumably
        # surfaced later when the clone dir is searched; confirm.
        _git_clone(github_source, local_path)
    return SourceDir(local_path, srctype=Source.GIT_LOCAL_CLONE)
class Config():
    """A generic class for procuring, reading and editing config files"""

    def obtain_config(self,
                      config_path: str,
                      default_text: str,
                      warn: bool = False) -> str:
        """Return a config file from the desired location. Create one with
        default_text if it cannot be found.  With warn=True the user is
        prompted before creating the file."""
        if isinstance(config_path, type(None)):
            raise Exception("Generic config must be passed a config_path.")
        assert isinstance(config_path, str)
        # FIXME: warn if reckless dir exists, but conf not found
        if Path(config_path).exists():
            with open(config_path, 'r+') as f:
                config_content = f.readlines()
            return config_content
        # redirecting the prompts to stderr is kinder for json consumers
        tmp = sys.stdout
        sys.stdout = sys.stderr
        print(f'config file not found: {config_path}')
        if warn:
            confirm = input('press [Y] to create one now.\n').upper() == 'Y'
        else:
            confirm = True
        sys.stdout = tmp
        if not confirm:
            reckless_abort(f"config file required: {config_path}")
        parent_path = Path(config_path).parent
        # Create up to one parent in the directory tree.
        if create_dir(parent_path):
            # FIX: write to the requested config_path rather than
            # self.conf_fp; identical for current callers, but the
            # parameter is what this method contracts to create.
            with open(config_path, 'w') as f:
                f.write(default_text)
            # FIXME: Handle write failure
            return default_text
        else:
            # FIX: 'str' + Path raised TypeError when this branch was
            # reached; format the path into the message instead.
            log.warning('could not create the parent directory '
                        f'{parent_path}')
            raise FileNotFoundError('invalid parent directory')

    def editConfigFile(self, addline: Union[str, None],
                       removeline: Union[str, None]):
        """Idempotent function to add and/or remove a single line each."""
        remove_these_lines = []
        with open(self.conf_fp, 'r') as reckless_conf:
            original = reckless_conf.readlines()
        empty_lines = []
        write_required = False
        for n, l in enumerate(original):
            if removeline and l.strip() == removeline.strip():
                write_required = True
                remove_these_lines.append(n)
                continue
            if l.strip() == '':
                empty_lines.append(n)
                if n-1 in empty_lines:
                    # The white space is getting excessive.
                    remove_these_lines.append(n)
                    continue
        if not addline and not write_required:
            return
        # No write necessary if addline is already in config.
        if addline and not write_required:
            for line in original:
                if line.strip() == addline.strip():
                    return
        with open(self.conf_fp, 'w') as conf_write:
            # no need to write if passed 'None'
            line_exists = not bool(addline)
            for n, l in enumerate(original):
                if n not in remove_these_lines:
                    if n > 0:
                        conf_write.write(f'\n{l.strip()}')
                    else:
                        conf_write.write(l.strip())
                    if addline and addline.strip() == l.strip():
                        # addline is idempotent
                        line_exists = True
            if not line_exists:
                conf_write.write(f'\n{addline}')

    def __init__(self, path: Union[str, None] = None,
                 default_text: Union[str, None] = None,
                 warn: bool = False):
        assert path is not None
        assert default_text is not None
        self.conf_fp = path
        self.content = self.obtain_config(self.conf_fp, default_text,
                                          warn=warn)
class RecklessConfig(Config):
    """Reckless config (by default, specific to the bitcoin network only.)
    This is inherited by the lightningd config and contains all reckless
    maintained plugins."""

    def enable_plugin(self, plugin_path: str):
        """Handle persistent plugin loading via config"""
        self.editConfigFile(f'plugin={plugin_path}',
                            f'disable-plugin={plugin_path}')

    def disable_plugin(self, plugin_path: str):
        """Handle persistent plugin disabling via config"""
        self.editConfigFile(f'disable-plugin={plugin_path}',
                            f'plugin={plugin_path}')

    def __init__(self, path: Union[str, None] = None,
                 default_text: Union[str, None] = None):
        if path is None:
            path = Path(LIGHTNING_DIR) / 'reckless' / 'bitcoin-reckless.conf'
        if default_text is None:
            default_text = (
                '# This configuration file is managed by reckless to activate '
                'and disable\n# reckless-installed plugins\n\n'
            )
        super().__init__(path=str(path), default_text=default_text)
        # All reckless-managed plugins live beside this config file.
        self.reckless_dir = Path(path).parent
class LightningBitcoinConfig(Config):
    """lightningd config specific to the bitcoin network. This is inherited by
    the main lightningd config and in turn, inherits bitcoin-reckless.conf."""

    def __init__(self, path: Union[str, None] = None,
                 default_text: Union[str, None] = None,
                 warn: bool = True):
        if path is None:
            path = Path(LIGHTNING_DIR).joinpath('bitcoin', 'config')
        if default_text is None:
            default_text = "# This config was autopopulated by reckless\n\n"
        super().__init__(path=str(path), default_text=default_text, warn=warn)
class NotFoundError(Exception):
    """Raised by InferInstall when a source/entrypoint cannot be located."""
class InferInstall():
    """Once a plugin is installed, we may need its directory and entrypoint"""
    def __init__(self, name: str):
        reck_contents = os.listdir(RECKLESS_CONFIG.reckless_dir)
        # Map lowercased names to on-disk names for case-insensitive lookup.
        reck_contents_lower = {}
        for f in reck_contents:
            reck_contents_lower.update({f.lower(): f})

        def match_name(name) -> str:
            """Strip an installer's entrypoint formatting from *name* to
            recover the bare plugin name."""
            for tier in range(0, 10):
                # Look for each installers preferred entrypoint format first
                for inst in INSTALLERS:
                    # All of this installer's entrypoint options exhausted.
                    if tier >= len(inst.entries):
                        continue
                    fmt = inst.entries[tier]
                    if '{name}' in fmt:
                        pre = fmt.split('{name}')[0]
                        post = fmt.split('{name}')[-1]
                        if name.startswith(pre) and name.endswith(post):
                            # FIX: lstrip/rstrip remove *character sets*,
                            # not affixes (e.g. 'happy.py'.rstrip('.py')
                            # -> 'hap').  Slice the affix lengths instead.
                            stripped = name[len(pre):]
                            if post:
                                stripped = stripped[:-len(post)]
                            return stripped
                    else:
                        if fmt == name:
                            return name
            return name

        name = match_name(name)
        if name.lower() in reck_contents_lower.keys():
            actual_name = reck_contents_lower[name.lower()]
            self.dir = Path(RECKLESS_CONFIG.reckless_dir).joinpath(actual_name)
        else:
            raise NotFoundError("Could not find a reckless directory "
                                f"for {name}")
        plug_dir = Path(RECKLESS_CONFIG.reckless_dir).joinpath(actual_name)
        for guess in entry_guesses(actual_name):
            for content in plug_dir.iterdir():
                if content.name == guess:
                    self.entry = str(content)
                    self.name = actual_name
                    return
        raise NotFoundError(f'plugin entrypoint not found in {self.dir}')
class InstallationFailure(Exception):
    """Raised when dependency installation or build fails
    (pip, poetry, uv or cargo)."""
def create_python3_venv(staged_plugin: InstInfo) -> InstInfo:
    """Create a virtual environment, install dependencies and test plugin.

    Uses poetry when available for pyproject.toml plugins, otherwise a
    stdlib venv plus pip.  Raises InstallationFailure on installer error."""
    env_path = Path('.venv')
    env_path_full = Path(staged_plugin.source_loc) / env_path
    assert staged_plugin.subdir  # relative dir of original source
    plugin_path = Path(staged_plugin.source_loc) / staged_plugin.subdir
    # FIX: guarantee `pip` is bound even when no dependency file matched;
    # previously the final returncode check could raise NameError.
    pip = None
    if shutil.which('poetry') and staged_plugin.deps == 'pyproject.toml':
        log.debug('configuring a python virtual environment (poetry) in '
                  f'{env_path_full}')
        # The virtual environment should be located with the plugin.
        # This installs it to .venv instead of in the global location.
        mod_poetry_env = os.environ
        mod_poetry_env['POETRY_VIRTUALENVS_IN_PROJECT'] = 'true'
        # This ensures poetry installs to a new venv even though one may
        # already be active (i.e., under CI)
        if 'VIRTUAL_ENV' in mod_poetry_env:
            del mod_poetry_env['VIRTUAL_ENV']
        # to avoid relocating and breaking the venv, symlink pyproject.toml
        # to the location of poetry's .venv dir
        (Path(staged_plugin.source_loc) / 'pyproject.toml') \
            .symlink_to(plugin_path / 'pyproject.toml')
        (Path(staged_plugin.source_loc) / 'poetry.lock') \
            .symlink_to(plugin_path / 'poetry.lock')
        # Avoid redirecting stdout in order to stream progress.
        # Timeout excluded as armv7 grpcio build/install can take 1hr.
        pip = run(['poetry', 'install', '--no-root'], check=False,
                  cwd=staged_plugin.source_loc, env=mod_poetry_env,
                  stdout=stdout_redirect, stderr=stderr_redirect)
        (Path(staged_plugin.source_loc) / 'pyproject.toml').unlink()
        (Path(staged_plugin.source_loc) / 'poetry.lock').unlink()
    else:
        builder = venv.EnvBuilder(with_pip=True)
        builder.create(env_path_full)
        log.debug('configuring a python virtual environment (pip) in '
                  f'{env_path_full}')
        log.debug(f'virtual environment created in {env_path_full}.')
        if staged_plugin.deps == 'pyproject.toml':
            # FIX: invoke the venv's pip by absolute path; the bare
            # 'bin/pip' resolved relative to plugin_path (the plugin
            # source), which has no bin/ directory.
            pip = run([str(env_path_full / 'bin/pip'), 'install',
                       str(plugin_path)],
                      check=False, cwd=plugin_path)
        elif staged_plugin.deps == 'requirements.txt':
            pip = run([str(env_path_full / 'bin/pip'), 'install', '-r',
                       str(plugin_path / 'requirements.txt')],
                      check=False, cwd=plugin_path,
                      stdout=stdout_redirect, stderr=stderr_redirect)
        else:
            log.debug("no python dependency file")
    if pip and pip.returncode != 0:
        log.error('error encountered installing dependencies')
        raise InstallationFailure
    staged_plugin.venv = env_path
    log.info('dependencies installed successfully')
    return staged_plugin
def create_wrapper(plugin: InstInfo):
    '''The wrapper will activate the virtual environment for this plugin and
    then run the plugin from within the same process.'''
    assert hasattr(plugin, 'venv')
    venv_full_path = Path(plugin.source_loc) / plugin.venv
    # Overwrite the entrypoint with a shim whose shebang is the venv's
    # python; it fixes up sys.path then executes the plugin module.
    with open(Path(plugin.source_loc) / plugin.entry, 'w') as wrapper:
        wrapper.write((f"#!{venv_full_path}/bin/python\n"
                       "import sys\n"
                       "import runpy\n\n"
                       f"if '{plugin.source_loc}/{plugin.subdir}' not in "
                       "sys.path:\n"
                       f"    sys.path.append('{plugin.source_loc}/"
                       f"{plugin.subdir}')\n"
                       f"if '{plugin.source_loc}' in sys.path:\n"
                       f"    sys.path.remove('{plugin.source_loc}')\n"
                       f"runpy.run_module(\"{plugin.name}\", "
                       "{}, \"__main__\")"))
    wrapper_file = Path(plugin.source_loc) / plugin.entry
    # Mark the shim executable so lightningd can spawn it.
    wrapper_file.chmod(0o755)
def install_to_python_virtual_environment(cloned_plugin: InstInfo):
    '''Called during install in place of a subprocess.run list'''
    # Delete symlink so that a venv wrapper can take its place
    (Path(cloned_plugin.source_loc) / cloned_plugin.entry).unlink()
    create_python3_venv(cloned_plugin)
    # create_python3_venv sets .venv on success; treat its absence as failure.
    if not hasattr(cloned_plugin, 'venv'):
        raise InstallationFailure
    log.debug('virtual environment for cloned plugin: '
              f'{cloned_plugin.venv}')
    create_wrapper(cloned_plugin)
    return cloned_plugin
def cargo_installation(cloned_plugin: InstInfo):
    """Compile a rust plugin with cargo and symlink the release binary as
    the entrypoint.  Raises InstallationFailure on build error."""
    call = ['cargo', 'build', '--release', '-vv']
    # FIXME: the symlinked Cargo.toml allows the installer to identify a valid
    # plugin directory, but is unneeded, and actually confuses cargo if not
    # removed prior to installing.
    cargo_toml_path = Path(cloned_plugin.source_loc) / 'Cargo.toml'
    if cargo_toml_path.exists():
        cargo_toml_path.unlink()
    # source_loc now contains a symlink to the entrypoint and 'source/plugin/'
    source = Path(cloned_plugin.source_loc) / 'source' / cloned_plugin.name
    log.debug(f'cargo installing from {source}')
    # Stream cargo output directly when verbose and not capturing JSON.
    if logging.root.level < logging.INFO and not log.capture:
        cargo = run(call, cwd=str(source), text=True)
    else:
        cargo = run(call, cwd=str(source), stdout=PIPE,
                    stderr=PIPE, text=True)
    if cargo.returncode == 0:
        log.debug('rust project compiled successfully')
    else:
        log.error(cargo.stderr if cargo.stderr else
                  'error encountered during build, cargo exited with return '
                  f'code {cargo.returncode}')
        # Clean up the failed install before aborting.
        log.debug(f'removing {cloned_plugin.source_loc}')
        remove_dir(cloned_plugin.source_loc)
        raise InstallationFailure
    # We do need to symlink to the executable binary though.
    (Path(cloned_plugin.source_loc) / cloned_plugin.name).\
        symlink_to(source / f'target/release/{cloned_plugin.name}')
    cloned_plugin.entry = cloned_plugin.name
    return cloned_plugin
def install_python_uv(cloned_plugin: InstInfo):
    """This uses the rust-based python plugin manager uv to manage the python
    installation and create a virtual environment."""
    source = Path(cloned_plugin.source_loc) / 'source' / cloned_plugin.name
    # This virtual env path matches the other python installations and allows
    # creating the wrapper in the same manner. Otherwise uv would build it in
    # the source/{name} subdirectory.
    cloned_plugin.venv = Path('.venv')
    # We want the virtual env at the head of our directory structure and uv
    # will need a pyproject.toml there in order to get started.
    (Path(cloned_plugin.source_loc) / 'pyproject.toml').\
        symlink_to(source / 'pyproject.toml')
    call = ['uv', '-v', 'sync']
    uv = run(call, cwd=str(cloned_plugin.source_loc), stdout=PIPE, stderr=PIPE,
             text=True, check=False)
    if uv.returncode != 0:
        for line in uv.stderr.splitlines():
            log.debug(line)
        log.error('Failed to install virtual environment')
        raise InstallationFailure('Failed to create virtual environment!')
    # Delete entrypoint symlink so that a venv wrapper can take its place
    (Path(cloned_plugin.source_loc) / cloned_plugin.entry).unlink()
    create_wrapper(cloned_plugin)
    return cloned_plugin
def install_python_uv_legacy(cloned_plugin: InstInfo):
    """Install a python plugin with uv that was created with a requirements.txt.
    This requires creating a bare virtual environment with uv first.

    Returns the updated InstInfo on success; raises InstallationFailure
    when either venv creation or dependency installation fails.
    """
    source = Path(cloned_plugin.source_loc) / 'source' / cloned_plugin.name
    cloned_plugin.venv = Path('.venv')
    # NOTE(review): a requirements.txt-era plugin may not ship a
    # pyproject.toml, which would leave this symlink dangling - confirm
    # uv tolerates that.
    (Path(cloned_plugin.source_loc) / 'pyproject.toml').\
        symlink_to(source / 'pyproject.toml')
    (Path(cloned_plugin.source_loc) / 'requirements.txt').\
        symlink_to(source / 'requirements.txt')
    # Renamed from `venv`: the old local name shadowed the stdlib `venv`
    # module imported at the top of this file.
    venv_cmd = run(['uv', 'venv'], cwd=str(cloned_plugin.source_loc),
                   stdout=PIPE, stderr=PIPE, text=True, check=False)
    if venv_cmd.returncode != 0:
        for line in venv_cmd.stderr.splitlines():
            log.debug(line)
        log.error('Failed to create virtual environment')
        raise InstallationFailure('Failed to create virtual environment!')
    for line in venv_cmd.stdout.splitlines():
        log.debug(line)
    for line in venv_cmd.stderr.splitlines():
        log.debug(line)
    # Running this as a shell allows overriding any active virtual environment
    # which would make uv skip installing packages already present in the
    # current env.
    call = ['. .venv/bin/activate; uv pip install -r requirements.txt']
    uv = run(call, shell=True, cwd=str(cloned_plugin.source_loc),
             stdout=PIPE, stderr=PIPE, text=True, check=False)
    if uv.returncode != 0:
        for line in uv.stderr.splitlines():
            log.debug(line)
        log.error('Failed to install virtual environment')
        raise InstallationFailure('Failed to create virtual environment!')
    for line in uv.stdout.splitlines():
        log.debug(line)
    for line in uv.stderr.splitlines():
        log.debug(line)
    # Delete entrypoint symlink so that a venv wrapper can take its place
    (Path(cloned_plugin.source_loc) / cloned_plugin.entry).unlink()
    create_wrapper(cloned_plugin)
    log.info('dependencies installed successfully')
    return cloned_plugin
# Python installers: each recognizes a different dependency manifest but all
# build an isolated virtual environment for the plugin.
python3venv = Installer('python3venv', exe='python3',
                        manager='pip', entry='{name}.py')
python3venv.add_entrypoint('{name}')
python3venv.add_entrypoint('__init__.py')
python3venv.add_dependency_file('requirements.txt')
python3venv.dependency_call = install_to_python_virtual_environment

poetryvenv = Installer('poetryvenv', exe='python3',
                       manager='poetry', entry='{name}.py')
poetryvenv.add_entrypoint('{name}')
poetryvenv.add_entrypoint('__init__.py')
poetryvenv.add_dependency_file('poetry.lock')
poetryvenv.add_dependency_file('pyproject.toml')
poetryvenv.dependency_call = install_to_python_virtual_environment

pyprojectViaPip = Installer('pyprojectViaPip', exe='python3',
                            manager='pip', entry='{name}.py')
pyprojectViaPip.add_entrypoint('{name}')
pyprojectViaPip.add_entrypoint('__init__.py')
pyprojectViaPip.add_dependency_file('pyproject.toml')
pyprojectViaPip.dependency_call = install_to_python_virtual_environment

pythonuv = Installer('pythonuv', exe='python3', manager='uv', entry="{name}.py")
pythonuv.add_dependency_file('uv.lock')
pythonuv.dependency_call = install_python_uv

pythonuvlegacy = Installer('pythonuvlegacy', exe='python3', manager='uv', entry='{name}.py')
pythonuvlegacy.add_dependency_file('requirements.txt')
pythonuvlegacy.dependency_call = install_python_uv_legacy

# Nodejs plugin installer
nodejs = Installer('nodejs', exe='node',
                   manager='npm', entry='{name}.js')
nodejs.add_entrypoint('{name}')
nodejs.add_dependency_call(['npm', 'install', '--omit=dev'])
nodejs.add_dependency_file('package.json')

# This entrypoint is used to identify a candidate directory, don't call it.
rust_cargo = Installer('rust', manager='cargo', entry='Cargo.toml')
rust_cargo.add_dependency_file('Cargo.toml')
rust_cargo.dependency_call = cargo_installation

# Order matters: _install_plugin picks the first installer whose dependency
# file is present, so the uv installers take precedence over pip/poetry.
INSTALLERS = [pythonuv, pythonuvlegacy, python3venv, poetryvenv,
              pyprojectViaPip, nodejs, rust_cargo]
def help_alias(targets: list):
    """Print the top-level help when no target is given, otherwise point
    the user at the subcommand's own help; always exits with status 1."""
    if len(targets) == 0:
        # `parser` is the module-level argparse parser built in __main__.
        parser.print_help(sys.stdout)
    else:
        log.info('try "reckless {} -h"'.format(' '.join(targets)))
    sys.exit(1)
def _source_search(name: str, src: str) -> Union[InstInfo, None]:
    """Identify source type, retrieve contents, and populate InstInfo
    if the relevant contents are found.

    Returns the populated InstInfo, or None when the plugin is not found
    in this source."""
    root_dir = SourceDir(src)
    source = InstInfo(name, root_dir.location, None)
    # If a local clone of a github source already exists, prefer searching
    # that instead of accessing the github API.
    if source.srctype == Source.GITHUB_REPO:
        # Do we have a local copy already? Use that.
        user, repo = Source.get_github_user_repo(src)
        assert user
        assert repo
        local_clone_location = RECKLESS_DIR / '.remote_sources' / user / repo
        if local_clone_location.exists():
            # Make sure it's the correct remote source and fetch any updates.
            if _git_update(source, local_clone_location):
                log.debug(f"Using local clone of {src}: "
                          f"{local_clone_location}")
                source.source_loc = str(local_clone_location)
                source.srctype = Source.GIT_LOCAL_CLONE
    if source.get_inst_details():
        return source
    return None
def _git_clone(src: InstInfo, dest: Union[PosixPath, str]) -> bool:
    """Clone the plugin source (with submodules) into `dest`.

    Returns True on success; on failure logs git's stderr at debug level,
    removes any partially created destination, and returns False."""
    log.info(f'cloning {src.srctype} {src}')
    if src.srctype == Source.GITHUB_REPO:
        assert 'github.com' in src.source_loc
        # Rebuild the URL from the path portion so the GITHUB_COM prefix
        # (possibly a mirror) is honored.
        source = f"{GITHUB_COM}" + src.source_loc.split("github.com")[-1]
    elif src.srctype in [Source.LOCAL_REPO, Source.OTHER_URL,
                         Source.GIT_LOCAL_CLONE]:
        source = src.source_loc
    else:
        # Non-repository sources cannot be cloned.
        return False
    git = run(['git', 'clone', '--recurse-submodules', source, str(dest)],
              stdout=PIPE, stderr=PIPE, text=True, check=False, timeout=180)
    if git.returncode != 0:
        for line in git.stderr.splitlines():
            log.debug(line)
        if Path(dest).exists():
            remove_dir(str(dest))
        log.error('Failed to clone repo')
        return False
    return True
def _git_update(github_source: InstInfo, local_copy: PosixPath) -> bool:
    """Sync a previously cloned local copy with its remote.

    Points `origin` at the expected URL, fetches, and checks out the
    remote's default branch.  Returns False on any git failure so the
    caller can fall back to other retrieval methods.

    Fix: removed the `assert git.returncode == 0` statements that
    preceded each failure check - they made the graceful `return False`
    paths unreachable (raising AssertionError instead) and disappeared
    entirely under `python -O`."""
    # Ensure this is the correct source
    git = run(['git', 'remote', 'set-url', 'origin', github_source.source_loc],
              cwd=str(local_copy), stdout=PIPE, stderr=PIPE, text=True,
              check=False, timeout=60)
    if git.returncode != 0:
        return False
    # Fetch the latest from the remote
    git = run(['git', 'fetch', 'origin', '--recurse-submodules=on-demand'],
              cwd=str(local_copy), stdout=PIPE, stderr=PIPE, text=True,
              check=False, timeout=60)
    if git.returncode != 0:
        return False
    # Find default branch
    git = run(['git', 'symbolic-ref', 'refs/remotes/origin/HEAD', '--short'],
              cwd=str(local_copy), stdout=PIPE, stderr=PIPE, text=True,
              check=False, timeout=60)
    if git.returncode != 0:
        return False
    default_branch = git.stdout.splitlines()[0]
    if default_branch != 'origin/master':
        log.debug(f'UNUSUAL: fetched default branch {default_branch} for '
                  f'{github_source.source_loc}')
    # Checkout default branch
    git = run(['git', 'checkout', default_branch],
              cwd=str(local_copy), stdout=PIPE, stderr=PIPE, text=True,
              check=False, timeout=60)
    if git.returncode != 0:
        return False
    return True
def get_temp_reckless_dir() -> PosixPath:
    """Create and return a uniquely named temporary working directory."""
    # Derive a short pseudo-random tag from the process times; mkdtemp
    # appends its own random suffix on top of this prefix.
    tag = str(hash(os.times()))[-9:]
    return Path(tempfile.mkdtemp(prefix=f'reckless-{tag}'))
def add_installation_metadata(installed: InstInfo,
                              original_request: InstInfo):
    """Document the install request and installation details for use when
    updating the plugin.

    Writes alternating key/value lines to `.metadata` inside the plugin's
    install directory; update_plugin() reads this format back."""
    install_dir = Path(installed.source_loc)
    assert install_dir.is_dir()
    # Keep remote URLs verbatim; resolve local paths to absolute form so
    # the record stays valid regardless of the caller's cwd.
    if urlparse(original_request.source_loc).scheme in ['http', 'https']:
        abs_source_path = original_request.source_loc
    else:
        abs_source_path = Path(original_request.source_loc).resolve()
    data = ('installation date\n'
            f'{datetime.date.today().isoformat()}\n'
            'installation time\n'
            f'{int(time.time())}\n'
            'original source\n'
            f'{abs_source_path}\n'
            'requested commit\n'
            f'{original_request.commit}\n'
            'installed commit\n'
            f'{installed.commit}\n')
    with open(install_dir / '.metadata', 'w') as metadata:
        metadata.write(data)
def _checkout_commit(orig_src: InstInfo,
                     cloned_src: InstInfo,
                     cloned_path: PosixPath):
    """Check out the requested commit/tag (for repository sources) and
    record the commit actually used on `cloned_src` for metadata.

    Returns the path to install from (possibly a subdirectory of the
    clone), or None when the requested commit cannot be checked out."""
    # Check out and verify commit/tag if source was a repository
    if orig_src.srctype in [Source.LOCAL_REPO, Source.GITHUB_REPO,
                            Source.OTHER_URL, Source.GIT_LOCAL_CLONE]:
        if orig_src.commit:
            log.debug(f"Checking out {orig_src.commit}")
            checkout = Popen(['git', 'checkout', orig_src.commit],
                             cwd=str(cloned_path),
                             stdout=PIPE, stderr=PIPE)
            checkout.wait()
            if checkout.returncode != 0:
                log.warning('failed to checkout referenced '
                            f'commit {orig_src.commit}')
                return None
        else:
            log.debug("using latest commit of default branch")
        # Log the commit we actually used (for installation metadata)
        git = run(['git', 'rev-parse', 'HEAD'], cwd=str(cloned_path),
                  stdout=PIPE, stderr=PIPE, text=True, check=False, timeout=60)
        if git.returncode == 0:
            head_commit = git.stdout.splitlines()[0]
            log.debug(f'checked out HEAD: {head_commit}')
            cloned_src.commit = head_commit
        else:
            # Non-fatal: metadata will simply lack the installed commit.
            log.debug(f'unable to collect commit: {git.stderr}')
    else:
        if orig_src.commit:
            log.warning("unable to checkout commit/tag on non-repository "
                        "source")
        return cloned_path
    if cloned_src.subdir is not None:
        return Path(cloned_src.source_loc) / cloned_src.subdir
    return cloned_path
def _install_plugin(src: InstInfo) -> Union[InstInfo, None]:
    """make sure the repo exists and clone it.

    Clones or copies the source into a temporary directory, stages it
    under the reckless directory, installs dependencies with the first
    matching installer, then smoke-tests the entrypoint.  Returns the
    staged InstInfo on success or None on any failure."""
    log.debug(f'Install requested from {src}.')
    if RECKLESS_CONFIG is None:
        log.error('reckless install directory unavailable')
        return None

    # Use a unique directory for each cloned repo.
    tmp_path = get_temp_reckless_dir()
    if not create_dir(tmp_path):
        log.debug(f'failed to create {tmp_path}')
        return None
    clone_path = tmp_path / 'clone'
    # Fix: this previously re-validated tmp_path (copy-paste error) even
    # though the log message names clone_path - the intended target.
    if not create_dir(clone_path):
        log.debug(f'failed to create {clone_path}')
        return None
    # we rename the original repo here.
    plugin_path = clone_path / src.name
    inst_path = Path(RECKLESS_CONFIG.reckless_dir) / src.name
    if Path(clone_path).exists():
        log.debug(f'{clone_path} already exists - deleting')
        shutil.rmtree(clone_path)
    if src.srctype == Source.DIRECTORY:
        full_source_path = Path(src.source_loc)
        if src.subdir:
            full_source_path /= src.subdir
        log.debug(("copying local directory contents from"
                   f" {full_source_path}"))
        create_dir(clone_path)
        shutil.copytree(full_source_path, plugin_path)
    elif src.srctype in [Source.LOCAL_REPO, Source.GITHUB_REPO,
                         Source.OTHER_URL, Source.GIT_LOCAL_CLONE]:
        # clone git repository to /tmp/reckless-...
        if not _git_clone(src, plugin_path):
            return None
    # FIXME: Validate path was cloned successfully.
    # Depending on how we accessed the original source, there may be install
    # details missing. Searching the cloned repo makes sure we have it.
    cloned_src = _source_search(src.name, str(clone_path))
    log.debug(f'cloned_src: {cloned_src}')
    if not cloned_src:
        log.warning('failed to find plugin after cloning repo.')
        return None
    # If a specific commit or tag was requested, check it out now.
    plugin_path = _checkout_commit(src, cloned_src, plugin_path)
    if not plugin_path:
        return None
    # Find a suitable installer: first one whose tooling is available and
    # whose dependency file is present in the clone wins.
    INSTALLER = None
    for inst_method in INSTALLERS:
        if not (inst_method.installable() and inst_method.executable()):
            continue
        if inst_method.dependency_file is not None:
            if inst_method.dependency_file not in os.listdir(plugin_path):
                continue
        log.debug(f"using installer {inst_method.name}")
        INSTALLER = inst_method
        break
    if not INSTALLER:
        log.warning('Could not find a suitable installer method for '
                    f'{src.name}')
        return None
    if not cloned_src.entry:
        # The plugin entrypoint may not be discernable prior to cloning.
        # Need to search the newly cloned directory, not the original
        cloned_src.source_loc = str(plugin_path)
    # Relocate plugin to a staging directory prior to testing
    if not Path(inst_path).exists():
        log.debug(f'creating {inst_path}')
        create_dir(inst_path)
    if not Path(inst_path / 'source').exists():
        log.debug(f'creating {inst_path / "source"}')
        create_dir(inst_path / 'source')
    staging_path = inst_path / 'source' / src.name
    log.debug(f'copying {plugin_path} tree to {staging_path}')
    shutil.copytree(str(plugin_path), staging_path)
    staged_src = cloned_src
    # Because the source files are copied to a 'source' directory, the
    # get_inst_details function no longer works. (dir must match plugin name)
    # Set these manually instead.
    staged_src.source_loc = str(inst_path)
    staged_src.srctype = Source.DIRECTORY
    # Use subdir to redirect the symlink to the actual executable location
    staged_src.subdir = f'source/{src.name}'
    # Create symlink in staging tree to redirect to the plugins entrypoint
    log.debug(f"linking source {staging_path / cloned_src.entry} to "
              f"{Path(staged_src.source_loc) / cloned_src.entry}")
    log.debug(staged_src)
    (Path(staged_src.source_loc) / cloned_src.entry).\
        symlink_to(staging_path / cloned_src.entry)
    # try it out
    if INSTALLER.dependency_call:
        if isinstance(INSTALLER.dependency_call, types.FunctionType):
            try:
                staged_src = INSTALLER.dependency_call(staged_src)
            except InstallationFailure:
                return None
        else:
            for call in INSTALLER.dependency_call:
                log.debug(f"Install: invoking '{' '.join(call)}'")
                if logging.root.level < logging.INFO:
                    pip = Popen(call, cwd=staging_path, text=True)
                else:
                    pip = Popen(call, cwd=staging_path, stdout=PIPE,
                                stderr=PIPE, text=True)
                pip.wait()
                # FIXME: handle output of multiple calls
                if pip.returncode == 0:
                    log.info('dependencies installed successfully')
                else:
                    log.error('error encountered installing dependencies')
                    if pip.stdout:
                        log.debug(pip.stdout.read())
                    remove_dir(clone_path)
                    remove_dir(inst_path)
                    return None
    staged_src.subdir = None
    test_log = []
    try:
        # Smoke-test the entrypoint; a well-behaved plugin keeps running.
        test = run([Path(staged_src.source_loc).joinpath(staged_src.entry)],
                   cwd=str(staging_path), stdout=PIPE, stderr=PIPE,
                   text=True, timeout=10)
        for line in test.stderr.splitlines():
            test_log.append(line)
        returncode = test.returncode
    except TimeoutExpired:
        # If the plugin is still running, it's assumed to be okay.
        returncode = 0
    if returncode != 0:
        log.debug("plugin testing error:")
        for line in test_log:
            log.debug(f' {line}')
        log.error('plugin testing failed')
        remove_dir(clone_path)
        remove_dir(inst_path)
        return None
    add_installation_metadata(staged_src, src)
    log.info(f'plugin installed: {inst_path}')
    remove_dir(clone_path)
    return staged_src
def location_from_name(plugin_name: str) -> (str, str):
    """Maybe the location was passed in place of the plugin name. Check
    if this looks like a filepath or URL and return that as well as the
    plugin name."""
    target = Path(plugin_name)
    if not target.exists():
        # Nothing on disk - perhaps a URL pointing at the plugin.
        try:
            if urlparse(plugin_name).scheme in ['http', 'https']:
                return (plugin_name, target.with_suffix('').name)
        except ValueError:
            pass
        # No path included, return the name only.
        return (None, plugin_name)
    # Directory containing the plugin? The plugin name should match the dir.
    if target.is_dir():
        return (target.parent, target.name)
    # Possibly the entrypoint itself was passed?
    if target.is_file():
        stem = target.with_suffix('').name
        if stem != target.parent.name or not target.parent.parent.exists():
            # If the directory is not named for the plugin, we can't infer what
            # should be done.
            # FIXME: return InstInfo with entrypoint rather than source str.
            return (None, plugin_name)
        # We have to make inferences as to the naming here.
        return (target.parent.parent, stem)
def _enable_installed(installed: InstInfo, plugin_name: str) -> Union[str, None]:
    """Enable the plugin in the active config file and dynamically activate
    if a lightningd rpc is available.

    Returns the installed location string on success, None otherwise."""
    if not installed:
        log.warning(f'{plugin_name}: installation aborted')
        return None
    if enable(installed.name):
        return f"{installed.source_loc}"
    log.error(('dynamic activation failed: '
               f'{installed.name} not found in reckless directory'))
    return None
def install(plugin_name: str) -> Union[str, None]:
    """Downloads plugin from source repos, installs and activates plugin.
    Returns the location of the installed plugin or "None" in the case of
    failure."""
    assert isinstance(plugin_name, str)
    # Specify a tag or commit to checkout by adding @<tag> to plugin name
    if '@' in plugin_name:
        log.debug("testing for a commit/tag in plugin name")
        name, commit = plugin_name.split('@', 1)
    else:
        name = plugin_name
        commit = None
    # Is the install request specifying a path to the plugin?
    direct_location, name = location_from_name(name)
    src = None
    if direct_location:
        logging.debug(f"install of {name} requested from {direct_location}")
        src = InstInfo(name, direct_location, name)
        # Treating a local git repo as a directory allows testing
        # uncommitted changes.
        if src and src.srctype == Source.LOCAL_REPO:
            src.srctype = Source.DIRECTORY
        if not src.get_inst_details():
            src = None
    if not direct_location or not src:
        log.debug(f"Searching for {name}")
        # search() stashes an exact match in the module-level LAST_FOUND.
        if search(name):
            global LAST_FOUND
            src = LAST_FOUND
            LAST_FOUND = None
            src.commit = commit
            log.debug(f'Retrieving {src.name} from {src.source_loc}')
        else:
            LAST_FOUND = None
            return None
    try:
        installed = _install_plugin(src)
    except FileExistsError as err:
        log.error(f'File exists: {err.filename}')
        return None
    return _enable_installed(installed, plugin_name)
def uninstall(plugin_name: str) -> str:
    """Disables plugin and deletes the plugin's reckless dir. Returns the
    status of the uninstall attempt."""
    assert isinstance(plugin_name, str)
    log.debug(f'Uninstalling plugin {plugin_name}')
    disable(plugin_name)
    try:
        inst = InferInstall(plugin_name)
    except NotFoundError as err:
        log.error(err)
        return "uninstall failed"
    if not Path(inst.entry).exists():
        # NOTE(review): the message lacks a space before the path.
        log.error("cannot find installed plugin at expected path"
                  f"{inst.entry}")
        return "uninstall failed"
    log.debug(f'looking for {str(Path(inst.entry).parent)}')
    if remove_dir(str(Path(inst.entry).parent)):
        log.info(f"{inst.name} uninstalled successfully.")
    else:
        return "uninstall failed"
    return "uninstalled"
def _get_all_plugins_from_source(src: str) -> list:
    """Get all plugin directories from a source repository.
    Returns a list of (plugin_name, source_url) tuples."""
    plugins = []
    srctype = Source.get_type(src)
    if srctype == Source.UNKNOWN:
        return plugins
    try:
        root = SourceDir(src, srctype=srctype)
        root.populate()
    except Exception as e:
        # Deliberately broad: an unreachable or malformed source must not
        # abort the search across the remaining sources.
        log.debug(f"Failed to populate source {src}: {e}")
        return plugins
    # The repository itself may be named after the plugin it contains.
    plugins.append((root.name, src))
    for item in root.contents:
        if isinstance(item, SourceDir):
            # Skip archive directories
            if 'archive' in item.name.lower():
                continue
            plugins.append((item.name, src))
    return plugins
def search(plugin_name: str) -> Union[str, None]:
    """searches plugin index for plugin

    Logs any partial matches for the user, stashes an exact match (when
    found) in the module-level LAST_FOUND for install() to reuse, and
    returns the exact match's source location string or None.
    (Annotation corrected: this returns a str, not an InstInfo.)"""
    ordered_sources = RECKLESS_SOURCES.copy()
    for src in RECKLESS_SOURCES:
        # Search repos named after the plugin before collections
        if Source.get_type(src) == Source.GITHUB_REPO:
            if src.split('/')[-1].lower() == plugin_name.lower():
                ordered_sources.remove(src)
                ordered_sources.insert(0, src)
    # Check locally before reaching out to remote repositories
    for src in RECKLESS_SOURCES:
        if Source.get_type(src) in [Source.DIRECTORY, Source.LOCAL_REPO]:
            ordered_sources.remove(src)
            ordered_sources.insert(0, src)
    # First, collect all partial matches to display to user
    partial_matches = []
    for source in ordered_sources:
        for plugin_name_found, src_url in _get_all_plugins_from_source(source):
            if plugin_name.lower() in plugin_name_found.lower():
                partial_matches.append((plugin_name_found, src_url))
    # Display all partial matches
    if partial_matches:
        log.info(f"Plugins matching '{plugin_name}':")
        for name, src_url in partial_matches:
            log.info(f" {name} ({src_url})")
    # Now try exact match for installation purposes
    exact_match = None
    for source in ordered_sources:
        srctype = Source.get_type(source)
        if srctype == Source.UNKNOWN:
            log.debug(f'cannot search {srctype} {source}')
            continue
        if srctype in [Source.DIRECTORY, Source.LOCAL_REPO,
                       Source.GITHUB_REPO, Source.OTHER_URL]:
            found = _source_search(plugin_name, source)
            if found:
                log.debug(f"{found}, {found.srctype}")
                exact_match = found
                break
    if exact_match:
        log.info(f"found {exact_match.name} in source: {exact_match.source_loc}")
        log.debug(f"entry: {exact_match.entry}")
        if exact_match.subdir:
            log.debug(f'sub-directory: {exact_match.subdir}')
        global LAST_FOUND
        # Stashing the search result saves install() a call to _source_search.
        LAST_FOUND = exact_match
        return str(exact_match.source_loc)
    if not partial_matches:
        log.info("Search exhausted all sources")
    return None
class RPCError(Exception):
    """lightning-cli fails to connect to lightningd RPC"""
    def __init__(self, err):
        # Raw stderr text from the failed lightning-cli invocation.
        self.err = err

    def __str__(self):
        # Fix: the literal was missing its f-prefix, so the message
        # printed "{self.err}" verbatim instead of the actual error.
        return f'RPCError({self.err})'
class CLIError(Exception):
    """lightningd error response"""
    def __init__(self, code, message):
        # JSON-RPC error code and human-readable message from lightningd.
        self.code = code
        self.message = message

    def __str__(self):
        return 'CLIError({} {})'.format(self.code, self.message)
def lightning_cli(*cli_args, timeout: int = 15) -> dict:
    """Interfaces with Core-Lightning via CLI using any configured options.

    Returns the parsed JSON response, or {'content': raw_text} for
    non-JSON output.  Raises CLIError when lightningd rejects the request
    (exit code 1) and RPCError when the RPC is unreachable."""
    cmd = LIGHTNING_CLI_CALL.copy()
    cmd.extend(cli_args)
    clncli = run(cmd, stdout=PIPE, stderr=PIPE, check=False, timeout=timeout)
    out = clncli.stdout.decode()
    if len(out) > 0 and out[0] == '{':
        # If all goes well, a json object is typically returned
        out = json.loads(out.replace('\n', ''))
    else:
        # help, -V, etc. may not return json, so stash it here.
        out = {'content': out}
    if clncli.returncode == 0:
        return out
    if clncli.returncode == 1:
        # RPC doesn't like our input
        # output contains 'code' and 'message'
        raise CLIError(out['code'], out['message'])
    # RPC may not be available - i.e., lightningd not running, using
    # alternate config.
    err = clncli.stderr.decode()
    raise RPCError(err)
def enable(plugin_name: str):
    """dynamically activates plugin and adds to config (persistent)

    Returns 'enabled' on success, None on failure."""
    assert isinstance(plugin_name, str)
    try:
        inst = InferInstall(plugin_name)
    except NotFoundError as err:
        log.error(err)
        return None
    path = inst.entry
    if not Path(path).exists():
        log.error(f'cannot find installed plugin at expected path {path}')
        return None
    log.debug(f'activating {plugin_name}')
    try:
        lightning_cli('plugin', 'start', path)
    except CLIError as err:
        if 'already registered' in err.message:
            log.debug(f'{inst.name} is already running')
            # NOTE(review): returns without persisting to the reckless
            # config - confirm an already-running plugin is guaranteed to
            # be enabled there already.
            return None
        else:
            log.error(f'reckless: {inst.name} failed to start!')
            log.error(err)
            return None
    except RPCError:
        # Not fatal: still persist to config so lightningd loads the
        # plugin on its next start.
        log.info(('lightningd rpc unavailable. '
                  'Skipping dynamic activation.'))
    RECKLESS_CONFIG.enable_plugin(path)
    log.info(f'{inst.name} enabled')
    return 'enabled'
def disable(plugin_name: str):
    """reckless disable <plugin>
    deactivates an installed plugin

    Returns 'disabled' on success, None on failure."""
    assert isinstance(plugin_name, str)
    try:
        inst = InferInstall(plugin_name)
    except NotFoundError as err:
        log.warning(f'failed to disable: {err}')
        return None
    path = inst.entry
    if not Path(path).exists():
        sys.stderr.write(f'Could not find plugin at {path}\n')
        return None
    log.debug(f'deactivating {plugin_name}')
    try:
        lightning_cli('plugin', 'stop', path)
    except CLIError as err:
        if err.code == -32602:
            # Not running; still proceed to disable it in the config.
            log.debug('plugin not currently running')
        else:
            log.error('lightning-cli plugin stop failed')
            # NOTE(review): uses the root logging module rather than the
            # file's Logger wrapper - possibly unintentional.
            logging.error(err)
            return None
    except RPCError:
        log.debug(('lightningd rpc unavailable. '
                   'Skipping dynamic deactivation.'))
    RECKLESS_CONFIG.disable_plugin(path)
    log.info(f'{inst.name} disabled')
    return 'disabled'
def load_config(reckless_dir: Union[str, None] = None,
                network: str = 'bitcoin') -> Config:
    """Initial directory discovery and config file creation.

    Verifies reckless and lightningd agree on the network config file,
    creates the network-scoped reckless config if needed, and ensures the
    network config includes it.  Calls reckless_abort() on mismatch or
    when a config file cannot be written."""
    net_conf = None
    # Does the lightning-cli already reference an explicit config?
    try:
        active_config = lightning_cli('listconfigs', timeout=10)['configs']
        if 'conf' in active_config:
            net_conf = LightningBitcoinConfig(path=active_config['conf']
                                              ['value_str'])
    except RPCError:
        # lightningd not reachable; fall back to the default locations.
        pass
    if reckless_dir is None:
        reckless_dir = Path(LIGHTNING_DIR) / 'reckless'
    else:
        if not os.path.isabs(reckless_dir):
            reckless_dir = Path.cwd() / reckless_dir
    if LIGHTNING_CONFIG:
        network_path = LIGHTNING_CONFIG
    else:
        network_path = Path(LIGHTNING_DIR) / network / 'config'
    reck_conf_path = Path(reckless_dir) / f'{network}-reckless.conf'
    if net_conf:
        if str(network_path) != net_conf.conf_fp:
            reckless_abort('reckless configuration does not match lightningd:\n'
                           f'reckless network config path: {network_path}\n'
                           f'lightningd active config: {net_conf.conf_fp}')
    else:
        # The network-specific config file (bitcoin by default)
        net_conf = LightningBitcoinConfig(path=network_path)
    # Reckless manages plugins here.
    try:
        reckless_conf = RecklessConfig(path=reck_conf_path)
    except FileNotFoundError:
        reckless_abort('reckless config file could not be written: '
                       + str(reck_conf_path))
    if not net_conf:
        reckless_abort('Error: could not load or create the network specific lightningd'
                       ' config (default .lightning/bitcoin)')
    # Make lightningd pull in the reckless-managed plugin settings.
    net_conf.editConfigFile(f'include {reckless_conf.conf_fp}', None)
    return reckless_conf
def get_sources_file() -> str:
    """Path of the file persisting the configured plugin sources."""
    return str(RECKLESS_DIR / '.sources')
def sources_from_file() -> list:
    """Read the configured plugin source repositories from the sources
    file, skipping blank lines."""
    with open(get_sources_file(), 'r') as f:
        return [line.strip() for line in f.readlines() if line.strip()]
def load_sources() -> list:
    """Look for the repo sources file.

    Falls back to (and tries to persist) the default lightningd/plugins
    repository when the file does not exist yet."""
    sources_file = get_sources_file()
    # This would have been created if possible
    if not Path(sources_file).exists():
        log.debug('Warning: Reckless requires write access')
        Config(path=str(sources_file),
               default_text='https://github.com/lightningd/plugins')
        return ['https://github.com/lightningd/plugins']
    return sources_from_file()
def add_source(src: str):
    """Additional git repositories, directories, etc. are passed here.

    Returns the updated source list, or None when the input is not
    recognized as a directory or URL."""
    assert isinstance(src, str)
    # Is it a file?
    maybe_path = os.path.realpath(src)
    sources = Config(path=str(get_sources_file()),
                     default_text='https://github.com/lightningd/plugins')
    if Path(maybe_path).exists():
        # NOTE(review): an existing path that is NOT a directory falls
        # through without being added and without any warning.
        if os.path.isdir(maybe_path):
            sources.editConfigFile(src, None)
    elif 'github.com' in src or 'http://' in src or 'https://' in src:
        sources.editConfigFile(src, None)
    else:
        log.warning(f'failed to add source {src}')
        return None
    return sources_from_file()
def remove_source(src: str):
    """Remove a source from the sources file.

    Returns the (possibly unchanged) source list."""
    assert isinstance(src, str)
    if src in sources_from_file():
        my_file = Config(path=get_sources_file(),
                         default_text='https://github.com/lightningd/plugins')
        my_file.editConfigFile(None, src)
        log.info('plugin source removed')
    else:
        log.warning(f'source not found: {src}')
    return sources_from_file()
def list_source():
    """Provide the user with all stored source repositories.

    Returns the list of sources after logging each one."""
    # Read the sources file once instead of twice (the original called
    # sources_from_file() in the loop and again for the return value).
    sources = sources_from_file()
    for src in sources:
        log.info(src)
    return sources
class UpdateStatus(Enum):
    """Result codes returned by update_plugin()."""
    SUCCESS = 0            # updated and re-enabled
    LATEST = 1             # already at the newest available commit
    UNINSTALLED = 2        # plugin is not installed
    ERROR = 3              # locate/install/enable failure
    METADATA_MISSING = 4   # no .metadata file to determine the source
    REFUSING_UPDATE = 5    # pinned to a user-requested commit/tag
def update_plugin(plugin_name: str) -> tuple:
    """Check for an installed plugin, if metadata for it exists, update
    to the latest available while using the same source.

    Returns a (result, UpdateStatus) tuple; result is the installed
    location string on success, otherwise None."""
    log.info(f"updating {plugin_name}")
    if not (Path(RECKLESS_CONFIG.reckless_dir) / plugin_name).exists():
        log.error(f'{plugin_name} is not installed')
        return (None, UpdateStatus.UNINSTALLED)
    metadata_file = Path(RECKLESS_CONFIG.reckless_dir) / plugin_name / '.metadata'
    if not metadata_file.exists():
        log.warning(f"no metadata file for {plugin_name}")
        return (None, UpdateStatus.METADATA_MISSING)
    metadata = {'installation date': None,
                'installation time': None,
                'original source': None,
                'requested commit': None,
                'installed commit': None,
                }
    # The metadata file alternates key and value lines - see
    # add_installation_metadata().
    with open(metadata_file, "r") as meta:
        metadata_lines = meta.readlines()
    for line_no, line in enumerate(metadata_lines):
        if line_no > 0 and metadata_lines[line_no - 1].strip() in metadata:
            metadata.update({metadata_lines[line_no - 1].strip(): line.strip()})
    for key in metadata:
        # Fix: guard against fields absent from a truncated metadata file;
        # calling .lower() on the initial None crashed here.
        if metadata[key] and metadata[key].lower() == 'none':
            metadata[key] = None
    log.debug(f'{plugin_name} previous installation metadata: {str(metadata)}')
    if metadata['requested commit']:
        log.warning(f'refusing to upgrade {plugin_name}@{metadata["requested commit"]} due to previously requested tag/commit')
        return (None, UpdateStatus.REFUSING_UPDATE)
    src = InstInfo(plugin_name,
                   metadata['original source'], None)
    if not src.get_inst_details():
        # Fix: the metadata key is 'original source' (with a space); the
        # previous 'original_source' lookup raised KeyError in this path.
        log.error(f'cannot locate {plugin_name} in original source {metadata["original source"]}')
        return (None, UpdateStatus.ERROR)
    repo_commit = src.get_repo_commit()
    if not repo_commit:
        log.debug('source commit not available')
    else:
        log.debug(f'source commit: {repo_commit}')
    if repo_commit and repo_commit == metadata['installed commit']:
        log.info(f'Installed {plugin_name} is already latest @{repo_commit}')
        return (None, UpdateStatus.LATEST)
    uninstall(plugin_name)
    try:
        installed = _install_plugin(src)
    except FileExistsError as err:
        log.error(f'File exists: {err.filename}')
        return (None, UpdateStatus.ERROR)
    result = _enable_installed(installed, plugin_name)
    if result:
        return (result, UpdateStatus.SUCCESS)
    return (result, UpdateStatus.ERROR)
def update_plugins(plugin_name: str):
    """user requested plugin upgrade(s)

    With a plugin_name, update just that plugin and return its result;
    otherwise update every installed plugin and return the result list."""
    if plugin_name:
        installed = update_plugin(plugin_name)
        if not installed[0] and installed[1] != UpdateStatus.LATEST:
            log.error(f'{plugin_name} update aborted')
        return installed[0]
    log.info("updating all plugins")
    update_results = []
    for plugin in os.listdir(RECKLESS_CONFIG.reckless_dir):
        # Skip loose files and hidden/bookkeeping entries (e.g. .sources).
        if not (Path(RECKLESS_CONFIG.reckless_dir) / plugin).is_dir():
            continue
        if len(plugin) > 0 and plugin[0] == '.':
            continue
        update_results.append(update_plugin(plugin)[0])
    return update_results
def report_version() -> None:
    """Log the reckless version and record it in the json result.
    (Annotation corrected: the function returns nothing.)"""
    log.info(__VERSION__)
    log.add_result(__VERSION__)
def unpack_json_arg(json_target: str) -> Union[list, None]:
    """validate json for any command line targets passed as a json array

    Returns the decoded list, or None when the input is not valid JSON
    or decodes to something other than a list."""
    try:
        targets = json.loads(json_target)
    except json.decoder.JSONDecodeError:
        return None
    if isinstance(targets, list):
        return targets
    # Fix: previously referenced the undefined name 'target_list',
    # raising NameError instead of warning.
    log.warning(f'input {json_target} is not a json array')
    return None
class StoreIdempotent(argparse.Action):
    """Make the option idempotent. This adds a secondary argument that doesn't
    get reinitialized: the value is mirrored into '<dest>_idempotent' so
    process_idempotent_args() can restore values set explicitly on the
    command line after defaults are re-applied."""
    # NOTE(review): `nargs` is accepted but not forwarded to the base
    # class - confirm that is intentional.
    def __init__(self, option_strings, dest, nargs=None, **kwargs):
        super().__init__(option_strings, dest, **kwargs)

    def __call__(self, parser, namespace, values, option_string=None):
        # Only act when the option actually appeared on the command line.
        if option_string:
            setattr(namespace, self.dest, values)
            setattr(namespace, f'{self.dest}_idempotent', values)
class StoreTrueIdempotent(argparse._StoreConstAction):
    """Make the option idempotent: store_true variant of StoreIdempotent,
    mirroring the flag into a '<dest>_idempotent' shadow attribute."""
    def __init__(self, option_strings, dest, default=False,
                 required=False, nargs=None, const=None, help=None):
        super().__init__(option_strings=option_strings, dest=dest,
                         const=const, help=help)

    def __call__(self, parser, namespace, values, option_string=None):
        # Only act when the flag actually appeared on the command line.
        if option_string:
            setattr(namespace, self.dest, True)
            setattr(namespace, f'{self.dest}_idempotent', True)
def process_idempotent_args(args):
    """Fold '<name>_idempotent' attributes back into '<name>'.

    For every attribute that has an idempotent shadow (set by the
    StoreIdempotent/StoreTrueIdempotent actions), copy the shadow's
    value into the plain attribute and drop the shadow.  Returns the
    same namespace, mutated in place.
    """
    live = vars(args)
    for name in list(live):
        shadow = f"{name}_idempotent"
        if shadow in live:
            setattr(args, name, live[shadow])
            delattr(args, shadow)
    return args
if __name__ == '__main__':
    # Build the CLI: one subparser per reckless command, each binding
    # its handler function via set_defaults(func=...).
    parser = argparse.ArgumentParser()
    cmd1 = parser.add_subparsers(dest='cmd1', help='command',
                                 required=False)
    install_cmd = cmd1.add_parser('install', help='search for and install a '
                                  'plugin, then test and activate')
    install_cmd.add_argument('targets', type=str, nargs='*')
    install_cmd.set_defaults(func=install)
    uninstall_cmd = cmd1.add_parser('uninstall', help='deactivate a plugin '
                                    'and remove it from the directory')
    uninstall_cmd.add_argument('targets', type=str, nargs='*')
    uninstall_cmd.set_defaults(func=uninstall)
    search_cmd = cmd1.add_parser('search', help='search for a plugin from '
                                 'the available source repositories')
    search_cmd.add_argument('targets', type=str, nargs='*')
    search_cmd.set_defaults(func=search)
    enable_cmd = cmd1.add_parser('enable', help='dynamically enable a plugin '
                                 'and update config')
    enable_cmd.add_argument('targets', type=str, nargs='*')
    enable_cmd.set_defaults(func=enable)
    disable_cmd = cmd1.add_parser('disable', help='disable a plugin')
    disable_cmd.add_argument('targets', type=str, nargs='*')
    disable_cmd.set_defaults(func=disable)
    source_parser = cmd1.add_parser('source', help='manage plugin search '
                                    'sources')
    source_subs = source_parser.add_subparsers(dest='source_subs',
                                               required=True)
    list_parse = source_subs.add_parser('list', help='list available plugin '
                                        'sources (repositories)')
    list_parse.set_defaults(func=list_source)
    source_add = source_subs.add_parser('add', help='add a source repository')
    source_add.add_argument('targets', type=str, nargs='*')
    source_add.set_defaults(func=add_source)
    source_rem = source_subs.add_parser('remove', aliases=['rem', 'rm'],
                                        help='remove a plugin source '
                                        'repository')
    source_rem.add_argument('targets', type=str, nargs='*')
    source_rem.set_defaults(func=remove_source)
    update = cmd1.add_parser('update', help='update plugins to lastest version')
    update.add_argument('targets', type=str, nargs='*')
    update.set_defaults(func=update_plugins)
    help_cmd = cmd1.add_parser('help', help='for contextual help, use '
                               '"reckless <cmd> -h"')
    help_cmd.add_argument('targets', type=str, nargs='*')
    help_cmd.set_defaults(func=help_alias)
    parser.add_argument('-V', '--version',
                        action=StoreTrueIdempotent, const=None,
                        help='print version and exit')
    # Shared options are attached to every (sub)parser so they may appear
    # before or after the subcommand.  The *Idempotent actions record an
    # explicitly supplied value in a shadow attribute so a later parser
    # pass cannot clobber it with a default; process_idempotent_args
    # restores the shadow values after parsing.
    all_parsers = [parser, install_cmd, uninstall_cmd, search_cmd, enable_cmd,
                   disable_cmd, list_parse, source_add, source_rem, help_cmd,
                   update]
    for p in all_parsers:
        # This default depends on the .lightning directory
        p.add_argument('-d', '--reckless-dir', action=StoreIdempotent,
                       help='specify a data directory for reckless to use',
                       type=str, default=None)
        p.add_argument('-l', '--lightning', type=str, action=StoreIdempotent,
                       help='lightning data directory '
                       '(default:~/.lightning)',
                       default=Path.home().joinpath('.lightning'))
        p.add_argument('-c', '--conf', action=StoreIdempotent,
                       help=' config file used by lightningd',
                       type=str,
                       default=None)
        p.add_argument('-r', '--regtest', action=StoreTrueIdempotent)
        p.add_argument('--network', action=StoreIdempotent,
                       help="specify a network to use (default: bitcoin)",
                       type=str)
        p.add_argument('-v', '--verbose', action=StoreTrueIdempotent,
                       const=None)
        p.add_argument('-j', '--json', action=StoreTrueIdempotent,
                       help='output in json format')
    args = parser.parse_args()
    # Swap the idempotently captured values back into the plain arg names.
    args = process_idempotent_args(args)
    # In json mode, logging is captured and emitted as one json reply at
    # exit (log.reply_json() below); subprocess output is piped as well.
    if args.json:
        log.capture = True
        stdout_redirect = PIPE
        stderr_redirect = PIPE
    else:
        stdout_redirect = None
        stderr_redirect = None
    if args.verbose:
        logging.root.setLevel(logging.DEBUG)
    else:
        logging.root.setLevel(logging.INFO)
    # -r/--regtest is shorthand for regtest; an explicit --network choice
    # below can still override it.
    NETWORK = 'regtest' if args.regtest else 'bitcoin'
    SUPPORTED_NETWORKS = ['bitcoin', 'regtest', 'liquid', 'liquid-regtest',
                          'signet', 'testnet', 'testnet4']
    if args.version:
        report_version()
    elif args.cmd1 is None:
        # No subcommand and no -V: print usage and exit with an error.
        parser.print_help(sys.stdout)
        sys.exit(1)
    if args.network:
        if args.network in SUPPORTED_NETWORKS:
            NETWORK = args.network
        else:
            log.error(f"{args.network} network not supported")
    LIGHTNING_DIR = Path(args.lightning)
    # This env variable is set under CI testing
    LIGHTNING_CLI_CALL = [os.environ.get('LIGHTNING_CLI')]
    if LIGHTNING_CLI_CALL == [None]:
        LIGHTNING_CLI_CALL = ['lightning-cli']
    # Forward non-default network/lightning-dir to lightning-cli calls.
    if NETWORK != 'bitcoin':
        LIGHTNING_CLI_CALL.append(f'--network={NETWORK}')
    if LIGHTNING_DIR != Path.home().joinpath('.lightning'):
        LIGHTNING_CLI_CALL.append(f'--lightning-dir={LIGHTNING_DIR}')
    if args.reckless_dir:
        RECKLESS_DIR = Path(args.reckless_dir)
    else:
        RECKLESS_DIR = Path(LIGHTNING_DIR) / 'reckless'
    LIGHTNING_CONFIG = args.conf
    RECKLESS_CONFIG = load_config(reckless_dir=str(RECKLESS_DIR),
                                  network=NETWORK)
    RECKLESS_SOURCES = load_sources()
    API_GITHUB_COM = 'https://api.github.com'
    GITHUB_COM = 'https://github.com'
    # Used for blackbox testing to avoid hitting github servers
    if 'REDIR_GITHUB_API' in os.environ:
        API_GITHUB_COM = os.environ['REDIR_GITHUB_API']
    if 'REDIR_GITHUB' in os.environ:
        GITHUB_COM = os.environ['REDIR_GITHUB']
    GITHUB_API_FALLBACK = False
    if 'GITHUB_API_FALLBACK' in os.environ:
        GITHUB_API_FALLBACK = os.environ['GITHUB_API_FALLBACK']
    if 'targets' in args:  # and len(args.targets) > 0:
        # 'help' is special-cased: it takes the whole target list at once.
        if args.func.__name__ == 'help_alias':
            args.func(args.targets)
            sys.exit(0)
        # Catch a missing argument so that we can overload functions.
        if len(args.targets) == 0:
            args.targets=[None]
        for target in args.targets:
            # Accept single item arguments, or a json array
            try:
                target_list = unpack_json_arg(target)
                if target_list:
                    for tar in target_list:
                        log.add_result(args.func(tar))
                else:
                    log.add_result(args.func(target))
            except TypeError:
                # A non-string target (the [None] placeholder above) makes
                # json.loads raise TypeError; invoke the handler directly.
                if len(args.targets) == 1:
                    log.add_result(args.func(target))
    elif 'func' in args:
        # Subcommands without a 'targets' argument (e.g. 'source list').
        log.add_result(args.func())
    if log.capture:
        log.reply_json()