diff --git a/tests/conftest.py b/tests/conftest.py index bd0b7a309275..7cf847a6f2eb 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -26,6 +26,23 @@ def pytest_configure(config): "slow_test: slow tests aren't run under Valgrind") config.addinivalue_line("markers", "openchannel: Limit this test to only run 'v1' or 'v2' openchannel protocol") + config.addinivalue_line("markers", + "vls: mark test as using VLS (Validating Lightning Signer) for signing operations") + + # VLS testing is opt-in via exactly `-m vls`. Without it, vls-marked + # tests still run but are forced to use_vls=False (see fixtures.py). + # With it, abort the session early if the signer is not available. + if (config.getoption("markexpr") or "").strip() == "vls": + if not os.environ.get('REMOTE_SIGNER_PATH') and not os.environ.get('VLS_AUTO_BUILD'): + raise pytest.UsageError( + 'VLS tests selected via `-m vls` but neither REMOTE_SIGNER_PATH ' + '(path to a pre-built vlsd) nor VLS_AUTO_BUILD=1 is set.' + ) + if os.environ.get('REMOTE_SIGNER_PATH') and os.environ.get('VLS_AUTO_BUILD'): + raise pytest.UsageError( + 'REMOTE_SIGNER_PATH and ' + 'VLS_AUTO_BUILD are mutually exclusive' + ) def pytest_runtest_setup(item): diff --git a/tests/fixtures.py b/tests/fixtures.py index 186d8723758b..754d2cec3fa1 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -1,6 +1,7 @@ -from utils import TEST_NETWORK, VALGRIND # noqa: F401,F403 -from pyln.testing.fixtures import directory, test_base_dir, test_name, chainparams, node_factory, bitcoind, teardown_checks, db_provider, executor, setup_logging, jsonschemas # noqa: F401,F403 +from utils import TEST_NETWORK, BITCOIND_CONFIG, VALGRIND # noqa: F401,F403 +from pyln.testing.fixtures import directory, test_base_dir, test_name, chainparams, bitcoind, teardown_checks, db_provider, executor, setup_logging, jsonschemas # noqa: F401,F403 from pyln.testing import utils +from pyln.testing.utils import NodeFactory as _NodeFactory from utils import COMPAT from 
pathlib import Path @@ -11,20 +12,80 @@ import subprocess import tempfile import time +from pyln.testing.utils import env +from vls import ValidatingLightningSignerD + + +class NodeFactory(_NodeFactory): + """Make `use_vls` option reaches the `LightningNode.__init__` in + `NodeFactory` as node-level kwarg instead of being forwarded as a + lightningd CLI flag.""" + + def split_options(self, opts): + node_opts, cli_opts = super().split_options(opts) + if 'use_vls' in cli_opts: + node_opts['use_vls'] = cli_opts.pop('use_vls') + return node_opts, cli_opts @pytest.fixture def node_cls(): return LightningNode +# Override the default fixture to use the new `NodeFactory` which supports `use_vls` as a node-level option. + + +@pytest.fixture +def node_factory(request, directory, test_name, bitcoind, executor, db_provider, teardown_checks, node_cls, jsonschemas): # noqa: F811 + nf = NodeFactory( + request, + test_name, + bitcoind, + executor, + directory=directory, + db_provider=db_provider, + node_cls=node_cls, + jsonschemas=jsonschemas, + ) + + yield nf + ok, errs = nf.killall([not n.may_fail for n in nf.nodes]) + + for e in errs: + print(e.format()) + + if not ok: + raise Exception("At least one lightning exited with unexpected non-zero return code") + + +@pytest.fixture +def use_vls(pytestconfig): + # This fixture is used to mark tests as using VLS. It doesn't do anything + # by itself, but it allows us to select tests with `-m vls` and to skip + # them if the signer is not available. + markerexpr = pytestconfig.getoption("markexpr") or "" + return "vls" in markerexpr.split() + class LightningNode(utils.LightningNode): - def __init__(self, *args, **kwargs): + def __init__(self, *args, use_vls=False, **kwargs): # Yes, we really want to test the local development version, not # something in out path. kwargs["executable"] = "lightningd/lightningd" utils.LightningNode.__init__(self, *args, **kwargs) + # node_id is pyln's first positional arg; keep it for the VLS label. 
+ self._node_id = args[0] if args else kwargs["node_id"] + self.network = TEST_NETWORK + + if use_vls is True: + self.vls_mode = "cln:socket" + elif use_vls is False: + self.vls_mode = "cln:native" + + self.use_vls = self.vls_mode == "cln:socket" + self.vlsd: ValidatingLightningSignerD | None = None + # Avoid socket path name too long on Linux if os.uname()[0] == 'Linux' and \ len(str(self.lightning_dir / TEST_NETWORK / 'lightning-rpc')) >= 108: @@ -61,6 +122,56 @@ def __init__(self, *args, **kwargs): accts_db = self.db.provider.get_db('', 'accounts', 0) self.daemon.opts['bookkeeper-db'] = accts_db.get_dsn() + def start(self, wait_for_bitcoind_sync=True, stderr_redir=False): + # Start the signer first and wait for it to be up, otherwise lightningd + # hangs on the hsmd init message. + if self.use_vls: + self.vlsd = ValidatingLightningSignerD( + lightning_dir=self.lightning_dir, + node_id=self._node_id, + network=self.network, + ) + self.daemon.opts["subdaemon"] = f"hsmd:{self.vlsd.remote_socket}" + + # FIXME: VLS doesn't implement WIRE_HSMD_SIGN_SPLICE_TX, so lightningd + # would fatal() during hsm_init if OPT_SPLICE (bit 62) is offered. + # Strip the optional splice bit (63 = OPTIONAL_FEATURE(62)). + self.daemon.opts["dev-force-features"] = "-63" + + # These are consumed by lightningd's remote_hsmd_socket bridge; + # they go on the daemon's per-proc env so each node gets its own + # signer configuration (see test-env precedence in pyln). + self.daemon.env["VLS_PORT"] = str(self.vlsd.port) + self.daemon.env["VLS_LSS"] = env("LSS_URI", "") + self.daemon.env["VLS_NETWORK"] = env("VLS_NETWORK", self.network) + self.daemon.env["BITCOIND_RPC_URL"] = env( + "BITCOIND_RPC_URL", + f"http://{BITCOIND_CONFIG['rpcuser']}:{BITCOIND_CONFIG['rpcpassword']}@127.0.0.1:{self.bitcoin.rpcport}", + ) + # We must feed `remote_hsmd_socket` (via VLS_CLN_VERSION) *just* + # the bare version, otherwise the check fails and lightningd + # exits before spawning hsmd. 
+ raw = subprocess.check_output( + [self.daemon.executable, "--version"] + ).decode("ascii") + cln_version = next( + line for line in reversed(raw.splitlines()) if line.strip() + ) + self.daemon.env["VLS_CLN_VERSION"] = env("VLS_CLN_VERSION", cln_version) + self.vlsd.start() + + utils.LightningNode.start( + self, + wait_for_bitcoind_sync=wait_for_bitcoind_sync, + stderr_redir=stderr_redir, + ) + + def stop(self, timeout: int = 10): + utils.LightningNode.stop(self, timeout=timeout) + if self.vlsd is not None: + rc = self.vlsd.stop(timeout=timeout) + print(f"VLSD2 exited with rc={rc}") + class CompatLevel(object): """An object that encapsulates the compat-level of our build. diff --git a/tests/test_pay.py b/tests/test_pay.py index 9d1fc2f619d5..b289476a8f7e 100644 --- a/tests/test_pay.py +++ b/tests/test_pay.py @@ -22,6 +22,45 @@ import unittest +@pytest.mark.vls +@pytest.mark.openchannel('v1') +@pytest.mark.openchannel('v2') +def test_vls_simple_send(node_factory, use_vls): + l1, l2 = node_factory.line_graph(2, opts=[{'use_vls': use_vls}, {}]) + + inv = l2.rpc.invoice(123000, 'test_vls_simple', 'description')['bolt11'] + details = l1.dev_pay(inv, dev_use_shadow=False) + assert details['status'] == 'complete' + assert details['amount_msat'] == Millisatoshi(123000) + assert details['destination'] == l2.info['id'] + + +@pytest.mark.vls +@pytest.mark.openchannel('v1') +@pytest.mark.openchannel('v2') +def test_vls_simple_receive(node_factory, use_vls): + l1, l2 = node_factory.line_graph(2, opts=[{}, {'use_vls': use_vls}]) + + inv = l2.rpc.invoice(123000, 'test_vls_simple', 'description')['bolt11'] + details = l1.dev_pay(inv, dev_use_shadow=False) + assert details['status'] == 'complete' + assert details['amount_msat'] == Millisatoshi(123000) + assert details['destination'] == l2.info['id'] + + +@pytest.mark.vls +@pytest.mark.openchannel('v1') +@pytest.mark.openchannel('v2') +def test_vls_simple_route(node_factory, use_vls): + l1, l2, l3 = node_factory.line_graph(3, 
opts=[{}, {}, {'use_vls': use_vls}]) + + inv = l3.rpc.invoice(123000, 'test_vls_simple', 'description')['bolt11'] + details = l1.dev_pay(inv, dev_use_shadow=False) + assert details['status'] == 'complete' + assert details['amount_msat'] == Millisatoshi(123000) + assert details['destination'] == l3.info['id'] + + @pytest.mark.openchannel('v1') @pytest.mark.openchannel('v2') def test_pay(node_factory): diff --git a/tests/utils.py b/tests/utils.py index 8647c16e6436..c426859737e1 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -2,7 +2,7 @@ from pyln.testing.utils import env, only_one, wait_for, write_config, TailableProc, sync_blockheight, wait_channel_quiescent, get_tx_p2wsh_outnum, mine_funding_to_announce, scid_to_int # noqa: F401 import bitstring from pyln.client import Millisatoshi -from pyln.testing.utils import EXPERIMENTAL_DUAL_FUND +from pyln.testing.utils import EXPERIMENTAL_DUAL_FUND, BITCOIND_CONFIG # noqa: F401 from pyln.proto.onion import TlvPayload import struct import subprocess diff --git a/tests/vls.py b/tests/vls.py new file mode 100644 index 000000000000..b2e3624fd034 --- /dev/null +++ b/tests/vls.py @@ -0,0 +1,103 @@ +from pyln.testing.utils import TailableProc, env, reserve_unused_port +from pathlib import Path +from subprocess import run +import logging +import os + + +REPOS = ["https://gitlab.com/lightning-signer/validating-lightning-signer.git"] + + +def _resolve_executable(datadir: Path) -> Path: + """ + Return the path where the vlsd executable can be found. + """ + prebuilt = os.environ.get("REMOTE_SIGNER_PATH") + if prebuilt: + path = Path(os.path.expanduser(prebuilt)).resolve() + if not path.exists(): + raise RuntimeError(f"REMOTE_SIGNER_PATH={prebuilt} does not exist") + return path + + if os.environ.get("VLS_AUTO_BUILD") != "1": + raise RuntimeError( + "No VLS binary available: set REMOTE_SIGNER_PATH to a pre-built " + "vlsd, or VLS_AUTO_BUILD=1 to clone and compile it." 
+ ) + + signer_folder = REPOS[0].split("/")[-1].removesuffix(".git") + vlsd_dir = (datadir / signer_folder).resolve() + logging.info(f"Cloning {REPOS[0]} into {vlsd_dir}") + run(["git", "clone", REPOS[0]], cwd=datadir, check=True, timeout=120) + cargo_target_dir = os.environ.get("CARGO_TARGET_DIR") + target_dir = ( + Path(os.path.expanduser(cargo_target_dir)).resolve() + if cargo_target_dir + else vlsd_dir / "target" + ) + logging.info(f"Building vlsd in {vlsd_dir} (target dir: {target_dir})") + run(["cargo", "build", "--features", "developer"], + cwd=vlsd_dir, check=True, timeout=600) + return (target_dir / "debug" / "vlsd").resolve() + + +class ValidatingLightningSignerD(TailableProc): + def __init__(self, lightning_dir, node_id, network): + # Each node gets its own datadir and socket, so multiple nodes can run + # their own signer in parallel even when sharing a prebuilt binary. + self.datadir = (Path(lightning_dir) / "vlsd").resolve() + self.datadir.mkdir(exist_ok=True, parents=True) + + self.executable = str(_resolve_executable(self.datadir)) + self.bin_dir = Path(self.executable).parent + self.port = reserve_unused_port() + self.rpc_port = reserve_unused_port() + self.remote_socket = (self.bin_dir / "remote_hsmd_socket").resolve() + if not self.remote_socket.exists(): + raise RuntimeError( + f"remote_hsmd_socket binary not found next to vlsd at {self.remote_socket}" + ) + + TailableProc.__init__(self, self.datadir, verbose=True) + # Set ALLOWLIST on the signer's proc env instead of os.environ so + # multiple signers can coexist without the test coordinator leaking + # state between them. + allowlist = os.environ.get("REMOTE_SIGNER_ALLOWLIST") + if allowlist: + self.env["ALLOWLIST"] = allowlist + else: + logging.warning( + "REMOTE_SIGNER_ALLOWLIST is not set; vlsd will start without " + "an allowlist. Point it at an absolute path to override." 
+ ) + self.env["VLS_AUTOAPPROVE"] = env("VLS_AUTO_APPROVE", "1") + self.opts = [ + f"--network={network}", + f"--datadir={self.datadir}", + f"--connect=http://localhost:{self.port}", + f"--rpc-server-port={self.rpc_port}", + f"--rpc-user=bitcoind", + f"--rpc-pass=bitcoind" + ] + self.prefix = "vlsd-%d" % node_id + + @property + def cmd_line(self): + return [self.executable] + self.opts + + def start(self, stdin=None, stdout_redir=True, stderr_redir=True): + TailableProc.start(self, stdin, stdout_redir, stderr_redir) + self.wait_for_log("vlsd git_desc") + logging.info("vlsd started") + + def stop(self, timeout=10): + logging.info("stopping vlsd") + rc = TailableProc.stop(self, timeout) + logging.info("vlsd stopped") + self.logs_catchup() + return rc + + def __del__(self): + # __init__ may have raised before TailableProc finished setup. + if hasattr(self, "stdout_read"): + self.logs_catchup()