Files
palladum-lightning/common/jsonrpc_io.c
Rusty Russell d2a6091149 common: increase jsonrpc_io buffer size temporarily to aggravate perf issues.
A client can do this by sending a large request, so this allows us to see what
happens if they do that, even though 1MB (2MB buffer) is more than we need.

This drives our performance through the floor: see next patch which gets
us back on track.

tests/test_coinmoves.py::test_generate_coinmoves (2,000,000, sqlite3):
	Time (from start to end of l2 node):	 271 seconds **WAS 135**
	Worst latency:				 105 seconds **WAS 12.1**

Signed-off-by: Rusty Russell <rusty@rustcorp.com.au>
2025-11-20 16:30:50 +10:30

153 lines
3.7 KiB
C

#include "config.h"
#include <ccan/io/io.h>
#include <ccan/membuf/membuf.h>
#include <ccan/tal/str/str.h>
#include <common/jsonrpc_io.h>
#include <common/utils.h>
#include <errno.h>
#include <unistd.h>
#define READ_CHUNKSIZE (1024*1024)
/* Incremental JSON-RPC reader: accumulates raw bytes in a membuf and
 * parses them with jsmn once a complete object has arrived. */
struct jsonrpc_io {
	/* Unparsed bytes: new reads land in the "space" region, then are
	 * promoted to "elems" by add_newly_read(). */
	MEMBUF(char) membuf;
	/* jsmn incremental parser state; re-initialized after each message. */
	jsmn_parser parser;
	/* Token array (tal-allocated via toks_alloc) for the current message. */
	jsmntok_t *toks;
	/* Amount just read by io_read_partial */
	size_t bytes_read;
};
/* Allocate a fresh jsonrpc_io off @ctx, with an initial buffer of
 * two read-chunks so a chunk-sized read never forces an immediate resize. */
struct jsonrpc_io *jsonrpc_io_new(const tal_t *ctx)
{
	const size_t initial_size = 2 * READ_CHUNKSIZE;
	struct jsonrpc_io *io = tal(ctx, struct jsonrpc_io);

	jsmn_init(&io->parser);
	io->toks = toks_alloc(io);
	io->bytes_read = 0;
	membuf_init(&io->membuf,
		    tal_arr(io, char, initial_size),
		    initial_size, membuf_tal_resize);
	return io;
}
/* Empty new bytes read into our unparsed buffer */
static void add_newly_read(struct jsonrpc_io *json_in)
{
/* Now added it to our unparsed buffer */
assert(json_in->bytes_read <= membuf_num_space(&json_in->membuf));
membuf_added(&json_in->membuf, json_in->bytes_read);
json_in->bytes_read = 0;
}
/* Commit the freshly-read bytes to the buffer and return a pointer to
 * their start; *len is set to how many bytes were just read. */
const char *jsonrpc_newly_read(struct jsonrpc_io *json_in,
			       size_t *len)
{
	const size_t just_read = json_in->bytes_read;

	*len = just_read;
	/* This zeroes bytes_read and advances the space pointer... */
	add_newly_read(json_in);
	/* ...so step back to where the new bytes begin. */
	return membuf_space(&json_in->membuf) - just_read;
}
/* Try to parse one complete JSON-RPC message out of the buffered bytes.
 *
 * On success *toks/*buf point at the token array and the raw buffer
 * (both owned by @json_in) and NULL is returned; caller must call
 * jsonrpc_io_parse_done() once finished with them.  If the message is
 * not yet complete, *toks/*buf are NULL and NULL is returned.  On a
 * malformed message, returns a tal-allocated error string off @ctx. */
const char *jsonrpc_io_parse(const tal_t *ctx,
			     struct jsonrpc_io *json_in,
			     const jsmntok_t **toks,
			     const char **buf)
{
	bool complete;

	/* If we've read any more, add that */
	add_newly_read(json_in);

	*toks = NULL;
	*buf = NULL;

	/* Our JSON parser is pretty good at incremental parsing, but
	 * `getrawblock` gives a giant 2MB token, which forces it to re-parse
	 * every time until we have all of it.  However, we can't complete a
	 * JSON object without a '}', so we do a cheaper check here.
	 */
	if (!memchr(membuf_elems(&json_in->membuf), '}',
		    membuf_num_elems(&json_in->membuf)))
		return NULL;

	/* Feed everything buffered so far to the incremental parser. */
	if (!json_parse_input(&json_in->parser, &json_in->toks,
			      membuf_elems(&json_in->membuf),
			      membuf_num_elems(&json_in->membuf),
			      &complete)) {
		return tal_fmt(ctx, "Failed to parse RPC JSON response '%.*s'",
			       (int)membuf_num_elems(&json_in->membuf),
			       membuf_elems(&json_in->membuf));
	}

	/* Valid so far, but object not closed yet: wait for more bytes. */
	if (!complete)
		return NULL;

	/* Must have jsonrpc to be valid! */
	if (!json_get_member(membuf_elems(&json_in->membuf),
			     json_in->toks,
			     "jsonrpc")) {
		return tal_fmt(ctx,
			       "JSON-RPC message does not contain \"jsonrpc\" field: '%.*s'",
			       (int)membuf_num_elems(&json_in->membuf),
			       membuf_elems(&json_in->membuf));
	}

	*toks = json_in->toks;
	*buf = membuf_elems(&json_in->membuf);
	return NULL;
}
/* Discard the message just parsed and reset state for the next one. */
void jsonrpc_io_parse_done(struct jsonrpc_io *json_in)
{
	/* The root token's end offset is exactly how many bytes we parsed. */
	membuf_consume(&json_in->membuf, json_in->toks[0].end);
	toks_reset(json_in->toks);
	jsmn_init(&json_in->parser);
}
/* Async read step: ensure buffer space, then queue a partial read whose
 * byte count lands in json_in->bytes_read before @next is called. */
struct io_plan *jsonrpc_io_read_(struct io_conn *conn,
				 struct jsonrpc_io *json_in,
				 struct io_plan *(*next)(struct io_conn *,
							 void *),
				 void *arg)
{
	/* Guarantee at least one chunk of free space to read into. */
	membuf_prepare_space(&json_in->membuf, READ_CHUNKSIZE);
	json_in->bytes_read = 0;

	/* Partial read: completes with whatever is available. */
	return io_read_partial(conn,
			       membuf_space(&json_in->membuf),
			       membuf_num_space(&json_in->membuf),
			       &json_in->bytes_read,
			       next, arg);
}
/* Blocking read step: read whatever is available from @infd into the
 * buffer's free space, recording the count in json_in->bytes_read.
 *
 * Returns false on error (errno set by read()) or on EOF (errno
 * cleared to 0 so the caller can tell the two apart); true otherwise.
 */
bool jsonrpc_sync_read(struct jsonrpc_io *json_in, int infd)
{
	/* read(2) returns ssize_t, not int: use the correct type. */
	ssize_t r;

	/* Make sure there's more room */
	membuf_prepare_space(&json_in->membuf, READ_CHUNKSIZE);

	/* Try to read more. */
	r = read(infd,
		 membuf_space(&json_in->membuf),
		 membuf_num_space(&json_in->membuf));
	if (r < 0)
		return false;
	if (r == 0) {
		/* EOF: zero errno distinguishes this from an error. */
		errno = 0;
		return false;
	}
	json_in->bytes_read = r;
	return true;
}