Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
22 commits
Select commit Hold shift + click to select a range
52a2a1e
make: Add configuration detection and linking of libpq
cdecker Aug 30, 2019
4f2c8a2
pytest: Add db_provider and db instances for configurable backends
cdecker Aug 30, 2019
42118f2
cli: Add command line option to specify the wallet location
cdecker Sep 2, 2019
5d1ac31
postgres: Add postgres statement rewriting support
cdecker Sep 2, 2019
30af216
db: Move remainder of the sqlite3 into the appropriate file
cdecker Sep 3, 2019
bdcda55
db: Reorder migrations to reflect their relationship
cdecker Sep 5, 2019
6d0172d
db: Switch statement lookup to use the original query instead
cdecker Sep 5, 2019
112a48d
db: Adjust some db migrations to be compatible with postgres
cdecker Sep 7, 2019
6a7c92a
db: Implement postgres driver primitives
cdecker Sep 7, 2019
df570e3
db: Allow some internal queries to fail
cdecker Sep 10, 2019
5d94267
db: Select driver by dsn prefix
cdecker Sep 10, 2019
b01a6e1
db: Split the vars table to have type-specific columns
cdecker Sep 10, 2019
5f92f9f
db: Implement SQL statement rewriting
cdecker Sep 10, 2019
3037d9e
db: Change migrations to use types of the correct cardinality
cdecker Sep 10, 2019
fb719b3
db: Strengthen some null-checks on queries
cdecker Sep 10, 2019
46e569c
db: Check execution when accessing the result of a statement
cdecker Sep 12, 2019
0fc9fac
db: Change table field types to be more specific
cdecker Sep 12, 2019
3f03285
db: Adjust queries to work with postgres
cdecker Sep 12, 2019
5e4c3af
pytest: Skip some tests that assume we have a sqlite3 db on postgres
cdecker Sep 12, 2019
4abbd6f
pytest: Consolidate fee-fetching in test_setchannelfee_usage
cdecker Sep 12, 2019
a4dc503
pytest: Have the DB provider search for the postgres binary
cdecker Sep 14, 2019
951193e
pytest: Stabilize test_no_fee_estimate against UTXO selection issues
cdecker Sep 14, 2019
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@
*.gcno
*.dSYM
*.rej
*.po
*.pyc
.cppcheck-suppress
TAGS
Expand Down
5 changes: 5 additions & 0 deletions Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -201,6 +201,11 @@ else
LDLIBS = -L/usr/local/lib -lm -lgmp -lsqlite3 -lz $(COVFLAGS)
endif

# If we have the postgres client library we need to link against it as well
ifeq ($(HAVE_POSTGRES),1)
LDLIBS += -lpq
endif

default: all-programs all-test-programs

ccan/config.h: config.vars configure ccan/tools/configurator/configurator.c
Expand Down
14 changes: 14 additions & 0 deletions configure
Original file line number Diff line number Diff line change
Expand Up @@ -273,6 +273,20 @@ int main(void)
return 0;
}
/*END*/
var=HAVE_POSTGRES
desc=postgres
style=DEFINES_EVERYTHING|EXECUTE|MAY_NOT_COMPILE
link=-lpq
code=
#include <postgresql/libpq-fe.h>
#include <stdio.h>

int main(void)
{
printf("libpq version %d\n", PQlibVersion());
return 0;
}
/*END*/
var=HAVE_GCC
desc=compiler is GCC
style=OUTSIDE_MAIN
Expand Down
64 changes: 60 additions & 4 deletions devtools/sql-rewrite.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,16 +2,73 @@

from mako.template import Template

import re
import sys


class Sqlite3Rewriter(object):
def rewrite(self, query):
# Global switch for debug output; flip to True to trace rewrites on stderr.
DEBUG = False


def eprint(*args, **kwargs):
    """Print debug output to stderr, but only when DEBUG is enabled."""
    if DEBUG:
        print(*args, **kwargs, file=sys.stderr)


class Rewriter(object):
    """Base class for dialect-specific SQL rewriters.

    Subclasses override `rewrite_single` to translate one statement into
    their target dialect; `rewrite` applies that translation to every
    entry of a list of query dicts, in place.
    """

    def rewrite_types(self, query, mapping):
        """Apply each regex-pattern -> replacement pair in `mapping` to `query`."""
        for pattern, replacement in mapping.items():
            query = re.sub(pattern, replacement, query)
        return query

    def rewrite_single(self, query):
        """Identity transform; dialect subclasses override this."""
        return query

    def rewrite(self, queries):
        """Rewrite the 'query' field of each entry in place; return the list."""
        for idx, entry in enumerate(queries):
            original = entry['query']
            queries[idx]['query'] = self.rewrite_single(original)
            eprint("Rewritten statement\n\tfrom {}\n\t to {}".format(original, entry['query']))
        return queries


class Sqlite3Rewriter(Rewriter):
    """Rewrites portable SQL into the sqlite3 dialect."""

    def rewrite_single(self, query):
        """Map portable types/constructs onto their sqlite3 equivalents.

        Note: the substitutions are applied in insertion order, so the
        longer 'BIGINTEGER' pattern must come before the plain 'BIGINT'
        pattern; otherwise 'BIGINTEGER' is first mangled into
        'INTEGEREGER' by the 'BIGINT' substitution and the 'BIGINTEGER'
        mapping never matches.
        """
        typemapping = {
            r'BIGINTEGER': 'INTEGER',
            r'BIGSERIAL': 'INTEGER',
            r'BIGINT': 'INTEGER',
            r'CURRENT_TIMESTAMP\(\)': "strftime('%s', 'now')",
            r'INSERT INTO[ \t]+(.*)[ \t]+ON CONFLICT.*DO NOTHING;': 'INSERT OR IGNORE INTO \\1;',
        }
        return self.rewrite_types(query, typemapping)


class PostgresRewriter(Rewriter):
    """Rewrites portable SQL into the postgres dialect."""

    def rewrite_single(self, q):
        # Convert '?' placeholders into postgres-style numbered
        # parameters: $1, $2, ...
        fragments = q.split('?')
        query = fragments[0]
        for count, fragment in enumerate(fragments[1:], start=1):
            query += "${}".format(count) + fragment

        typemapping = {
            r'BLOB': 'BYTEA',
            r'CURRENT_TIMESTAMP\(\)': "EXTRACT(epoch FROM now())",
        }

        query = self.rewrite_types(query, typemapping)
        return query


# Map each supported backend name (the DSN scheme) to the rewriter that
# translates the portable SQL statements into that backend's dialect.
rewriters = {
    "sqlite3": Sqlite3Rewriter(),
    "postgres": PostgresRewriter(),
}

template = Template("""#ifndef LIGHTNINGD_WALLET_GEN_DB_${f.upper()}
Expand Down Expand Up @@ -62,7 +119,6 @@ def chunk(pofile):

queries = []
for c in chunk(pofile):
name = c[0][3:]

# Skip other comments
i = 1
Expand All @@ -73,7 +129,7 @@ def chunk(pofile):
query = c[i][7:][:-1]

queries.append({
'name': name,
'name': query,
'query': query,
'placeholders': query.count('?'),
'readonly': "true" if query.upper().startswith("SELECT") else "false",
Expand Down
6 changes: 6 additions & 0 deletions doc/lightningd-config.5
Original file line number Diff line number Diff line change
Expand Up @@ -149,6 +149,12 @@ is a relative path, it is relative to the starting directory, not
readable (we allow missing files in the default case)\. Using this inside
a configuration file is meaningless\.


\fBwallet\fR=\fIDSN\fR
Identify the location of the wallet\. This is a fully qualified data source
name, including a scheme such as \fBsqlite3\fR or \fBpostgres\fR followed by the
connection parameters\.

.SH Lightning node customization options

\fBalias\fR=\fIRRGGBB\fR
Expand Down
5 changes: 5 additions & 0 deletions doc/lightningd-config.5.md
Original file line number Diff line number Diff line change
Expand Up @@ -128,6 +128,11 @@ is a relative path, it is relative to the starting directory, not
readable (we allow missing files in the default case). Using this inside
a configuration file is meaningless.

**wallet**=*DSN*
Identify the location of the wallet. This is a fully qualified data source
name, including a scheme such as `sqlite3` or `postgres` followed by the
connection parameters.

### Lightning node customization options

**alias**=*RRGGBB*
Expand Down
2 changes: 2 additions & 0 deletions lightningd/lightningd.h
Original file line number Diff line number Diff line change
Expand Up @@ -228,6 +228,8 @@ struct lightningd {
const char *original_directory;

struct plugins *plugins;

char *wallet_dsn;
};

/* Turning this on allows a tal allocation to return NULL, rather than aborting.
Expand Down
5 changes: 5 additions & 0 deletions lightningd/options.c
Original file line number Diff line number Diff line change
Expand Up @@ -808,6 +808,11 @@ static void handle_minimal_config_opts(struct lightningd *ld,
opt_ignore_talstr, opt_show_charp,
&ld->config_dir,
"Set working directory. All other files are relative to this");

ld->wallet_dsn = tal_fmt(ld, "sqlite3://%s/lightningd.sqlite3", ld->config_dir);
opt_register_early_arg("--wallet", opt_set_talstr, NULL,
&ld->wallet_dsn,
"Location of the wallet database.");
}

static void register_opts(struct lightningd *ld)
Expand Down
1 change: 0 additions & 1 deletion requirements.txt
Original file line number Diff line number Diff line change
@@ -1,3 +1,2 @@
sqlparse==0.3.0
mako==1.0.14
mrkd==0.1.5
197 changes: 197 additions & 0 deletions tests/db.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,197 @@
from ephemeral_port_reserve import reserve
from glob import glob

import logging
import os
import psycopg2
import random
import re
import shutil
import signal
import sqlite3
import string
import subprocess
import time


class Sqlite3Db(object):
    """Accessor for a node's on-disk sqlite3 wallet database.

    Reads are run against a snapshot copy of the database file so we
    never contend with locks held by the running lightningd.
    """

    def __init__(self, path):
        # Filesystem path of the node's lightningd.sqlite3 file.
        self.path = path

    def get_dsn(self):
        """SQLite3 doesn't provide a DSN, resulting in no CLI-option."""
        return None

    def query(self, query):
        """Run a read-only query against a snapshot copy of the DB.

        Returns a list of dicts, one per row, keyed by column name.
        """
        # Copy first so we don't interfere with locks held on the live file.
        copy = self.path + ".copy"
        shutil.copyfile(self.path, copy)
        db = sqlite3.connect(copy)
        try:
            db.row_factory = sqlite3.Row
            c = db.cursor()
            c.execute(query)
            result = [dict(zip(row.keys(), row)) for row in c.fetchall()]
            db.commit()
            c.close()
        finally:
            # Close even if the query raised, so we don't leak the handle.
            db.close()
        return result

    def execute(self, query):
        """Execute a (write) statement directly against the live DB file."""
        db = sqlite3.connect(self.path)
        try:
            c = db.cursor()
            c.execute(query)
            db.commit()
            c.close()
        finally:
            db.close()


class PostgresDb(object):
    """Accessor for a single per-test database on the shared postgres server."""

    def __init__(self, dbname, port):
        self.dbname = dbname
        self.port = port

        self.conn = psycopg2.connect("dbname={dbname} user=postgres host=localhost port={port}".format(
            dbname=dbname, port=port
        ))
        # Smoke-test the connection right away so failures surface early.
        cur = self.conn.cursor()
        cur.execute('SELECT 1')
        cur.close()

    def get_dsn(self):
        """Return the DSN lightningd should use to reach this database."""
        return "postgres://postgres:password@localhost:{port}/{dbname}".format(
            port=self.port, dbname=self.dbname
        )

    def query(self, query):
        """Run `query`; return rows as a list of column-name -> value dicts."""
        cur = self.conn.cursor()
        cur.execute(query)
        # Zip each row with the cursor's column descriptions to name the values.
        res = [
            {col.name: val for col, val in zip(cur.description, row)}
            for row in cur
        ]
        cur.close()
        return res

    def execute(self, query):
        """Execute `query` inside a transaction on the shared connection."""
        with self.conn, self.conn.cursor() as cur:
            cur.execute(query)


class SqliteDbProvider(object):
    """Trivial provider: sqlite3 needs no server, so start/stop are no-ops."""

    def __init__(self, directory):
        # Kept for interface parity with PostgresDbProvider; unused here.
        self.directory = directory

    def start(self):
        """Nothing to launch for sqlite3."""
        pass

    def get_db(self, node_directory, testname, node_id):
        """Return a handle on the node's on-disk sqlite3 wallet file."""
        return Sqlite3Db(os.path.join(node_directory, 'lightningd.sqlite3'))

    def stop(self):
        """Nothing to tear down for sqlite3."""
        pass


class PostgresDbProvider(object):
    """Manages an ephemeral postgres server for a test run.

    `start` initializes a fresh data directory and launches `postgres` on
    a reserved port; `get_db` creates one uniquely-named database per
    node; `stop` shuts the server down again.
    """

    def __init__(self, directory):
        # Scratch directory that will hold the pgdata dir and the pwfile.
        self.directory = directory
        self.port = None
        self.proc = None
        print("Starting PostgresDbProvider")

    def locate_path(self):
        """Find the newest installed `initdb` and `postgres` binaries.

        Debian-style installs keep them under
        /usr/lib/postgresql/<version>/bin. Returns an (initdb, postgres)
        path pair, or raises ValueError if none can be found.
        """
        prefix = '/usr/lib/postgresql/*'
        matches = glob(prefix)

        candidates = {}
        for m in matches:
            g = re.search(r'([0-9]+[\.0-9]*)', m)
            if not g:
                continue
            # Parse the dotted version into a tuple of ints: float() would
            # raise ValueError on multi-dot versions such as '9.4.1' and
            # would sort '9.10' before '9.9'.
            version = tuple(int(p) for p in g.group(1).split('.') if p != '')
            candidates[version] = m

        if len(candidates) == 0:
            raise ValueError("Could not find `postgres` and `initdb` binaries in {}. Is postgresql installed?".format(prefix))

        # Now iterate in reverse order through matches, preferring the
        # newest version that actually ships both binaries.
        for k, v in sorted(candidates.items())[::-1]:
            initdb = os.path.join(v, 'bin', 'initdb')
            postgres = os.path.join(v, 'bin', 'postgres')
            if os.path.isfile(initdb) and os.path.isfile(postgres):
                logging.info("Found `postgres` and `initdb` in {}".format(os.path.join(v, 'bin')))
                return initdb, postgres

        raise ValueError("Could not find `postgres` and `initdb` in any of the possible paths: {}".format(candidates.values()))

    def start(self):
        """Initialize a pgdata directory and launch the postgres server."""
        passfile = os.path.join(self.directory, "pgpass.txt")
        self.pgdir = os.path.join(self.directory, 'pgsql')
        # Need to write a tiny file containing the password so `initdb` can pick it up
        with open(passfile, 'w') as f:
            f.write('cltest\n')

        initdb, postgres = self.locate_path()
        subprocess.check_call([
            initdb,
            '--pwfile={}'.format(passfile),
            '--pgdata={}'.format(self.pgdir),
            '--auth=trust',
            '--username=postgres',
        ])
        self.port = reserve()
        self.proc = subprocess.Popen([
            postgres,
            '-k', '/tmp/',  # So we don't use /var/lib/...
            '-D', self.pgdir,
            '-p', str(self.port),
            '-F',
            '-i',
        ])
        # Hacky but seems to work ok (might want to make the postgres proc a TailableProc as well if too flaky).
        time.sleep(1)
        self.conn = psycopg2.connect("dbname=template1 user=postgres host=localhost port={}".format(self.port))

        # Required for CREATE DATABASE to work
        self.conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)

    def get_db(self, node_directory, testname, node_id):
        """Create and return a fresh database for one node of one test."""
        # Random suffix to avoid collisions on repeated tests
        nonce = ''.join(random.choice(string.ascii_lowercase + string.digits) for _ in range(8))
        dbname = "{}_{}_{}".format(testname, node_id, nonce)

        cur = self.conn.cursor()
        # NOTE: dbname is interpolated directly; fine for trusted test
        # names, but never do this with untrusted input.
        cur.execute("CREATE DATABASE {};".format(dbname))
        cur.close()
        return PostgresDb(dbname, self.port)

    def stop(self):
        # Send fast shutdown signal see [1] for details:
        #
        # SIGINT
        #
        # This is the Fast Shutdown mode. The server disallows new connections
        # and sends all existing server processes SIGTERM, which will cause
        # them to abort their current transactions and exit promptly. It then
        # waits for all server processes to exit and finally shuts down. If
        # the server is in online backup mode, backup mode will be terminated,
        # rendering the backup useless.
        #
        # [1] https://www.postgresql.org/docs/9.1/server-shutdown.html
        self.proc.send_signal(signal.SIGINT)
        self.proc.wait()
Loading