Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -15,3 +15,4 @@ packages/wallet-service/.env*
packages/wallet-service/.warmup
.yarn/
.env.*
*.tsbuildinfo
3 changes: 2 additions & 1 deletion package.json
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,8 @@
"workspaces": [
"packages/common",
"packages/daemon",
"packages/wallet-service"
"packages/wallet-service",
"packages/event-downloader"
],
"engines": {
"node": ">=22"
Expand Down
3 changes: 3 additions & 0 deletions packages/daemon/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@
"lint": "eslint .",
"build": "tsc -b",
"start": "node dist/index.js",
"replay-balance": "yarn dlx ts-node src/scripts/replay-balance.ts",
Copy link
Copy Markdown

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🟡 Minor

🧩 Analysis chain

🏁 Script executed:

#!/bin/bash
set -euo pipefail

echo "=== Checking for 'replay-balance' script entries ==="
rg -n '"replay-balance"\s*:' -g '**/package.json' || echo "No matches found"

echo ""
echo "=== Checking for 'ts-node' in package.json files ==="
rg -n '"ts-node"\s*:' -g '**/package.json' || echo "No matches found"

echo ""
echo "=== Checking for 'yarn dlx ts-node' usage ==="
rg -n 'yarn dlx ts-node' -g '**/package.json' || echo "No matches found"

echo ""
echo "=== Checking root package.json for ts-node ==="
if [ -f "package.json" ]; then
  grep -n "ts-node" package.json || echo "ts-node not found in root package.json"
fi

echo ""
echo "=== Verifying replay-balance.ts script exists ==="
if [ -f "packages/daemon/src/scripts/replay-balance.ts" ]; then
  echo "✓ Script file exists: packages/daemon/src/scripts/replay-balance.ts"
else
  echo "✗ Script file NOT found at packages/daemon/src/scripts/replay-balance.ts"
fi

echo ""
echo "=== Checking if ts-node is in daemon package devDependencies ==="
if [ -f "packages/daemon/package.json" ]; then
  grep -A 50 '"devDependencies"' packages/daemon/package.json | grep -n '"ts-node"' || echo "ts-node not in daemon devDependencies"
fi

Repository: HathorNetwork/hathor-wallet-service

Length of output: 1169


Add ts-node to devDependencies to avoid version drift across multiple scripts.

Using yarn dlx ts-node bypasses lockfile reproducibility and introduces version drift over time. This pattern affects multiple scripts in the package (lines 16, 22–24):

  • replay-balance
  • test_images_wait_for_db
  • test_images_wait_for_ws
  • test_images_setup_database

Add ts-node to devDependencies and update all four scripts to invoke it directly (e.g., ts-node src/scripts/replay-balance.ts instead of yarn dlx ts-node src/scripts/replay-balance.ts).

🤖 Prompt for AI Agents
Verify each finding against the current code and only fix it if needed.

In `@packages/daemon/package.json` at line 16, Add ts-node to devDependencies and
update the package.json script entries so they call the installed ts-node binary
instead of using yarn dlx; specifically, add "ts-node" to devDependencies and
change the scripts "replay-balance", "test_images_wait_for_db",
"test_images_wait_for_ws", and "test_images_setup_database" to invoke ts-node
directly (e.g., replace "yarn dlx ts-node src/scripts/replay-balance.ts" with
"ts-node src/scripts/replay-balance.ts") so the lockfile determines the ts-node
version and prevents dlx-driven version drift.

"watch": "tsc -w",
"test_images_up": "docker compose -f ./__tests__/integration/scripts/docker-compose.yml up -d",
"test_images_down": "docker compose -f ./__tests__/integration/scripts/docker-compose.yml down",
Expand All @@ -28,13 +29,15 @@
"author": "André Abadesso",
"module": "dist/index.js",
"devDependencies": {
"@types/better-sqlite3": "7.6.12",
"@types/jest": "29.5.4",
"@types/lodash": "4.14.199",
"@types/mysql": "2.15.21",
"@types/node": "17.0.45",
"@types/ws": "8.5.5",
"@typescript-eslint/eslint-plugin": "6.7.3",
"@typescript-eslint/parser": "6.7.3",
"better-sqlite3": "11.7.0",
"eslint": "9.3.0",
"eslint-config-airbnb-base": "15.0.0",
"eslint-plugin-import": "2.29.1",
Expand Down
261 changes: 261 additions & 0 deletions packages/daemon/src/scripts/replay-balance.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,261 @@
/**
* Copyright (c) Hathor Labs and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/

/**
* Balance replay script for debugging wallet_balance discrepancies.
*
* Reads events from a SQLite database produced by the event-downloader,
* processes them in order using the daemon's own balance utilities, and
* computes the expected HTR balance for a given set of addresses.
*
* Usage:
* node dist/scripts/replay-balance.js [options]
*
* Options:
* --db <path> Path to events.sqlite (default: ./events.sqlite)
* --addresses <path> Path to addresses CSV (default: ./addresses.csv)
* --token <uid> Token UID to compute balance for (default: NATIVE_TOKEN_UID)
* --expected <value> Expected balance in hatoshis for comparison
Comment thread
tuliomir marked this conversation as resolved.
* --verbose Show per-transaction breakdown
*/

import Database from 'better-sqlite3';
import * as fs from 'fs';
import { bigIntUtils, constants } from '@hathor/wallet-lib';
import { prepareOutputs, prepareInputs } from '../utils/wallet';

// ---------------------------------------------------------------------------
// CLI argument parsing
// ---------------------------------------------------------------------------

/** Parsed command-line options for the balance replay script. */
interface Opts {
  /** Path to the events SQLite database (default: ./events.sqlite). */
  db: string;
  /** Path to the addresses CSV file (default: ./addresses.csv). */
  addresses: string;
  /** Token UID whose balance is computed (default: the native token UID). */
  token: string;
  /** Expected balance in hatoshis, for comparison against the computed value. */
  expected?: bigint;
  /** When true, print a per-UTXO breakdown of the computed balance. */
  verbose: boolean;
}

function parseArgs(): Opts {
const args = process.argv.slice(2);
const opts: Opts = {
db: './events.sqlite',
addresses: './addresses.csv',
token: constants.NATIVE_TOKEN_UID,
verbose: false,
};

const readNext = (index: number, flag: string): string => {
const value = args[index + 1];
if (!value || value.startsWith('--')) {
throw new Error(`Missing value for ${flag}`);
}
return value;
};

for (let i = 0; i < args.length; i++) {
switch (args[i]) {
case '--db': opts.db = readNext(i, '--db'); i++; break;
case '--addresses': opts.addresses = readNext(i, '--addresses'); i++; break;
case '--token': opts.token = readNext(i, '--token'); i++; break;
case '--expected': opts.expected = BigInt(readNext(i, '--expected')); i++; break;
case '--verbose': opts.verbose = true; break;
default:
throw new Error(`Unknown option: ${args[i]}`);
}
Comment thread
coderabbitai[bot] marked this conversation as resolved.
}
return opts;
}

// ---------------------------------------------------------------------------
// Helpers
// ---------------------------------------------------------------------------

/**
 * Reads wallet addresses from a CSV file, taking the first column of each
 * non-blank row and skipping an optional header row whose first cell starts
 * with "address" (case-insensitive).
 */
function loadAddresses(csvPath: string): Set<string> {
  const raw = fs.readFileSync(csvPath, 'utf-8');

  const rows: string[] = [];
  for (const line of raw.split(/\r?\n/)) {
    const trimmed = line.trim();
    if (trimmed) {
      rows.push(trimmed);
    }
  }

  const startIndex = rows[0]?.toLowerCase().startsWith('address') ? 1 : 0;

  const result = new Set<string>();
  for (const row of rows.slice(startIndex)) {
    const firstColumn = row.split(',')[0]?.trim();
    if (firstColumn) {
      result.add(firstColumn);
    }
  }
  return result;
}
Comment thread
coderabbitai[bot] marked this conversation as resolved.

/**
 * Final observed state of one transaction after replaying its events.
 * Later VERTEX_METADATA_CHANGED events overwrite earlier entries, so the
 * map of TxState always holds the most recent snapshot per tx hash.
 */
interface TxState {
  /** Transaction hash. */
  hash: string;
  /** True when metadata.voided_by is non-empty in the latest event. */
  voided: boolean;
  /** Raw output objects as carried in the event payload. */
  outputs: any[];
  /** Raw input objects as carried in the event payload. */
  inputs: any[];
  /** Token UIDs referenced by the transaction (empty when absent). */
  tokens: string[];
  /** Height taken from the tx metadata. */
  height: number;
  /** Transaction timestamp from the event payload. */
  timestamp: number;
}

// ---------------------------------------------------------------------------
// Main
// ---------------------------------------------------------------------------

function main() {
const opts = parseArgs();

const walletAddresses = loadAddresses(opts.addresses);
console.log(`Loaded ${walletAddresses.size} wallet addresses`);
console.log(`Token: ${opts.token === constants.NATIVE_TOKEN_UID ? 'HTR (native)' : opts.token}`);

if (walletAddresses.size === 0) {
throw new Error('No wallet addresses found in CSV');
}

const sqlite = new Database(opts.db, { readonly: true });

// Build the WHERE clause — one parameterized LIKE condition per address
const addresses = Array.from(walletAddresses);
const conditions = addresses.map(() => 'data LIKE ?').join(' OR ');
const likeParams = addresses.map(addr => `%${addr}%`);

const rows = sqlite.prepare(`
SELECT id, type, data
FROM events
WHERE type IN ('NEW_VERTEX_ACCEPTED', 'VERTEX_METADATA_CHANGED')
AND (${conditions})
ORDER BY id ASC
`).all(...likeParams) as Array<{ id: number; type: string; data: string }>;
Comment on lines +122 to +132
Copy link
Copy Markdown

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🟠 Major

Potential failure on large address sets due to SQLite bind-parameter caps.

Line 118/Line 127 scale placeholders with address count; large CSVs can exceed SQLite variable limits and abort the script.

💡 Proposed fix (chunk address filters)
-  const addresses = Array.from(walletAddresses);
-  const conditions = addresses.map(() => 'data LIKE ?').join(' OR ');
-  const likeParams = addresses.map(addr => `%${addr}%`);
-
-  const rows = sqlite.prepare(`
-    SELECT id, type, data
-    FROM events
-    WHERE type IN ('NEW_VERTEX_ACCEPTED', 'VERTEX_METADATA_CHANGED')
-      AND (${conditions})
-    ORDER BY id ASC
-  `).all(...likeParams) as Array<{ id: number; type: string; data: string }>;
+  const addresses = Array.from(walletAddresses);
+  const MAX_SQL_PARAMS = 900;
+  const rowById = new Map<number, { id: number; type: string; data: string }>();
+
+  for (let i = 0; i < addresses.length; i += MAX_SQL_PARAMS) {
+    const chunk = addresses.slice(i, i + MAX_SQL_PARAMS);
+    const conditions = chunk.map(() => 'data LIKE ?').join(' OR ');
+    const likeParams = chunk.map(addr => `%${addr}%`);
+
+    const chunkRows = sqlite.prepare(`
+      SELECT id, type, data
+      FROM events
+      WHERE type IN ('NEW_VERTEX_ACCEPTED', 'VERTEX_METADATA_CHANGED')
+        AND (${conditions})
+      ORDER BY id ASC
+    `).all(...likeParams) as Array<{ id: number; type: string; data: string }>;
+
+    for (const row of chunkRows) rowById.set(row.id, row);
+  }
+
+  const rows = Array.from(rowById.values()).sort((a, b) => a.id - b.id);
#!/bin/bash
set -euo pipefail

SCRIPT_FILE="$(fd 'replay-balance.ts$' | head -n1)"
echo "Checking placeholder-per-address pattern in ${SCRIPT_FILE}:"
rg -n "data LIKE \?|all\(\.\.\.likeParams\)" "${SCRIPT_FILE}"

CSV_FILE="$(fd 'addresses.csv$' | head -n1 || true)"
if [[ -n "${CSV_FILE}" ]]; then
  python - "${CSV_FILE}" <<'PY'
import sys
p = sys.argv[1]
lines = [l.strip() for l in open(p, encoding="utf-8").read().splitlines() if l.strip()]
if lines and lines[0].lower().startswith("address"):
    lines = lines[1:]
count = len(lines)
print(f"address_count={count}")
print("sqlite_param_risk=HIGH" if count > 999 else "sqlite_param_risk=LOW")
PY
else
  echo "No addresses.csv found in repository to estimate runtime parameter count."
fi
🤖 Prompt for AI Agents
Verify each finding against the current code and only fix it if needed.

In `@packages/daemon/src/scripts/replay-balance.ts` around lines 117 - 127, The
query builds a variable number of "data LIKE ?" placeholders from
walletAddresses (addresses, conditions, likeParams) and calls
sqlite.prepare(...).all(...likeParams), which can exceed SQLite's bind-parameter
limit for large address sets; change replay logic to chunk addresses into
batches under the SQLite parameter limit (e.g., 999), run the SELECT in
sqlite.prepare(...).all(...) per chunk using the chunk's likeParams,
collect/merge the rows (deduplicating by id) and preserve ORDER BY id ASC, and
ensure this batching is implemented where the current query is constructed and
executed (see variables addresses, conditions, likeParams and the
sqlite.prepare(...).all call).


sqlite.close();

console.log(`Found ${rows.length} relevant events`);

// ------------------------------------------------------------------
// Pass 1: build the final state of each transaction.
// Later events (VERTEX_METADATA_CHANGED) overwrite earlier ones so
// the map always holds the most recent voided_by for each tx.
// ------------------------------------------------------------------

const txStates = new Map<string, TxState>();

for (const row of rows) {
const event = bigIntUtils.JSONBigInt.parse(row.data);
const data = event.event.data;
const hash: string = data.hash;
const voided: boolean = data.metadata.voided_by.length > 0;

txStates.set(hash, {
hash,
voided,
outputs: data.outputs,
inputs: data.inputs,
tokens: data.tokens ?? [],
height: data.metadata.height,
timestamp: data.timestamp,
});
}

console.log(`Unique transactions: ${txStates.size}`);

// ------------------------------------------------------------------
// Pass 2: build the spending map.
// For each non-voided transaction, record which tx hash spends each
// UTXO referenced by its inputs.
// ------------------------------------------------------------------

const spentBy = new Map<string, string>(); // "${txId}:${index}" -> spending tx hash

for (const tx of txStates.values()) {
if (tx.voided) continue;
const inputs = prepareInputs(tx.inputs, tx.tokens);
for (const input of inputs) {
spentBy.set(`${input.tx_id}:${input.index}`, tx.hash);
}
}

// ------------------------------------------------------------------
// Pass 3: compute the wallet balance.
// Sum the value of every unspent HTR output in a non-voided tx that
// belongs to one of the wallet addresses.
// ------------------------------------------------------------------

let totalBalance = 0n;

interface Contribution {
hash: string;
outputIndex: number;
address: string;
amount: bigint;
height: number;
timestamp: number;
}

const unspentUtxos: Contribution[] = [];

for (const tx of txStates.values()) {
if (tx.voided) continue;

const outputs = prepareOutputs(tx.outputs, tx.tokens);

for (const output of outputs) {
const address = output.decoded?.address;
if (!address || !walletAddresses.has(address)) continue;
if (output.token !== opts.token) continue;

// Skip authority outputs (mint / melt)
const isAuthority = (output.token_data & 0x80) !== 0;
if (isAuthority) continue;

const utxoKey = `${tx.hash}:${output.index}`;
if (spentBy.has(utxoKey)) continue;

totalBalance += BigInt(output.value as unknown as number);
unspentUtxos.push({
hash: tx.hash,
outputIndex: output.index,
address,
amount: BigInt(output.value as unknown as number),
height: tx.height,
timestamp: tx.timestamp,
});
}
}

// ------------------------------------------------------------------
// Output
// ------------------------------------------------------------------

if (opts.verbose) {
console.log('\n--- Unspent HTR UTXOs ---');
unspentUtxos
.sort((a, b) => a.height - b.height)
.forEach(u => {
console.log(
` height=${u.height} ${u.hash.substring(0, 16)}... ` +
`output[${u.outputIndex}] ${u.address.substring(0, 8)}... ` +
`+${u.amount} hat`,
);
});
}

console.log('\n=== RESULTS ===');
console.log(`Computed balance : ${totalBalance} hatoshis`);
console.log(`Unspent UTXOs : ${unspentUtxos.length}`);

if (opts.expected !== undefined) {
console.log(`Expected balance : ${opts.expected} hatoshis`);
if (totalBalance === opts.expected) {
console.log('✓ MATCH');
} else {
const diff = totalBalance - opts.expected;
console.log(`✗ MISMATCH — diff: ${diff > 0n ? '+' : ''}${diff} hatoshis`);
}
}
}

main();
1 change: 1 addition & 0 deletions packages/event-downloader/.gitignore
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
events.*
33 changes: 33 additions & 0 deletions packages/event-downloader/package.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
{
"name": "event-downloader",
"license": "MIT",
"main": "dist/index.js",
"typings": "dist/index.d.ts",
"files": [
"dist",
"src"
],
"engines": {
"node": ">=22"
},
Comment thread
coderabbitai[bot] marked this conversation as resolved.
"scripts": {
"build": "tsc -b",
"start": "node dist/index.js",
"lint": "eslint ."
},
"peerDependencies": {
"@hathor/wallet-lib": ">=2.8.3"
},
"dependencies": {
"better-sqlite3": "11.7.0",
"dotenv": "16.4.5",
"ws": "8.18.0",
"zod": "3.23.8"
},
"devDependencies": {
"@types/better-sqlite3": "7.6.12",
"@types/node": "22.10.2",
"@types/ws": "8.5.13",
"typescript": "5.7.2"
}
}
46 changes: 46 additions & 0 deletions packages/event-downloader/src/config.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,46 @@
/**
* Copyright (c) Hathor Labs and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/

import 'dotenv/config';

// Environment variables that must be present and non-blank at startup.
const requiredEnvs = [
  'FULLNODE_HOST',
];

/**
 * Validates that every required environment variable is set to a non-blank
 * value. Throws an Error listing all variables that are missing or blank.
 */
export const checkEnvVariables = () => {
  const missingEnv: string[] = [];

  for (const envVar of requiredEnvs) {
    const value = process.env[envVar];
    if (value === undefined || value.trim() === '') {
      missingEnv.push(envVar);
    }
  }

  if (missingEnv.length > 0) {
    throw new Error(`Missing required environment variables: ${missingEnv.join(', ')}`);
  }
};

/**
 * Reads a positive-integer configuration value from the environment.
 *
 * Returns `fallback` when the variable is unset. Throws when the value is
 * set but is not a plain positive base-10 integer. Unlike a bare
 * `Number.parseInt`, this rejects trailing garbage ("10abc") and decimal
 * values ("5.9") instead of silently truncating a misconfigured variable.
 */
const parsePositiveInt = (envName: string, fallback: number): number => {
  const raw = process.env[envName];
  if (raw === undefined) {
    return fallback;
  }
  // Strict integer syntax check: parseInt alone would accept "10abc" as 10.
  if (!/^\+?\d+$/.test(raw.trim())) {
    throw new Error(`Invalid ${envName}: expected a positive integer, got "${raw}"`);
  }
  const value = Number.parseInt(raw, 10);
  if (!Number.isInteger(value) || value <= 0) {
    throw new Error(`Invalid ${envName}: expected a positive integer, got "${raw}"`);
  }
  return value;
};

// Fullnode connection.
// NOTE(review): the non-null assertion assumes checkEnvVariables() ran before
// this value is read — confirm the entry point calls it first.
export const FULLNODE_HOST = process.env.FULLNODE_HOST!;
// SSL is enabled only when USE_SSL is exactly the string 'true'.
export const USE_SSL = process.env.USE_SSL === 'true';

// Download configuration — each value is validated as a positive integer by
// parsePositiveInt, falling back to the default when the variable is unset.
export const BATCH_SIZE = parsePositiveInt('BATCH_SIZE', 5000);
export const PARALLEL_CONNECTIONS = parsePositiveInt('PARALLEL_CONNECTIONS', 5);
export const WINDOW_SIZE = parsePositiveInt('WINDOW_SIZE', 100);
export const CONNECTION_TIMEOUT_MS = parsePositiveInt('CONNECTION_TIMEOUT_MS', 60000);

// Database configuration — path to the SQLite file where events are stored.
export const DB_PATH = process.env.DB_PATH ?? './events.sqlite';

Loading