Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
38 commits
Select commit Hold shift + click to select a range
d8d2ad5
feat(wip): add shielded outputs integration
pedroferreira1 Mar 19, 2026
ee3f691
tests: tests setup
pedroferreira1 Mar 20, 2026
b1e1324
feat: finish shielded outputs integration
pedroferreira1 Apr 9, 2026
9315ebf
refactor: use path 1' for spend path
pedroferreira1 Apr 9, 2026
62612cc
refactor: address PR review feedback for shielded outputs integration
pedroferreira1 Apr 14, 2026
aac3e62
use multi address as default for shielded outputs tests
pedroferreira1 Apr 14, 2026
ba29cac
feat: add rule enforcement of minimum of shielded outputs
pedroferreira1 Apr 14, 2026
89d8c52
tests: fix linter and unit tests
pedroferreira1 Apr 14, 2026
0ee10f0
chore: fix linter
pedroferreira1 Apr 14, 2026
c453e43
feat: add support for bigint in the shielded outputs values
pedroferreira1 Apr 14, 2026
1427322
remove unnecessary spendAddresses from storage
pedroferreira1 Apr 14, 2026
30f4078
tests: add tests
pedroferreira1 Apr 14, 2026
827fa09
feat: use different account path derivation for scan pubkey
pedroferreira1 Apr 14, 2026
be1cd7f
refactor: unify shielded output processing and split IHistoryOutput type
pedroferreira1 Apr 14, 2026
c01f7c1
fix: surjection proof domain for custom token FullShielded outputs
pedroferreira1 Apr 15, 2026
af56e58
feat: implement spending shielded UTXOs (unshielding)
pedroferreira1 Apr 15, 2026
2368024
fix: address CodeRabbit review feedback
pedroferreira1 Apr 15, 2026
1e04651
test: improve shielded processing unit test coverage
pedroferreira1 Apr 15, 2026
f00b481
fix: hydrate shieldedOutputs from headers during deserialization
pedroferreira1 Apr 15, 2026
7cf5e49
fix: gate shielded address cursors on key availability
pedroferreira1 Apr 15, 2026
a40326f
refactor: remove stored pinCode from Storage, thread explicitly
pedroferreira1 Apr 15, 2026
7e72bf1
fix: cleanup from PR review
pedroferreira1 Apr 15, 2026
b20af0c
fix: address latest CodeRabbit review feedback
pedroferreira1 Apr 15, 2026
1484f59
fix: resolve ESLint default-param-last and no-explicit-any
pedroferreira1 Apr 15, 2026
c406dba
fix: remove unnecessary processHistory variable in syncHistory
pedroferreira1 Apr 15, 2026
d8011f6
add more unit and integration tests
pedroferreira1 Apr 15, 2026
0b89b44
improve comments and add more protection when accessing pubkeys
pedroferreira1 Apr 15, 2026
5a34315
fix: address medium review issues — validation, logging, constants
pedroferreira1 Apr 15, 2026
bdab561
fix: address HIGH review issues — fail-fast on invalid data
pedroferreira1 Apr 15, 2026
7ba7abb
fix(critical): persist blinding factors to UTXOs for shielded-to-shie…
pedroferreira1 Apr 15, 2026
e32f589
fix: address latest CodeRabbit review — timelock, schema, queue safety
pedroferreira1 Apr 15, 2026
78a8042
test: add shielded-specific test coverage for filter, signing, and ed…
pedroferreira1 Apr 15, 2026
fd83b54
fix: address review issues — security, validation, deduplication, cor…
pedroferreira1 Apr 15, 2026
6782b7f
chore: use published @hathor/ct-crypto-node 0.3.0 from npm
pedroferreira1 Apr 15, 2026
1efc856
fix: address CodeRabbit review — input types, normalization, annotations
pedroferreira1 Apr 15, 2026
ca42157
fix: address CodeRabbit review — input types, normalization, annotations
pedroferreira1 Apr 15, 2026
693f956
ci: increase integration test timeout to 120 minutes
pedroferreira1 Apr 16, 2026
658692a
fix: surjection
pedroferreira1 Apr 16, 2026
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .github/workflows/integration-test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ env:
jobs:
itest:
runs-on: 'ubuntu-latest'
timeout-minutes: 50
timeout-minutes: 120

strategy:
matrix:
Expand Down
261 changes: 261 additions & 0 deletions __tests__/headers/shielded_outputs.test.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,261 @@
/**
* Copyright (c) Hathor Labs and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/

import ShieldedOutputsHeader from '../../src/headers/shielded_outputs';
import ShieldedOutput from '../../src/models/shielded_output';
import { ShieldedOutputMode } from '../../src/shielded/types';
import Network from '../../src/models/network';

/**
 * Build an AmountShielded `ShieldedOutput` for tests.
 *
 * Every field not supplied in `overrides` is filled with a small,
 * deterministic placeholder so round-trip assertions are repeatable.
 */
function makeAmountShieldedOutput(
  overrides: Partial<{
    commitment: Buffer;
    rangeProof: Buffer;
    tokenData: number;
    script: Buffer;
    ephemeralPubkey: Buffer;
  }> = {}
): ShieldedOutput {
  const commitment = overrides.commitment ?? Buffer.alloc(33, 0x01);
  const rangeProof = overrides.rangeProof ?? Buffer.from([0x02, 0x03, 0x04]);
  const tokenData = overrides.tokenData ?? 0;
  const script = overrides.script ?? Buffer.from([0x76, 0xa9, 0x14]);
  const ephemeralPubkey = overrides.ephemeralPubkey ?? Buffer.alloc(33, 0x05);

  return new ShieldedOutput(
    ShieldedOutputMode.AMOUNT_SHIELDED,
    commitment,
    rangeProof,
    tokenData,
    script,
    ephemeralPubkey
  );
}

/**
 * Build a FullyShielded `ShieldedOutput` for tests.
 *
 * Fields missing from `overrides` get deterministic placeholder bytes;
 * tokenData is always 0 and the trailing value is always 0n, matching
 * the fully-shielded mode used throughout this suite.
 */
function makeFullShieldedOutput(
  overrides: Partial<{
    commitment: Buffer;
    rangeProof: Buffer;
    script: Buffer;
    ephemeralPubkey: Buffer;
    assetCommitment: Buffer;
    surjectionProof: Buffer;
  }> = {}
): ShieldedOutput {
  const commitment = overrides.commitment ?? Buffer.alloc(33, 0x11);
  const rangeProof = overrides.rangeProof ?? Buffer.from([0x22, 0x33]);
  const script = overrides.script ?? Buffer.from([0x76, 0xa9]);
  const ephemeralPubkey = overrides.ephemeralPubkey ?? Buffer.alloc(33, 0x44);
  const assetCommitment = overrides.assetCommitment ?? Buffer.alloc(33, 0x55);
  const surjectionProof = overrides.surjectionProof ?? Buffer.from([0x66, 0x77, 0x88]);

  return new ShieldedOutput(
    ShieldedOutputMode.FULLY_SHIELDED,
    commitment,
    rangeProof,
    0,
    script,
    ephemeralPubkey,
    assetCommitment,
    surjectionProof,
    0n
  );
}

describe('ShieldedOutputsHeader', () => {
const network = new Network('testnet');

describe('serialize', () => {
it('should serialize header with AmountShielded outputs', () => {
const out1 = makeAmountShieldedOutput();
const out2 = makeAmountShieldedOutput({ tokenData: 1 });
const header = new ShieldedOutputsHeader([out1, out2]);

const parts: Buffer[] = [];
header.serialize(parts);
const buf = Buffer.concat(parts);

// First byte is header ID (0x12)
expect(buf[0]).toBe(0x12);
// Second byte is number of outputs
expect(buf[1]).toBe(2);
});

it('should serialize header with FullShielded outputs', () => {
const out = makeFullShieldedOutput();
const header = new ShieldedOutputsHeader([out]);

const parts: Buffer[] = [];
header.serialize(parts);
const buf = Buffer.concat(parts);

expect(buf[0]).toBe(0x12);
expect(buf[1]).toBe(1);
});
});

describe('serializeSighash', () => {
it('should produce different output from serialize (no proofs)', () => {
const out = makeAmountShieldedOutput();
const header = new ShieldedOutputsHeader([out]);

const serParts: Buffer[] = [];
header.serialize(serParts);
const serialized = Buffer.concat(serParts);

const sighashParts: Buffer[] = [];
header.serializeSighash(sighashParts);
const sighash = Buffer.concat(sighashParts);

// Sighash should be shorter (no range_proof length prefix or data)
expect(sighash.length).toBeLessThan(serialized.length);
// Both should start with header ID and count
expect(sighash[0]).toBe(0x12);
expect(sighash[1]).toBe(1);
});
});

describe('deserialize', () => {
it('should round-trip AmountShielded outputs', () => {
const out1 = makeAmountShieldedOutput();
const out2 = makeAmountShieldedOutput({ tokenData: 2, script: Buffer.from([0xab, 0xcd]) });
const header = new ShieldedOutputsHeader([out1, out2]);

const parts: Buffer[] = [];
header.serialize(parts);
const serialized = Buffer.concat(parts);

const [deserialized, remaining] = ShieldedOutputsHeader.deserialize(serialized, network);
const result = deserialized as ShieldedOutputsHeader;

expect(remaining.length).toBe(0);
expect(result.shieldedOutputs.length).toBe(2);

expect(result.shieldedOutputs[0].mode).toBe(ShieldedOutputMode.AMOUNT_SHIELDED);
expect(result.shieldedOutputs[0].commitment).toEqual(out1.commitment);
expect(result.shieldedOutputs[0].rangeProof).toEqual(out1.rangeProof);
expect(result.shieldedOutputs[0].tokenData).toBe(0);
expect(result.shieldedOutputs[0].script).toEqual(out1.script);
expect(result.shieldedOutputs[0].ephemeralPubkey).toEqual(out1.ephemeralPubkey);

expect(result.shieldedOutputs[1].tokenData).toBe(2);
expect(result.shieldedOutputs[1].script).toEqual(Buffer.from([0xab, 0xcd]));
});

it('should round-trip FullShielded outputs', () => {
const out = makeFullShieldedOutput();
const header = new ShieldedOutputsHeader([out]);

const parts: Buffer[] = [];
header.serialize(parts);
const serialized = Buffer.concat(parts);

const [deserialized, remaining] = ShieldedOutputsHeader.deserialize(serialized, network);
const result = deserialized as ShieldedOutputsHeader;

expect(remaining.length).toBe(0);
expect(result.shieldedOutputs.length).toBe(1);

const d = result.shieldedOutputs[0];
expect(d.mode).toBe(ShieldedOutputMode.FULLY_SHIELDED);
expect(d.commitment).toEqual(out.commitment);
expect(d.rangeProof).toEqual(out.rangeProof);
expect(d.script).toEqual(out.script);
expect(d.ephemeralPubkey).toEqual(out.ephemeralPubkey);
expect(d.assetCommitment).toEqual(out.assetCommitment);
expect(d.surjectionProof).toEqual(out.surjectionProof);
});

it('should round-trip mixed AmountShielded and FullShielded outputs', () => {
const amountOut = makeAmountShieldedOutput({ tokenData: 1 });
const fullOut = makeFullShieldedOutput();
const header = new ShieldedOutputsHeader([amountOut, fullOut]);

const parts: Buffer[] = [];
header.serialize(parts);
const serialized = Buffer.concat(parts);

const [deserialized, remaining] = ShieldedOutputsHeader.deserialize(serialized, network);
const result = deserialized as ShieldedOutputsHeader;

expect(remaining.length).toBe(0);
expect(result.shieldedOutputs.length).toBe(2);
expect(result.shieldedOutputs[0].mode).toBe(ShieldedOutputMode.AMOUNT_SHIELDED);
expect(result.shieldedOutputs[1].mode).toBe(ShieldedOutputMode.FULLY_SHIELDED);
});

it('should preserve remaining buffer bytes', () => {
const out = makeAmountShieldedOutput();
const header = new ShieldedOutputsHeader([out]);

const parts: Buffer[] = [];
header.serialize(parts);
const trailingData = Buffer.from([0xfe, 0xed]);
const serialized = Buffer.concat([Buffer.concat(parts), trailingData]);

const [_, remaining] = ShieldedOutputsHeader.deserialize(serialized, network);
expect(remaining).toEqual(trailingData);
});

it('should throw for invalid header ID', () => {
const buf = Buffer.from([0xff, 0x01]);
expect(() => ShieldedOutputsHeader.deserialize(buf, network)).toThrow('Invalid');
});

it('should re-serialize to identical bytes', () => {
const header = new ShieldedOutputsHeader([
makeAmountShieldedOutput(),
makeFullShieldedOutput(),
]);

const parts1: Buffer[] = [];
header.serialize(parts1);
const bytes1 = Buffer.concat(parts1);

const [deserialized] = ShieldedOutputsHeader.deserialize(bytes1, network);
const parts2: Buffer[] = [];
(deserialized as ShieldedOutputsHeader).serialize(parts2);
const bytes2 = Buffer.concat(parts2);

expect(bytes2).toEqual(bytes1);
});
});

describe('deserialization bounds checking', () => {
// Reuse suite-level `network` from line 56
// Header ID (0x12) + numOutputs(1) + mode(1) = minimum 3 bytes before commitment
const headerId = 0x12;

it('should throw on truncated commitment', () => {
// header_id + num_outputs=1 + mode=1 + only 32 bytes (need 33)
const buf = Buffer.from([headerId, 0x01, 0x01, ...Array(32).fill(0)]);
expect(() => ShieldedOutputsHeader.deserialize(buf, network)).toThrow(/missing commitment/);
});

it('should throw on truncated range proof', () => {
// header_id + num=1 + mode=1 + commitment(33) + rp_len=2 bytes saying 100 + only 10 bytes
const buf = Buffer.alloc(3 + 33 + 2 + 10);
buf[0] = headerId;
buf[1] = 1; // num outputs
buf[2] = 1; // mode AMOUNT_SHIELDED
buf.writeUInt16BE(100, 3 + 33); // range proof length = 100
expect(() => ShieldedOutputsHeader.deserialize(buf, network)).toThrow(
/incomplete range proof/
);
});

it('should throw on unknown mode byte', () => {
// header_id + num=1 + mode=0x99 + enough bytes for commitment
const buf = Buffer.alloc(3 + 33 + 2 + 5 + 2 + 5 + 1 + 33);
buf[0] = headerId;
buf[1] = 1;
buf[2] = 0x99; // unknown mode
expect(() => ShieldedOutputsHeader.deserialize(buf, network)).toThrow(
/Unsupported shielded output mode: 153/
);
});

it('should throw on truncated ephemeral pubkey', () => {
// Build a valid AmountShielded up to the ephemeral pubkey, then truncate
const header = new ShieldedOutputsHeader([makeAmountShieldedOutput()]);
const parts: Buffer[] = [];
header.serialize(parts);
const full = Buffer.concat(parts);
// Trim the last 10 bytes (ephemeral pubkey is 33 bytes at the end)
const truncated = full.subarray(0, full.length - 10);
expect(() => ShieldedOutputsHeader.deserialize(truncated, network)).toThrow(
/missing ephemeral pubkey/
);
});
});
});
31 changes: 20 additions & 11 deletions __tests__/integration/configuration/docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -4,18 +4,16 @@ services:
# All the following services are related to the core of the Private Network
Copy link
Copy Markdown
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

TODO we must make it work without cpuminer, using the dev miner

# For more information on these configs, refer to:
# https://github.com/HathorNetwork/rfcs/blob/master/text/0033-private-network-guide.md
# Fullnode on 8080, tx-mining-service API on 8035, tx-mining-service DevMiner on 8034

# Fullnode on 8080, tx mining service on 8035, cpuminer stratum on 8034

fullnode:
image:
${HATHOR_LIB_INTEGRATION_TESTS_FULLNODE_IMAGE:-hathornetwork/hathor-core:sha-dc521be2}
${HATHOR_LIB_INTEGRATION_TESTS_FULLNODE_IMAGE:-hathornetwork/hathor-core:experimental-shielded-outputs-alpha-v1}
command: [
"run_node",
"--listen", "tcp:40404",
"--status", "8080",
"--test-mode-tx-weight",
"--test-mode-block-weight",
"--wallet-index",
"--allow-mining-without-peers",
"--unsafe-mode", "nano-testnet-bravo",
Expand Down Expand Up @@ -44,7 +42,7 @@ services:
tx-mining-service:
platform: linux/amd64
image:
${HATHOR_LIB_INTEGRATION_TESTS_TXMINING_IMAGE:-hathornetwork/tx-mining-service}
${HATHOR_LIB_INTEGRATION_TESTS_TXMINING_IMAGE:-hathornetwork/tx-mining-service:shielded-outputs-v1}
depends_on:
fullnode:
condition: service_healthy
Expand All @@ -54,11 +52,22 @@ services:
command: [
"http://fullnode:8080",
"--stratum-port=8034",
"--block-interval=1000",
"--api-port=8035",
"--dev-miner",
"--testnet",
"--address", "WTjhJXzQJETVx7BVXdyZmvk396DRRsubdw", # Miner rewards address (WALLET_CONSTANTS.miner in test-constants.ts)
"--testnet"
]
networks:
- hathor-privnet

cpuminer:
image: hathornetwork/cpuminer
depends_on:
- tx-mining-service
command: [
"-a", "sha256d",
"--coinbase-addr", "WTjhJXzQJETVx7BVXdyZmvk396DRRsubdw", # Refer to test-utils-integration.js, WALLET_CONSTANTS
"-o", "stratum+tcp://tx-mining-service:8034",
"--retry-pause", "5", # 5 seconds between retries
"-t", "1" # Number of threads used to mine
]
networks:
- hathor-privnet
Expand Down Expand Up @@ -228,8 +237,8 @@ services:
AWS_SHARED_CREDENTIALS_FILE: ".aws/credentials" # Credentials for mocked AWS
AWS_CONFIG_FILE: ".aws/config" # Config for mocked AWS
ports:
- "3000:3000"
- "3001:3001"
- "3100:3000"
- "3101:3001"
Comment on lines +240 to +241
Copy link
Copy Markdown

@coderabbitai coderabbitai bot Apr 15, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🔴 Critical

🧩 Analysis chain

🏁 Script executed:

#!/bin/bash
# Search for hardcoded references to old ports 3000/3001 in test files
echo "=== Searching for localhost:3000 or localhost:3001 references ==="
rg -n "localhost:300[01]|127.0.0.1:300[01]" --type ts --type js

echo ""
echo "=== Checking service-facade.helper.ts for port definitions ==="
fd -t f "service-facade.helper.ts" --exec cat {}

Repository: HathorNetwork/hathor-wallet-lib

Length of output: 8039


Update test helpers to use the new host ports 3100 and 3101.

The docker-compose port mapping changed from 3000:3000/3001:3001 to 3100:3000/3101:3001, but the test helpers still hardcode the old ports:

  • __tests__/integration/helpers/service-facade.helper.ts:37-38config.setWalletServiceBaseUrl('http://localhost:3000/dev/') and config.setWalletServiceBaseWsUrl('ws://localhost:3001/') must be updated to use 3100 and 3101
  • __tests__/integration/adapters/types.ts:102 — comment references localhost:3000 and needs updating

Tests will fail connecting to the services without these updates.

🤖 Prompt for AI Agents
Verify each finding against the current code and only fix it if needed.

In `@__tests__/integration/configuration/docker-compose.yml` around lines 240 -
241, Update the hardcoded localhost ports to match the docker-compose change:
replace usages of config.setWalletServiceBaseUrl('http://localhost:3000/dev/')
with port 3100 and config.setWalletServiceBaseWsUrl('ws://localhost:3001/') with
port 3101 (i.e., 'http://localhost:3100/dev/' and 'ws://localhost:3101/'), and
update the explanatory comment that mentions "localhost:3000" to reference the
new host port 3100 so tests and adapters point to the correct service endpoints.

Copy link
Copy Markdown
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

There's a comment by myself about changes in this file. This will be addressed later

Copy link
Copy Markdown

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@pedroferreira1, understood! I'll leave this open for now. Would you like me to create a follow-up GitHub issue to track the update of the hardcoded ports in __tests__/integration/helpers/service-facade.helper.ts (lines 37–38) and __tests__/integration/adapters/types.ts (line 102) from 3000/3001 to 3100/3101?

networks:
- hathor-privnet

Expand Down
1 change: 1 addition & 0 deletions __tests__/integration/configuration/privnet.yml
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,7 @@ CHECKPOINTS: []

ENABLE_NANO_CONTRACTS: enabled
ENABLE_FEE_BASED_TOKENS: enabled
ENABLE_SHIELDED_TRANSACTIONS: enabled
NC_ON_CHAIN_BLUEPRINT_RESTRICTED: false
AVG_TIME_BETWEEN_BLOCKS: 1
FEE_PER_OUTPUT: 1
Expand Down
5 changes: 3 additions & 2 deletions __tests__/integration/hathorwallet_facade.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -67,6 +67,7 @@ describe('template methods', () => {
// After mining and pushing, the hash should be set (it was null before mining)
expect(tx.hash).not.toBeNull();
expect(typeof tx.nonce).toBe('number');
expect(tx.nonce).toBeGreaterThan(0);
});

it('should send transactions from the template transaction', async () => {
Expand Down Expand Up @@ -1469,7 +1470,7 @@ describe('sendManyOutputsTransaction', () => {
* The locked/unlocked balances are usually updated when new transactions arrive.
* We will force this update here without a new tx, for testing purposes.
*/
await hWallet.storage.processHistory();
await hWallet.storage.processHistory(hWallet.pinCode ?? undefined);

// Validating getBalance ( moment 1 )
htrBalance = await hWallet.getBalance(NATIVE_TOKEN_UID);
Expand All @@ -1487,7 +1488,7 @@ describe('sendManyOutputsTransaction', () => {
await delay(waitFor2);

// Forcing balance updates
await hWallet.storage.processHistory();
await hWallet.storage.processHistory(hWallet.pinCode ?? undefined);

// Validating getBalance ( moment 2 )
htrBalance = await hWallet.getBalance(NATIVE_TOKEN_UID);
Expand Down
2 changes: 1 addition & 1 deletion __tests__/integration/helpers/wallet.helper.ts
Original file line number Diff line number Diff line change
Expand Up @@ -334,7 +334,7 @@ export async function waitForTxReceived(
// so after the transaction arrives, all the metadata involved on it is updated and we can
// continue running the tests to correctly check balances, addresses, and everything else
await updateInputsSpentBy(hWallet, storageTx);
await hWallet.storage.processHistory();
await hWallet.storage.processHistory(hWallet.pinCode ?? undefined);
}

return storageTx;
Expand Down
Loading
Loading