Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
35 commits
Select commit Hold shift + click to select a range
7291f6c
feat(daemon): handling TOKEN_CREATED event
andreabadesso Nov 28, 2025
df6b3b2
feat(daemon): added a token_creation table which is a one to many tab…
andreabadesso Nov 28, 2025
18cce7d
tests(daemon): added tests for the token creation event handling
andreabadesso Nov 28, 2025
1eac864
tests(daemon): added tests for token creation
andreabadesso Nov 28, 2025
ae4bcdc
fix(daemon): correct zod validation
andreabadesso Nov 28, 2025
5a20a01
feat(daemon): handling edge cases like create token tx with nano head…
andreabadesso Nov 28, 2025
84b4f68
tests(daemon): added new hybrid scenario
andreabadesso Nov 28, 2025
4a8565b
fix(daemon): db tests
andreabadesso Dec 3, 2025
9bf6442
tests(daemon): updated daemon to use new simulator
andreabadesso Dec 3, 2025
ffe2fe9
refactor(daemon): updated token integration test after updating hashes
andreabadesso Dec 3, 2025
73050e8
fix(daemon): initial_amount is not required
andreabadesso Dec 5, 2025
aff93d2
refactor(daemon): updated simulator hashes
andreabadesso Dec 8, 2025
587defc
Merge branch 'master' into feat/token-creation-event
andreabadesso Dec 8, 2025
4807e1e
chore(daemon): using experimental hathor core version for integration…
andreabadesso Dec 8, 2025
b2a1c8d
tests(daemon): updated tests to consider that tokens are created by t…
andreabadesso Dec 8, 2025
fbf3732
tests(daemon): updated hashes to match fixed seed simulator
andreabadesso Dec 9, 2025
872ba13
chore(daemon): always pull images
andreabadesso Dec 9, 2025
ff36456
refactor(daemon): removed IGNORE from token_creation query
andreabadesso Dec 9, 2025
e5474bc
refactor(daemon): using mysql.end instead of destroy
andreabadesso Dec 10, 2025
c0643a9
refactor(daemon): removed redundant deleteTokenCreationMapping
andreabadesso Dec 10, 2025
d2df783
refactor(daemon): deleting all tokens created by a voided block
andreabadesso Dec 12, 2025
51a02cd
chore(daemon): added first block migration
andreabadesso Dec 12, 2025
83b0a57
fix(daemon): add onError handler to detectingDiff state to prevent si…
andreabadesso Dec 16, 2025
4be9e92
tests(wallet-service): using fixed hashes
andreabadesso Dec 16, 2025
7a06e3a
chore(daemon): updated token creation images
andreabadesso Dec 16, 2025
9a7f1ff
tests(daemon): fixed tests
andreabadesso Dec 16, 2025
2a873a8
tests(daemon): updated reorg scenario latest event
andreabadesso Dec 16, 2025
611f4d3
feat(daemon): handling nano re-execution
andreabadesso Dec 17, 2025
634afef
tests(daemon): added blockId to all insertTokenCreation calls in tests
andreabadesso Dec 17, 2025
764474d
refactor(daemon): handling first_block -> null by deleting nc tokens
andreabadesso Dec 17, 2025
3c40e32
refactor: test should expect the nc token to be deleted when the tx t…
andreabadesso Jan 6, 2026
f16faeb
refactor(wallet-service): added missing guards
andreabadesso Jan 6, 2026
bc75f93
Merge branch 'master' into feat/token-creation-event
andreabadesso Jan 6, 2026
2023576
tests(daemon): improved misleading comment on test
andreabadesso Jan 8, 2026
83b560a
Merge branch 'master' into feat/token-creation-event
andreabadesso Jan 9, 2026
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
39 changes: 39 additions & 0 deletions db/migrations/20251128000000-create-token-creation.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,39 @@
'use strict';

/** @type {import('sequelize-cli').Migration} */
module.exports = {
async up(queryInterface, Sequelize) {
await queryInterface.createTable('token_creation', {
token_id: {
type: Sequelize.STRING(64),
allowNull: false,
primaryKey: true,
references: {
model: 'token',
key: 'id',
},
onDelete: 'CASCADE',
onUpdate: 'CASCADE',
},
tx_id: {
type: Sequelize.STRING(64),
allowNull: false,
comment: 'Transaction ID that created the token (regular or nano contract)',
},
created_at: {
type: Sequelize.DATE,
allowNull: false,
defaultValue: Sequelize.literal('CURRENT_TIMESTAMP'),
},
});

// Add index on tx_id for efficient lookups when voiding transactions
await queryInterface.addIndex('token_creation', ['tx_id'], {
name: 'token_creation_tx_id_idx',
});
},

async down(queryInterface, Sequelize) {
await queryInterface.dropTable('token_creation');
},
};
21 changes: 21 additions & 0 deletions db/migrations/20251212000000-add-first-block-to-token-creation.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
'use strict';

/** @type {import('sequelize-cli').Migration} */
module.exports = {
async up(queryInterface, Sequelize) {
await queryInterface.addColumn('token_creation', 'first_block', {
type: Sequelize.STRING(64),
allowNull: true,
comment: 'First block hash that confirmed the nano contract execution that created this token',
});

await queryInterface.addIndex('token_creation', ['first_block'], {
name: 'token_creation_first_block_idx',
});
},

async down(queryInterface) {
await queryInterface.removeIndex('token_creation', 'token_creation_first_block_idx');
await queryInterface.removeColumn('token_creation', 'first_block');
},
};
211 changes: 211 additions & 0 deletions packages/daemon/__tests__/db/index.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,10 @@ import {
incrementTokensTxCount,
markUtxosAsVoided,
storeTokenInformation,
insertTokenCreation,
getTokensCreatedByTx,
getReexecNanoTokens,
deleteTokens,
unlockUtxos,
unspendUtxos,
updateAddressLockedBalance,
Expand Down Expand Up @@ -1347,3 +1351,210 @@ describe('address generation and index methods', () => {
expect(subsetWallet1?.maxWalletIndex).toBe(15);
});
});

// DB-level tests for the token_creation mapping table. Each row links a
// created token to the transaction that created it; first_block (nullable)
// records the block that confirmed a nano contract execution — it is null
// for tokens created directly by a CREATE_TOKEN_TX (where token_id = tx_id).
describe('token creation mapping methods', () => {
  test('insertTokenCreation and getTokensCreatedByTx', async () => {
    expect.hasAssertions();

    const tokenId1 = 'token001';
    const tokenId2 = 'token002';
    const tokenId3 = 'token003';
    const txId1 = 'tx001';
    const txId2 = 'tx002';

    // First, add tokens to the token table
    await addToTokenTable(mysql, [
      { id: tokenId1, name: 'Token 1', symbol: 'TK1', transactions: 0 },
      { id: tokenId2, name: 'Token 2', symbol: 'TK2', transactions: 0 },
      { id: tokenId3, name: 'Token 3', symbol: 'TK3', transactions: 0 },
    ]);

    // Insert token creation mappings
    // tx001 creates token1 and token2 (like a nano contract creating multiple tokens)
    await insertTokenCreation(mysql, tokenId1, txId1, 'block001');
    await insertTokenCreation(mysql, tokenId2, txId1, 'block001');
    // tx002 creates token3
    await insertTokenCreation(mysql, tokenId3, txId2, 'block002');

    // Get tokens created by tx001
    const tokensFromTx1 = await getTokensCreatedByTx(mysql, txId1);
    expect(tokensFromTx1).toHaveLength(2);
    expect(tokensFromTx1).toContain(tokenId1);
    expect(tokensFromTx1).toContain(tokenId2);

    // Get tokens created by tx002
    const tokensFromTx2 = await getTokensCreatedByTx(mysql, txId2);
    expect(tokensFromTx2).toHaveLength(1);
    expect(tokensFromTx2).toContain(tokenId3);

    // Query non-existent transaction
    const tokensFromNonExistent = await getTokensCreatedByTx(mysql, 'nonexistent');
    expect(tokensFromNonExistent).toHaveLength(0);
  });

  test('deleteTokens', async () => {
    expect.hasAssertions();

    const tokenId1 = 'token001';
    const tokenId2 = 'token002';
    const tokenId3 = 'token003';

    // Add tokens to token table
    await addToTokenTable(mysql, [
      { id: tokenId1, name: 'Token 1', symbol: 'TK1', transactions: 0 },
      { id: tokenId2, name: 'Token 2', symbol: 'TK2', transactions: 0 },
      { id: tokenId3, name: 'Token 3', symbol: 'TK3', transactions: 0 },
    ]);

    // Verify tokens exist
    let token1 = await getTokenInformation(mysql, tokenId1);
    expect(token1).toBeDefined();
    expect(token1?.name).toBe('Token 1');

    // Delete token1 and token2
    await deleteTokens(mysql, [tokenId1, tokenId2]);

    // Verify token1 and token2 are gone
    token1 = await getTokenInformation(mysql, tokenId1);
    expect(token1).toBeNull();

    const token2 = await getTokenInformation(mysql, tokenId2);
    expect(token2).toBeNull();

    // Verify token3 still exists (deletion must only affect the listed ids)
    const token3 = await getTokenInformation(mysql, tokenId3);
    expect(token3).toBeDefined();
    expect(token3?.name).toBe('Token 3');

    // Delete with empty array should not throw
    await expect(deleteTokens(mysql, [])).resolves.not.toThrow();
  });

  test('token deletion cascade with token_creation table', async () => {
    expect.hasAssertions();

    const tokenId1 = 'token001';
    const tokenId2 = 'token002';
    const txId1 = 'tx001';

    // Add tokens
    await addToTokenTable(mysql, [
      { id: tokenId1, name: 'Token 1', symbol: 'TK1', transactions: 0 },
      { id: tokenId2, name: 'Token 2', symbol: 'TK2', transactions: 0 },
    ]);

    // Insert mappings
    await insertTokenCreation(mysql, tokenId1, txId1, 'block001');
    await insertTokenCreation(mysql, tokenId2, txId1, 'block001');

    // Verify mappings exist
    let tokens = await getTokensCreatedByTx(mysql, txId1);
    expect(tokens).toHaveLength(2);

    // Delete the tokens (should cascade to token_creation due to FK)
    await deleteTokens(mysql, [tokenId1, tokenId2]);

    // Verify mappings are also deleted
    tokens = await getTokensCreatedByTx(mysql, txId1);
    expect(tokens).toHaveLength(0);
  });

  test('getReexecNanoTokens should only return nano-created tokens', async () => {
    expect.hasAssertions();

    const txId = 'hybrid-tx-001';
    // Traditional CREATE_TOKEN_TX token: token_id = tx_id
    const traditionalTokenId = txId;
    // Nano-created tokens: token_id != tx_id
    const nanoTokenId1 = 'nano-token-001';
    const nanoTokenId2 = 'nano-token-002';

    const blockA = 'block-A';
    const blockB = 'block-B';

    // Add tokens to token table
    await addToTokenTable(mysql, [
      { id: traditionalTokenId, name: 'Hybrid Token', symbol: 'HYB', transactions: 0 },
      { id: nanoTokenId1, name: 'Nano Token 1', symbol: 'NC1', transactions: 0 },
      { id: nanoTokenId2, name: 'Nano Token 2', symbol: 'NC2', transactions: 0 },
    ]);

    // Insert token creation mappings:
    // - Traditional token has first_block = null (created in mempool)
    // - Nano tokens have first_block = blockA
    await insertTokenCreation(mysql, traditionalTokenId, txId, null);
    await insertTokenCreation(mysql, nanoTokenId1, txId, blockA);
    await insertTokenCreation(mysql, nanoTokenId2, txId, blockA);

    // Query for tokens with different first_block than blockB
    // Should return nano tokens (blockA != blockB) but NOT traditional token (token_id = tx_id)
    const tokensWithDifferentBlock = await getReexecNanoTokens(mysql, txId, blockB);

    expect(tokensWithDifferentBlock).toHaveLength(2);
    expect(tokensWithDifferentBlock).toContain(nanoTokenId1);
    expect(tokensWithDifferentBlock).toContain(nanoTokenId2);
    expect(tokensWithDifferentBlock).not.toContain(traditionalTokenId);
  });

  test('getReexecNanoTokens should not return tokens with same first_block', async () => {
    expect.hasAssertions();

    const txId = 'nano-tx-001';
    const nanoTokenId = 'nano-token-001';
    const blockA = 'block-A';

    // Add token
    await addToTokenTable(mysql, [
      { id: nanoTokenId, name: 'Nano Token', symbol: 'NCT', transactions: 0 },
    ]);

    // Insert mapping with first_block = blockA
    await insertTokenCreation(mysql, nanoTokenId, txId, blockA);

    // Query with same first_block - should return empty
    const tokens = await getReexecNanoTokens(mysql, txId, blockA);
    expect(tokens).toHaveLength(0);
  });

  test('getReexecNanoTokens should handle null first_block queries', async () => {
    expect.hasAssertions();

    const txId = 'nano-tx-001';
    const nanoTokenId = 'nano-token-001';
    const blockA = 'block-A';

    // Add token
    await addToTokenTable(mysql, [
      { id: nanoTokenId, name: 'Nano Token', symbol: 'NCT', transactions: 0 },
    ]);

    // Insert mapping with first_block = blockA
    await insertTokenCreation(mysql, nanoTokenId, txId, blockA);

    // Query with null first_block - should return the token since blockA != null
    const tokens = await getReexecNanoTokens(mysql, txId, null);
    expect(tokens).toHaveLength(1);
    expect(tokens).toContain(nanoTokenId);
  });

  test('getReexecNanoTokens should not return traditional tokens even with different first_block', async () => {
    expect.hasAssertions();

    const txId = 'create-token-tx-001';
    // Traditional CREATE_TOKEN_TX: token_id = tx_id
    const traditionalTokenId = txId;

    // Add token
    await addToTokenTable(mysql, [
      { id: traditionalTokenId, name: 'My Token', symbol: 'MTK', transactions: 0 },
    ]);

    // Insert mapping with first_block = null (traditional token)
    await insertTokenCreation(mysql, traditionalTokenId, txId, null);

    // Query with a block hash - should NOT return the traditional token
    // even though null != 'some-block' because token_id = tx_id
    const tokens = await getReexecNanoTokens(mysql, txId, 'some-block');
    expect(tokens).toHaveLength(0);
  });
});
25 changes: 24 additions & 1 deletion packages/daemon/__tests__/guards/guards.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ import {
metadataVoided,
metadataNewTx,
metadataFirstBlock,
metadataNcExecVoided,
metadataChanged,
vertexAccepted,
invalidPeerId,
Expand All @@ -13,6 +14,7 @@ import {
unchanged,
invalidNetwork,
reorgStarted,
tokenCreated,
hasNewEvents,
} from '../../src/guards';
import { EventTypes } from '../../src/types';
Expand Down Expand Up @@ -97,7 +99,7 @@ const generateFullNodeEvent = (type: FullNodeEventTypes, data = {} as any): Even
return generateStandardFullNodeEvent(type, data);
};

const generateMetadataDecidedEvent = (type: 'TX_VOIDED' | 'TX_UNVOIDED' | 'TX_NEW' | 'TX_FIRST_BLOCK' | 'IGNORE'): Event => {
const generateMetadataDecidedEvent = (type: 'TX_VOIDED' | 'TX_UNVOIDED' | 'TX_NEW' | 'TX_FIRST_BLOCK' | 'IGNORE' | 'NC_EXEC_VOIDED'): Event => {
const fullNodeEvent: StandardFullNodeEvent = {
stream_id: '',
peer_id: '',
Expand Down Expand Up @@ -180,6 +182,17 @@ describe('metadata decided tests', () => {
// Any event other than METADATA_DECIDED should return false:
expect(() => metadataIgnore(mockContext, generateFullNodeEvent(FullNodeEventTypes.VERTEX_METADATA_CHANGED))).toThrow('Invalid event type on metadataIgnore guard: FULLNODE_EVENT');
});

test('metadataNcExecVoided', () => {
  // Only the NC_EXEC_VOIDED metadata decision satisfies this guard.
  expect(metadataNcExecVoided(mockContext, generateMetadataDecidedEvent('NC_EXEC_VOIDED'))).toBe(true);

  // Every other metadata decision is rejected.
  const rejectedTypes = ['TX_VOIDED', 'IGNORE', 'TX_NEW', 'TX_FIRST_BLOCK'] as const;
  rejectedTypes.forEach((decidedType) => {
    expect(metadataNcExecVoided(mockContext, generateMetadataDecidedEvent(decidedType))).toBe(false);
  });

  // Any event other than METADATA_DECIDED should throw:
  expect(() => metadataNcExecVoided(mockContext, generateFullNodeEvent(FullNodeEventTypes.VERTEX_METADATA_CHANGED))).toThrow('Invalid event type on metadataNcExecVoided guard: FULLNODE_EVENT');
});
});

describe('fullnode event guards', () => {
Expand Down Expand Up @@ -248,6 +261,16 @@ describe('fullnode event guards', () => {
// Any event other than FULLNODE_EVENT should throw
expect(() => reorgStarted(mockContext, generateMetadataDecidedEvent('TX_NEW'))).toThrow('Invalid event type on reorgStarted guard: METADATA_DECIDED');
});

test('tokenCreated', () => {
  // Only TOKEN_CREATED fullnode events satisfy this guard.
  expect(tokenCreated(mockContext, generateFullNodeEvent(FullNodeEventTypes.TOKEN_CREATED))).toBe(true);

  // Any other fullnode event type is rejected.
  const otherEventTypes = [
    FullNodeEventTypes.NEW_VERTEX_ACCEPTED,
    FullNodeEventTypes.VERTEX_METADATA_CHANGED,
    FullNodeEventTypes.REORG_STARTED,
  ];
  otherEventTypes.forEach((eventType) => {
    expect(tokenCreated(mockContext, generateFullNodeEvent(eventType))).toBe(false);
  });

  // Any event other than FULLNODE_EVENT should throw
  expect(() => tokenCreated(mockContext, generateMetadataDecidedEvent('TX_NEW'))).toThrow('Invalid event type on tokenCreated guard: METADATA_DECIDED');
});
});

describe('fullnode validation guards', () => {
Expand Down
4 changes: 4 additions & 0 deletions packages/daemon/__tests__/integration/config.ts
Original file line number Diff line number Diff line change
Expand Up @@ -49,6 +49,9 @@ export const SINGLE_VOIDED_CREATE_TOKEN_TRANSACTION_LAST_EVENT = 50;
export const SINGLE_VOIDED_REGULAR_TRANSACTION_PORT = 8092;
export const SINGLE_VOIDED_REGULAR_TRANSACTION_LAST_EVENT = 60;

// Simulator port and last event id for the TOKEN_CREATION integration
// scenario, following the same PORT/LAST_EVENT pattern as the scenarios above.
export const TOKEN_CREATION_PORT = 8093;
export const TOKEN_CREATION_LAST_EVENT = 45;

export const SCENARIOS = [
'UNVOIDED_SCENARIO',
'REORG_SCENARIO',
Expand All @@ -61,4 +64,5 @@ export const SCENARIOS = [
'VOIDED_TOKEN_AUTHORITY',
'SINGLE_VOIDED_CREATE_TOKEN_TRANSACTION',
'SINGLE_VOIDED_REGULAR_TRANSACTION',
'TOKEN_CREATION',
];
22 changes: 22 additions & 0 deletions packages/daemon/__tests__/integration/scripts/docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -115,5 +115,27 @@ services:
ports:
- "8092:8080"

# Fullnode events simulator for the TOKEN_CREATED scenario.
# --seed 1 fixes the simulator seed so emitted hashes are deterministic
# (the integration tests assert against fixed hashes).
token_creation:
  image: hathornetwork/hathor-core:experimental-token-creation-scenario
  entrypoint: ["python", "-m", "hathor"]
  command: [
    "events_simulator",
    "--scenario", "TOKEN_CREATED",
    "--seed", "1"
  ]
  ports:
    - "8093:8080"

# Hybrid scenario: token creation combined with a reorg, same fixed seed.
token_created_hybrid_with_reorg:
  image: hathornetwork/hathor-core:experimental-token-creation-scenario
  entrypoint: ["python", "-m", "hathor"]
  command: [
    "events_simulator",
    "--scenario", "TOKEN_CREATED_HYBRID_WITH_REORG",
    "--seed", "1"
  ]
  ports:
    - "8094:8080"

networks:
database:
Loading