diff --git a/db/migrations/20251128000000-create-token-creation.js b/db/migrations/20251128000000-create-token-creation.js new file mode 100644 index 00000000..6caf4ff4 --- /dev/null +++ b/db/migrations/20251128000000-create-token-creation.js @@ -0,0 +1,39 @@ +'use strict'; + +/** @type {import('sequelize-cli').Migration} */ +module.exports = { + async up(queryInterface, Sequelize) { + await queryInterface.createTable('token_creation', { + token_id: { + type: Sequelize.STRING(64), + allowNull: false, + primaryKey: true, + references: { + model: 'token', + key: 'id', + }, + onDelete: 'CASCADE', + onUpdate: 'CASCADE', + }, + tx_id: { + type: Sequelize.STRING(64), + allowNull: false, + comment: 'Transaction ID that created the token (regular or nano contract)', + }, + created_at: { + type: Sequelize.DATE, + allowNull: false, + defaultValue: Sequelize.literal('CURRENT_TIMESTAMP'), + }, + }); + + // Add index on tx_id for efficient lookups when voiding transactions + await queryInterface.addIndex('token_creation', ['tx_id'], { + name: 'token_creation_tx_id_idx', + }); + }, + + async down(queryInterface, Sequelize) { + await queryInterface.dropTable('token_creation'); + }, +}; diff --git a/db/migrations/20251212000000-add-first-block-to-token-creation.js b/db/migrations/20251212000000-add-first-block-to-token-creation.js new file mode 100644 index 00000000..97eb9103 --- /dev/null +++ b/db/migrations/20251212000000-add-first-block-to-token-creation.js @@ -0,0 +1,21 @@ +'use strict'; + +/** @type {import('sequelize-cli').Migration} */ +module.exports = { + async up(queryInterface, Sequelize) { + await queryInterface.addColumn('token_creation', 'first_block', { + type: Sequelize.STRING(64), + allowNull: true, + comment: 'First block hash that confirmed the nano contract execution that created this token', + }); + + await queryInterface.addIndex('token_creation', ['first_block'], { + name: 'token_creation_first_block_idx', + }); + }, + + async down(queryInterface) { + await 
queryInterface.removeIndex('token_creation', 'token_creation_first_block_idx'); + await queryInterface.removeColumn('token_creation', 'first_block'); + }, +}; diff --git a/packages/daemon/__tests__/db/index.test.ts b/packages/daemon/__tests__/db/index.test.ts index b5c7c375..66aaa153 100644 --- a/packages/daemon/__tests__/db/index.test.ts +++ b/packages/daemon/__tests__/db/index.test.ts @@ -32,6 +32,10 @@ import { incrementTokensTxCount, markUtxosAsVoided, storeTokenInformation, + insertTokenCreation, + getTokensCreatedByTx, + getReexecNanoTokens, + deleteTokens, unlockUtxos, unspendUtxos, updateAddressLockedBalance, @@ -1347,3 +1351,210 @@ describe('address generation and index methods', () => { expect(subsetWallet1?.maxWalletIndex).toBe(15); }); }); + +describe('token creation mapping methods', () => { + test('insertTokenCreation and getTokensCreatedByTx', async () => { + expect.hasAssertions(); + + const tokenId1 = 'token001'; + const tokenId2 = 'token002'; + const tokenId3 = 'token003'; + const txId1 = 'tx001'; + const txId2 = 'tx002'; + + // First, add tokens to the token table + await addToTokenTable(mysql, [ + { id: tokenId1, name: 'Token 1', symbol: 'TK1', transactions: 0 }, + { id: tokenId2, name: 'Token 2', symbol: 'TK2', transactions: 0 }, + { id: tokenId3, name: 'Token 3', symbol: 'TK3', transactions: 0 }, + ]); + + // Insert token creation mappings + // tx001 creates token1 and token2 (like a nano contract creating multiple tokens) + await insertTokenCreation(mysql, tokenId1, txId1, 'block001'); + await insertTokenCreation(mysql, tokenId2, txId1, 'block001'); + // tx002 creates token3 + await insertTokenCreation(mysql, tokenId3, txId2, 'block002'); + + // Get tokens created by tx001 + const tokensFromTx1 = await getTokensCreatedByTx(mysql, txId1); + expect(tokensFromTx1).toHaveLength(2); + expect(tokensFromTx1).toContain(tokenId1); + expect(tokensFromTx1).toContain(tokenId2); + + // Get tokens created by tx002 + const tokensFromTx2 = await 
getTokensCreatedByTx(mysql, txId2); + expect(tokensFromTx2).toHaveLength(1); + expect(tokensFromTx2).toContain(tokenId3); + + // Query non-existent transaction + const tokensFromNonExistent = await getTokensCreatedByTx(mysql, 'nonexistent'); + expect(tokensFromNonExistent).toHaveLength(0); + }); + + test('deleteTokens', async () => { + expect.hasAssertions(); + + const tokenId1 = 'token001'; + const tokenId2 = 'token002'; + const tokenId3 = 'token003'; + + // Add tokens to token table + await addToTokenTable(mysql, [ + { id: tokenId1, name: 'Token 1', symbol: 'TK1', transactions: 0 }, + { id: tokenId2, name: 'Token 2', symbol: 'TK2', transactions: 0 }, + { id: tokenId3, name: 'Token 3', symbol: 'TK3', transactions: 0 }, + ]); + + // Verify tokens exist + let token1 = await getTokenInformation(mysql, tokenId1); + expect(token1).toBeDefined(); + expect(token1?.name).toBe('Token 1'); + + // Delete token1 and token2 + await deleteTokens(mysql, [tokenId1, tokenId2]); + + // Verify token1 and token2 are gone + token1 = await getTokenInformation(mysql, tokenId1); + expect(token1).toBeNull(); + + const token2 = await getTokenInformation(mysql, tokenId2); + expect(token2).toBeNull(); + + // Verify token3 still exists + const token3 = await getTokenInformation(mysql, tokenId3); + expect(token3).toBeDefined(); + expect(token3?.name).toBe('Token 3'); + + // Delete with empty array should not throw + await expect(deleteTokens(mysql, [])).resolves.not.toThrow(); + }); + + test('token deletion cascade with token_creation table', async () => { + expect.hasAssertions(); + + const tokenId1 = 'token001'; + const tokenId2 = 'token002'; + const txId1 = 'tx001'; + + // Add tokens + await addToTokenTable(mysql, [ + { id: tokenId1, name: 'Token 1', symbol: 'TK1', transactions: 0 }, + { id: tokenId2, name: 'Token 2', symbol: 'TK2', transactions: 0 }, + ]); + + // Insert mappings + await insertTokenCreation(mysql, tokenId1, txId1, 'block001'); + await insertTokenCreation(mysql, tokenId2, 
txId1, 'block001'); + + // Verify mappings exist + let tokens = await getTokensCreatedByTx(mysql, txId1); + expect(tokens).toHaveLength(2); + + // Delete the tokens (should cascade to token_creation due to FK) + await deleteTokens(mysql, [tokenId1, tokenId2]); + + // Verify mappings are also deleted + tokens = await getTokensCreatedByTx(mysql, txId1); + expect(tokens).toHaveLength(0); + }); + + test('getReexecNanoTokens should only return nano-created tokens', async () => { + expect.hasAssertions(); + + const txId = 'hybrid-tx-001'; + // Traditional CREATE_TOKEN_TX token: token_id = tx_id + const traditionalTokenId = txId; + // Nano-created tokens: token_id != tx_id + const nanoTokenId1 = 'nano-token-001'; + const nanoTokenId2 = 'nano-token-002'; + + const blockA = 'block-A'; + const blockB = 'block-B'; + + // Add tokens to token table + await addToTokenTable(mysql, [ + { id: traditionalTokenId, name: 'Hybrid Token', symbol: 'HYB', transactions: 0 }, + { id: nanoTokenId1, name: 'Nano Token 1', symbol: 'NC1', transactions: 0 }, + { id: nanoTokenId2, name: 'Nano Token 2', symbol: 'NC2', transactions: 0 }, + ]); + + // Insert token creation mappings: + // - Traditional token has first_block = null (created in mempool) + // - Nano tokens have first_block = blockA + await insertTokenCreation(mysql, traditionalTokenId, txId, null); + await insertTokenCreation(mysql, nanoTokenId1, txId, blockA); + await insertTokenCreation(mysql, nanoTokenId2, txId, blockA); + + // Query for tokens with different first_block than blockB + // Should return nano tokens (blockA != blockB) but NOT traditional token (token_id = tx_id) + const tokensWithDifferentBlock = await getReexecNanoTokens(mysql, txId, blockB); + + expect(tokensWithDifferentBlock).toHaveLength(2); + expect(tokensWithDifferentBlock).toContain(nanoTokenId1); + expect(tokensWithDifferentBlock).toContain(nanoTokenId2); + expect(tokensWithDifferentBlock).not.toContain(traditionalTokenId); + }); + + test('getReexecNanoTokens 
should not return tokens with same first_block', async () => { + expect.hasAssertions(); + + const txId = 'nano-tx-001'; + const nanoTokenId = 'nano-token-001'; + const blockA = 'block-A'; + + // Add token + await addToTokenTable(mysql, [ + { id: nanoTokenId, name: 'Nano Token', symbol: 'NCT', transactions: 0 }, + ]); + + // Insert mapping with first_block = blockA + await insertTokenCreation(mysql, nanoTokenId, txId, blockA); + + // Query with same first_block - should return empty + const tokens = await getReexecNanoTokens(mysql, txId, blockA); + expect(tokens).toHaveLength(0); + }); + + test('getReexecNanoTokens should handle null first_block queries', async () => { + expect.hasAssertions(); + + const txId = 'nano-tx-001'; + const nanoTokenId = 'nano-token-001'; + const blockA = 'block-A'; + + // Add token + await addToTokenTable(mysql, [ + { id: nanoTokenId, name: 'Nano Token', symbol: 'NCT', transactions: 0 }, + ]); + + // Insert mapping with first_block = blockA + await insertTokenCreation(mysql, nanoTokenId, txId, blockA); + + // Query with null first_block - should return the token since blockA != null + const tokens = await getReexecNanoTokens(mysql, txId, null); + expect(tokens).toHaveLength(1); + expect(tokens).toContain(nanoTokenId); + }); + + test('getReexecNanoTokens should not return traditional tokens even with different first_block', async () => { + expect.hasAssertions(); + + const txId = 'create-token-tx-001'; + // Traditional CREATE_TOKEN_TX: token_id = tx_id + const traditionalTokenId = txId; + + // Add token + await addToTokenTable(mysql, [ + { id: traditionalTokenId, name: 'My Token', symbol: 'MTK', transactions: 0 }, + ]); + + // Insert mapping with first_block = null (traditional token) + await insertTokenCreation(mysql, traditionalTokenId, txId, null); + + // Query with a block hash - should NOT return the traditional token + // even though null != 'some-block' because token_id = tx_id + const tokens = await getReexecNanoTokens(mysql, 
txId, 'some-block'); + expect(tokens).toHaveLength(0); + }); +}); diff --git a/packages/daemon/__tests__/guards/guards.test.ts b/packages/daemon/__tests__/guards/guards.test.ts index 42ac6a30..0fc49a32 100644 --- a/packages/daemon/__tests__/guards/guards.test.ts +++ b/packages/daemon/__tests__/guards/guards.test.ts @@ -4,6 +4,7 @@ import { metadataVoided, metadataNewTx, metadataFirstBlock, + metadataNcExecVoided, metadataChanged, vertexAccepted, invalidPeerId, @@ -13,6 +14,7 @@ import { unchanged, invalidNetwork, reorgStarted, + tokenCreated, hasNewEvents, } from '../../src/guards'; import { EventTypes } from '../../src/types'; @@ -97,7 +99,7 @@ const generateFullNodeEvent = (type: FullNodeEventTypes, data = {} as any): Even return generateStandardFullNodeEvent(type, data); }; -const generateMetadataDecidedEvent = (type: 'TX_VOIDED' | 'TX_UNVOIDED' | 'TX_NEW' | 'TX_FIRST_BLOCK' | 'IGNORE'): Event => { +const generateMetadataDecidedEvent = (type: 'TX_VOIDED' | 'TX_UNVOIDED' | 'TX_NEW' | 'TX_FIRST_BLOCK' | 'IGNORE' | 'NC_EXEC_VOIDED'): Event => { const fullNodeEvent: StandardFullNodeEvent = { stream_id: '', peer_id: '', @@ -180,6 +182,17 @@ describe('metadata decided tests', () => { // Any event other than METADATA_DECIDED should return false: expect(() => metadataIgnore(mockContext, generateFullNodeEvent(FullNodeEventTypes.VERTEX_METADATA_CHANGED))).toThrow('Invalid event type on metadataIgnore guard: FULLNODE_EVENT'); }); + + test('metadataNcExecVoided', () => { + expect(metadataNcExecVoided(mockContext, generateMetadataDecidedEvent('NC_EXEC_VOIDED'))).toBe(true); + expect(metadataNcExecVoided(mockContext, generateMetadataDecidedEvent('TX_VOIDED'))).toBe(false); + expect(metadataNcExecVoided(mockContext, generateMetadataDecidedEvent('IGNORE'))).toBe(false); + expect(metadataNcExecVoided(mockContext, generateMetadataDecidedEvent('TX_NEW'))).toBe(false); + expect(metadataNcExecVoided(mockContext, generateMetadataDecidedEvent('TX_FIRST_BLOCK'))).toBe(false); + + // 
Any event other than METADATA_DECIDED should throw: + expect(() => metadataNcExecVoided(mockContext, generateFullNodeEvent(FullNodeEventTypes.VERTEX_METADATA_CHANGED))).toThrow('Invalid event type on metadataNcExecVoided guard: FULLNODE_EVENT'); + }); }); describe('fullnode event guards', () => { @@ -248,6 +261,16 @@ describe('fullnode event guards', () => { // Any event other than FULLNODE_EVENT should throw expect(() => reorgStarted(mockContext, generateMetadataDecidedEvent('TX_NEW'))).toThrow('Invalid event type on reorgStarted guard: METADATA_DECIDED'); }); + + test('tokenCreated', () => { + expect(tokenCreated(mockContext, generateFullNodeEvent(FullNodeEventTypes.TOKEN_CREATED))).toBe(true); + expect(tokenCreated(mockContext, generateFullNodeEvent(FullNodeEventTypes.NEW_VERTEX_ACCEPTED))).toBe(false); + expect(tokenCreated(mockContext, generateFullNodeEvent(FullNodeEventTypes.VERTEX_METADATA_CHANGED))).toBe(false); + expect(tokenCreated(mockContext, generateFullNodeEvent(FullNodeEventTypes.REORG_STARTED))).toBe(false); + + // Any event other than FULLNODE_EVENT should throw + expect(() => tokenCreated(mockContext, generateMetadataDecidedEvent('TX_NEW'))).toThrow('Invalid event type on tokenCreated guard: METADATA_DECIDED'); + }); }); describe('fullnode validation guards', () => { diff --git a/packages/daemon/__tests__/integration/config.ts b/packages/daemon/__tests__/integration/config.ts index 2c02078f..3a6a421f 100644 --- a/packages/daemon/__tests__/integration/config.ts +++ b/packages/daemon/__tests__/integration/config.ts @@ -49,6 +49,9 @@ export const SINGLE_VOIDED_CREATE_TOKEN_TRANSACTION_LAST_EVENT = 50; export const SINGLE_VOIDED_REGULAR_TRANSACTION_PORT = 8092; export const SINGLE_VOIDED_REGULAR_TRANSACTION_LAST_EVENT = 60; +export const TOKEN_CREATION_PORT = 8093; +export const TOKEN_CREATION_LAST_EVENT = 45; + export const SCENARIOS = [ 'UNVOIDED_SCENARIO', 'REORG_SCENARIO', @@ -61,4 +64,5 @@ export const SCENARIOS = [ 'VOIDED_TOKEN_AUTHORITY', 
'SINGLE_VOIDED_CREATE_TOKEN_TRANSACTION', 'SINGLE_VOIDED_REGULAR_TRANSACTION', + 'TOKEN_CREATION', ]; diff --git a/packages/daemon/__tests__/integration/scripts/docker-compose.yml b/packages/daemon/__tests__/integration/scripts/docker-compose.yml index 572963c8..8f57ff06 100644 --- a/packages/daemon/__tests__/integration/scripts/docker-compose.yml +++ b/packages/daemon/__tests__/integration/scripts/docker-compose.yml @@ -115,5 +115,27 @@ services: ports: - "8092:8080" + token_creation: + image: hathornetwork/hathor-core:experimental-token-creation-scenario + entrypoint: ["python", "-m", "hathor"] + command: [ + "events_simulator", + "--scenario", "TOKEN_CREATED", + "--seed", "1" + ] + ports: + - "8093:8080" + + token_created_hybrid_with_reorg: + image: hathornetwork/hathor-core:experimental-token-creation-scenario + entrypoint: ["python", "-m", "hathor"] + command: [ + "events_simulator", + "--scenario", "TOKEN_CREATED_HYBRID_WITH_REORG", + "--seed", "1" + ] + ports: + - "8094:8080" + networks: database: diff --git a/packages/daemon/__tests__/integration/token_created_hybrid_with_reorg.test.ts b/packages/daemon/__tests__/integration/token_created_hybrid_with_reorg.test.ts new file mode 100644 index 00000000..8eb01c29 --- /dev/null +++ b/packages/daemon/__tests__/integration/token_created_hybrid_with_reorg.test.ts @@ -0,0 +1,330 @@ +/** + * Copyright (c) Hathor Labs and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +import * as Services from '../../src/services'; +import { SyncMachine } from '../../src/machines'; +import { interpret } from 'xstate'; +import { getDbConnection } from '../../src/db'; +import { Connection } from 'mysql2/promise'; +import { cleanDatabase, transitionUntilEvent } from './utils'; + +import { + DB_NAME, + DB_USER, + DB_PORT, + DB_PASS, + DB_ENDPOINT, +} from './config'; + +jest.mock('../../src/config', () => { + return { + __esModule: true, + default: jest.fn(() => ({})), + }; +}); + +jest.mock('../../src/utils/aws', () => { + return { + sendRealtimeTx: jest.fn(), + invokeOnTxPushNotificationRequestedLambda: jest.fn(), + }; +}); + +import getConfig from '../../src/config'; + +const TOKEN_CREATED_HYBRID_WITH_REORG_PORT = 8094; +const TOKEN_CREATED_HYBRID_WITH_REORG_LAST_EVENT = 57; + +// @ts-expect-error +getConfig.mockReturnValue({ + NETWORK: 'testnet', + SERVICE_NAME: 'daemon-test', + CONSOLE_LEVEL: 'debug', + TX_CACHE_SIZE: 100, + BLOCK_REWARD_LOCK: 300, + FULLNODE_PEER_ID: 'simulator_peer_id', + STREAM_ID: 'simulator_stream_id', + FULLNODE_NETWORK: 'unittests', + FULLNODE_HOST: `127.0.0.1:${TOKEN_CREATED_HYBRID_WITH_REORG_PORT}`, + USE_SSL: false, + DB_ENDPOINT, + DB_NAME, + DB_USER, + DB_PASS, + DB_PORT, + ACK_TIMEOUT_MS: 20000, +}); + +let mysql: Connection; + +beforeAll(async () => { + mysql = await getDbConnection(); + await cleanDatabase(mysql); +}); + +beforeEach(async () => { + await cleanDatabase(mysql); +}); + +afterAll(async () => { + jest.resetAllMocks(); + if (mysql && 'release' in mysql) { + // @ts-expect-error - pooled connection has release method + await mysql.release(); + } +}); + +// Mock checkForMissedEvents since HTTP API is not available in test simulators +jest.spyOn(Services, 'checkForMissedEvents').mockImplementation(async () => ({ + hasNewEvents: false, + events: [], +})); + +/** + * Integration test for TOKEN_CREATED with REORG scenario (HYBRID transaction). 
+ * + * This test validates a hybrid transaction that creates tokens in TWO different ways: + * 1. Traditional CREATE_TOKEN_TX: Token created immediately when transaction hits mempool + * 2. Nano contract syscall: Token created when nano contract executes successfully + * + * Test Flow: + * 1. Hybrid transaction arrives with both CREATE_TOKEN_TX and nano headers + * 2. TOKEN_CREATED event #1: Traditional token "HYB" with nc_exec_info: null + * 3. Transaction gets confirmed in block b2 (nc_block) + * 4. Nano executes successfully (nc_execution: SUCCESS) + * 5. TOKEN_CREATED event #2: Nano-created token "NCX" with nc_exec_info: {nc_tx, nc_block} + * 6. REORG happens - a-chain (a2 → a3 → a4 → a5) becomes longer than b-chain (b1 → b2) + * 7. Block b2 gets orphaned, nc_execution changes from 'success' to 'pending' + * 8. Transaction gets re-confirmed in block a3, nc_execution goes back to 'success' + * 9. TOKEN_CREATED event #3: NCX is re-created during reorg (group_id: 0) + * 10. REORG finishes + * + * Expected Behavior: + * - HYB token (traditional) REMAINS in database throughout reorg (never deleted) + * - NCX token (nano-created) gets deleted when nc_execution is no longer 'success', then re-created when nc_execution → 'success' + * - Both tokens exist at the end + * - HYB maps to the hybrid transaction (token_id = tx_id for CREATE_TOKEN_TX) + * - NCX maps to the hybrid transaction (created by nano contract syscall) + * - NCX TOKEN_CREATED fires TWICE: once before reorg (nc_block: 124ccc...), once after reorg (nc_block: 5ffca1...) 
+ * + * This validates that: + * - Traditional CREATE_TOKEN_TX tokens persist through reorg (not affected by nc_execution changes) + * - Nano-created tokens are deleted when nc_execution is no longer 'success' + * - Nano-created tokens are re-created when nc_execution goes back to 'success' + * - TOKEN_CREATED events are properly fired during reorg for nano-created tokens + * - Token creation mappings are correct for both token types + */ +describe('token created with reorg scenario', () => { + beforeAll(async () => { + jest.spyOn(Services, 'fetchMinRewardBlocks').mockImplementation(async () => 300); + await cleanDatabase(mysql); + }); + + it('should keep both traditional and nano-created tokens after reorg', async () => { + const machine = interpret(SyncMachine); + + // @ts-expect-error + await transitionUntilEvent(mysql, machine, TOKEN_CREATED_HYBRID_WITH_REORG_LAST_EVENT); + + // Query all tokens from the database + const [allTokens] = await mysql.query('SELECT * FROM `token`'); + + // Should have exactly 2 tokens: HYB (traditional) and NCX (nano-created) + expect(allTokens.length).toBe(2); + + // Find the HYB token (traditional CREATE_TOKEN_TX) + const hybToken = allTokens.find(t => t.symbol === 'HYB'); + expect(hybToken).toBeDefined(); + expect(hybToken?.name).toBe('HYB'); + expect(hybToken?.symbol).toBe('HYB'); + + // Find the NCX token (nano-created) + const ncxToken = allTokens.find(t => t.symbol === 'NCX'); + expect(ncxToken).toBeDefined(); + expect(ncxToken?.name).toBe('NC Extra Token'); + expect(ncxToken?.symbol).toBe('NCX'); + + // Verify token creation mappings + // HYB is a CREATE_TOKEN_TX token, so token_id = tx_id + // The HYB token itself IS the transaction + expect(hybToken!.id).toBe('0a0166cf0d73e3aaf85678f63ae4c0c87c6ca9cef138bf945837dbe7197b8b75'); + + const [hybMappings] = await mysql.query( + 'SELECT * FROM `token_creation` WHERE token_id = ?', + [hybToken!.id] + ); + expect(hybMappings.length).toBe(1); + 
expect(hybMappings[0].tx_id).toBe('0a0166cf0d73e3aaf85678f63ae4c0c87c6ca9cef138bf945837dbe7197b8b75'); + + // NCX is nano-created, so it maps to the nano transaction (the hybrid tx) + const [ncxMappings] = await mysql.query( + 'SELECT * FROM `token_creation` WHERE token_id = ?', + [ncxToken!.id] + ); + expect(ncxMappings.length).toBe(1); + // The NCX token maps to the nano transaction that created it + expect(ncxMappings[0].tx_id).toBeDefined(); + }, 30000); + + it('should create NCX token, delete it during reorg, and re-create it after reorg', async () => { + const ncxTokenId = '82d79eb32061fc69b55dad901b6daba7ce1496b7c40bf3c2709c0a14192265ee'; + + // Helper to check if NCX token exists in DB + const getNcxToken = async () => { + const [tokens] = await mysql.query( + 'SELECT * FROM `token` WHERE id = ?', + [ncxTokenId] + ); + return tokens.length > 0 ? tokens[0] : null; + }; + + // Step 1: Run until event 28 (first TOKEN_CREATED for NCX) + // NCX should be created when nc_execution = success + await cleanDatabase(mysql); + const machine1 = interpret(SyncMachine); + // @ts-expect-error + await transitionUntilEvent(mysql, machine1, 28); + + const ncxAfterCreation = await getNcxToken(); + expect(ncxAfterCreation).not.toBeNull(); + expect(ncxAfterCreation?.symbol).toBe('NCX'); + expect(ncxAfterCreation?.name).toBe('NC Extra Token'); + + // Step 2: Run until event 34 (VERTEX_METADATA_CHANGED with nc_execution = pending) + // NCX should be deleted when nc_execution changes from success to pending + await cleanDatabase(mysql); + const machine2 = interpret(SyncMachine); + // @ts-expect-error + await transitionUntilEvent(mysql, machine2, 34); + + const ncxAfterReorg = await getNcxToken(); + expect(ncxAfterReorg).toBeNull(); // Token should be deleted + + // Step 3: Run until event 47 (second TOKEN_CREATED for NCX) + // NCX should be re-created when nc_execution = success again + await cleanDatabase(mysql); + const machine3 = interpret(SyncMachine); + // @ts-expect-error + 
await transitionUntilEvent(mysql, machine3, 47); + + const ncxAfterRecreation = await getNcxToken(); + expect(ncxAfterRecreation).not.toBeNull(); + expect(ncxAfterRecreation?.symbol).toBe('NCX'); + expect(ncxAfterRecreation?.name).toBe('NC Extra Token'); + }, 60000); + + it('should verify nano execution remains successful after reorg', async () => { + const machine = interpret(SyncMachine); + const receivedEvents: any[] = []; + + // Capture all events during sync + machine.onTransition((state) => { + if (state.context.event) { + receivedEvents.push(state.context.event); + } + }); + + // @ts-expect-error + await transitionUntilEvent(mysql, machine, TOKEN_CREATED_HYBRID_WITH_REORG_LAST_EVENT); + + // Filter for VERTEX_METADATA_CHANGED events for the nano transaction + const nanoTxHash = '0a0166cf0d73e3aaf85678f63ae4c0c87c6ca9cef138bf945837dbe7197b8b75'; + const metadataChangedEvents = receivedEvents.filter( + (e) => e.event?.type === 'VERTEX_METADATA_CHANGED' && + e.event?.data?.hash === nanoTxHash + ); + + // Find metadata changes with nc_execution: success + const successEvents = metadataChangedEvents.filter( + (e) => e.event?.data?.metadata?.nc_execution === 'success' + ); + + // The nano execution should remain at SUCCESS even after reorg + // because the transaction is included in the winning branch + expect(successEvents.length).toBeGreaterThan(0); + + // Verify the last metadata event for this tx still shows success + const lastEvent = metadataChangedEvents[metadataChangedEvents.length - 1]; + expect(lastEvent.event.data.metadata.nc_execution).toBe('success'); + expect(lastEvent.event.data.metadata.first_block).toBeDefined(); + }, 30000); + + it('should verify nano execution changes during reorg', async () => { + const machine = interpret(SyncMachine); + const receivedEvents: any[] = []; + + // Capture all events during sync + machine.onTransition((state) => { + if (state.context.event) { + receivedEvents.push(state.context.event); + } + }); + + // 
@ts-expect-error + await transitionUntilEvent(mysql, machine, TOKEN_CREATED_HYBRID_WITH_REORG_LAST_EVENT); + + // Filter for VERTEX_METADATA_CHANGED events for the nano transaction + const nanoTxHash = '0a0166cf0d73e3aaf85678f63ae4c0c87c6ca9cef138bf945837dbe7197b8b75'; + const metadataChangedEvents = receivedEvents.filter( + (e) => e.event?.type === 'VERTEX_METADATA_CHANGED' && + e.event?.data?.hash === nanoTxHash + ); + + // Find events within the reorg group (group_id: 0) + const reorgGroupEvents = metadataChangedEvents.filter( + (e) => e.event?.group_id === 0 + ); + + // In this scenario, if there are reorg events for the nano tx, + // the nano execution should remain 'success' because the transaction + // is re-confirmed in the winning chain + if (reorgGroupEvents.length > 0) { + const successExecEvent = reorgGroupEvents.find( + (e) => e.event?.data?.metadata?.nc_execution === 'success' && + e.event?.data?.metadata?.first_block !== null + ); + expect(successExecEvent).toBeDefined(); + } + }, 30000); + + it('should verify REORG events are properly detected', async () => { + const machine = interpret(SyncMachine); + const receivedEvents: any[] = []; + + // Capture all events during sync + machine.onTransition((state) => { + if (state.context.event) { + receivedEvents.push(state.context.event); + } + }); + + // @ts-expect-error + await transitionUntilEvent(mysql, machine, TOKEN_CREATED_HYBRID_WITH_REORG_LAST_EVENT); + + // Find REORG_STARTED and REORG_FINISHED events + const reorgStarted = receivedEvents.find( + (e) => e.event?.type === 'REORG_STARTED' + ); + const reorgFinished = receivedEvents.find( + (e) => e.event?.type === 'REORG_FINISHED' + ); + + // Verify both events exist + expect(reorgStarted).toBeDefined(); + expect(reorgFinished).toBeDefined(); + + // Verify REORG_STARTED has group_id (0 in this case) + expect(reorgStarted.event.group_id).toBe(0); + + // Verify REORG_STARTED has expected data + expect(reorgStarted.event.data.reorg_size).toBe(1); + // 
These hashes are deterministic from the simulator + expect(reorgStarted.event.data.previous_best_block).toBeDefined(); + expect(reorgStarted.event.data.new_best_block).toBeDefined(); + expect(reorgStarted.event.data.common_block).toBeDefined(); + }, 30000); +}); diff --git a/packages/daemon/__tests__/integration/token_creation.test.ts b/packages/daemon/__tests__/integration/token_creation.test.ts new file mode 100644 index 00000000..afcf0f3a --- /dev/null +++ b/packages/daemon/__tests__/integration/token_creation.test.ts @@ -0,0 +1,128 @@ +/** + * Copyright (c) Hathor Labs and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +import * as Services from '../../src/services'; +import { SyncMachine } from '../../src/machines'; +import { interpret } from 'xstate'; +import { getDbConnection, getTokensCreatedByTx } from '../../src/db'; +import { Connection } from 'mysql2/promise'; +import { cleanDatabase, transitionUntilEvent } from './utils'; + +import { + DB_NAME, + DB_USER, + DB_PORT, + DB_PASS, + DB_ENDPOINT, +} from './config'; + +jest.mock('../../src/config', () => { + return { + __esModule: true, + default: jest.fn(() => ({})), + }; +}); + +jest.mock('../../src/utils/aws', () => { + return { + sendRealtimeTx: jest.fn(), + invokeOnTxPushNotificationRequestedLambda: jest.fn(), + }; +}); + +import getConfig from '../../src/config'; + +const TOKEN_CREATION_PORT = 8093; +const TOKEN_CREATION_LAST_EVENT = 46; + +// @ts-expect-error +getConfig.mockReturnValue({ + NETWORK: 'testnet', + SERVICE_NAME: 'daemon-test', + CONSOLE_LEVEL: 'debug', + TX_CACHE_SIZE: 100, + BLOCK_REWARD_LOCK: 300, + FULLNODE_PEER_ID: 'simulator_peer_id', + STREAM_ID: 'simulator_stream_id', + FULLNODE_NETWORK: 'unittests', + FULLNODE_HOST: `127.0.0.1:${TOKEN_CREATION_PORT}`, + USE_SSL: false, + DB_ENDPOINT, + DB_NAME, + DB_USER, + DB_PASS, + DB_PORT, + ACK_TIMEOUT_MS: 20000, +}); + +let mysql: 
Connection; + +beforeAll(async () => { + mysql = await getDbConnection(); + await cleanDatabase(mysql); +}); + +beforeEach(async () => { + await cleanDatabase(mysql); +}); + +afterAll(async () => { + jest.resetAllMocks(); + if (mysql && 'release' in mysql) { + // @ts-expect-error - pooled connection has release method + await mysql.release(); + } +}); + +// Mock checkForMissedEvents since HTTP API is not available in test simulators +jest.spyOn(Services, 'checkForMissedEvents').mockImplementation(async () => ({ + hasNewEvents: false, + events: [], +})); + +describe('token creation scenario', () => { + beforeAll(async () => { + jest.spyOn(Services, 'fetchMinRewardBlocks').mockImplementation(async () => 300); + await cleanDatabase(mysql); + }); + + it('should sync and verify two tokens were created', async () => { + const machine = interpret(SyncMachine); + + // @ts-expect-error + await transitionUntilEvent(mysql, machine, TOKEN_CREATION_LAST_EVENT); + + // Query all tokens from the database + const [allTokens] = await mysql.query('SELECT * FROM `token`'); + + // We expect exactly 2 tokens to be created: + // 1. RGT (via regular CREATE_TOKEN_TX) + // 2. 
NC Token (via nano contract syscall) + expect(allTokens.length).toBe(2); + + // Find tokens by name + const rgtToken = allTokens.find(t => t.name === 'RGT'); + const ncToken = allTokens.find(t => t.name === 'NC Token'); + + // Verify RGT token was created + expect(rgtToken).toBeDefined(); + expect(rgtToken?.name).toBe('RGT'); + expect(rgtToken?.symbol).toBe('RGT'); + + // Verify NC Token was created + expect(ncToken).toBeDefined(); + expect(ncToken?.name).toBe('NC Token'); + expect(ncToken?.symbol).toBe('NCT'); + + // Verify token creation mappings exist + const [tokenCreationMappings] = await mysql.query( + 'SELECT * FROM `token_creation` WHERE token_id IN (?, ?)', + [rgtToken!.id, ncToken!.id] + ); + expect(tokenCreationMappings.length).toBe(2); + }, 30000); +}); diff --git a/packages/daemon/__tests__/integration/utils/index.ts b/packages/daemon/__tests__/integration/utils/index.ts index 43822aef..f62d6d84 100644 --- a/packages/daemon/__tests__/integration/utils/index.ts +++ b/packages/daemon/__tests__/integration/utils/index.ts @@ -29,6 +29,7 @@ export const cleanDatabase = async (mysql: Connection): Promise => { 'miner', 'sync_metadata', 'token', + 'token_creation', 'transaction', 'tx_output', 'tx_proposal', @@ -186,15 +187,44 @@ export const validateWalletBalances = async ( } }; -export async function transitionUntilEvent(mysql: Connection, machine: Interpreter, eventId: number) { - return await new Promise((resolve) => { +const DEFAULT_TRANSITION_TIMEOUT_MS = 60000; // 60 seconds + +export async function transitionUntilEvent( + mysql: Connection, + machine: Interpreter, + eventId: number, + timeoutMs: number = DEFAULT_TRANSITION_TIMEOUT_MS +) { + return await new Promise((resolve, reject) => { + let resolved = false; + + const timeout = setTimeout(() => { + if (!resolved) { + resolved = true; + machine.stop(); + reject(new Error(`transitionUntilEvent timed out after ${timeoutMs}ms waiting for event ${eventId}`)); + } + }, timeoutMs); + 
machine.onTransition(async (state) => { - if (state.matches('CONNECTED.idle')) { - const lastSyncedEvent = await getLastSyncedEvent(mysql); - if (lastSyncedEvent?.last_event_id === eventId) { + if (resolved) return; + + try { + if (state.matches('CONNECTED.idle')) { + const lastSyncedEvent = await getLastSyncedEvent(mysql); + if (lastSyncedEvent?.last_event_id === eventId) { + resolved = true; + clearTimeout(timeout); + machine.stop(); + resolve(); + } + } + } catch (error) { + if (!resolved) { + resolved = true; + clearTimeout(timeout); machine.stop(); - - resolve(); + reject(error); } } }); diff --git a/packages/daemon/__tests__/services/services.test.ts b/packages/daemon/__tests__/services/services.test.ts index 6b874bd4..892624fc 100644 --- a/packages/daemon/__tests__/services/services.test.ts +++ b/packages/daemon/__tests__/services/services.test.ts @@ -22,6 +22,8 @@ import { getAddressWalletInfo, storeTokenInformation, getMaxIndicesForWallets, + getTokensCreatedByTx, + deleteTokens, } from '../../src/db'; import { fetchInitialState, @@ -32,6 +34,7 @@ import { metadataDiff, handleReorgStarted, checkForMissedEvents, + handleNcExecVoided, } from '../../src/services'; import logger from '../../src/logger'; import { @@ -93,6 +96,9 @@ jest.mock('../../src/db', () => ({ getMaxIndicesForWallets: jest.fn(() => new Map([ ['wallet1', { maxAmongAddresses: 10, maxWalletIndex: 15 }] ])), + getTokensCreatedByTx: jest.fn(() => []), + deleteTokens: jest.fn(), + insertTokenCreation: jest.fn(), })); jest.mock('../../src/utils', () => ({ @@ -629,7 +635,7 @@ describe('handleVertexAccepted', () => { expect(mockDb.destroy).toHaveBeenCalled(); }); - it('should handle add tokens to database on token creation tx', async () => { + it('should handle token creation tx without storing token info (tokens created via TOKEN_CREATED event)', async () => { const tokenName = 'TEST_TOKEN'; const tokenSymbol = 'TST_TKN'; const hash = 
'000013f562dc216890f247688028754a49d21dbb2b1f7731f840dc65585b1d57'; @@ -679,7 +685,7 @@ describe('handleVertexAccepted', () => { await handleVertexAccepted(context as any, {} as any); - expect(storeTokenInformation).toHaveBeenCalledWith(mockDb, hash, tokenName, tokenSymbol); + expect(storeTokenInformation).not.toHaveBeenCalled(); expect(mockDb.commit).toHaveBeenCalled(); expect(mockDb.destroy).toHaveBeenCalled(); }); @@ -828,24 +834,129 @@ describe('metadataDiff', () => { expect(result.type).toBe('IGNORE'); }); - it('should return IGNORE for other scenarios', async () => { + it('should return IGNORE when nc_execution is not success but no nano tokens exist', async () => { const event = { event: { event: { data: { hash: 'mockHash', - metadata: { voided_by: [], first_block: [] }, + metadata: { voided_by: [], first_block: [], nc_execution: 'pending' }, + }, + }, + }, + }; + const mockDbTransaction = { height: null, voided: false }; + (getTransactionById as jest.Mock).mockResolvedValue(mockDbTransaction); + (getTokensCreatedByTx as jest.Mock).mockResolvedValue([]); // No tokens + + const result = await metadataDiff({} as any, event as any); + expect(result.type).toBe('IGNORE'); + }); + + it('should return IGNORE when nc_execution is success', async () => { + const event = { + event: { + event: { + data: { + hash: 'mockHash', + metadata: { voided_by: [], first_block: [], nc_execution: 'success' }, + }, + }, + }, + }; + const mockDbTransaction = { height: null, voided: false }; + (getTransactionById as jest.Mock).mockResolvedValue(mockDbTransaction); + + const result = await metadataDiff({} as any, event as any); + expect(result.type).toBe('IGNORE'); + // Should not call getTokensCreatedByTx when nc_execution is success + expect(getTokensCreatedByTx).not.toHaveBeenCalled(); + }); + + it('should return NC_EXEC_VOIDED when nc_execution changes from success and nano tokens exist', async () => { + const txHash = 'nano-tx-hash'; + const event = { + event: { + event: { + data: 
{ + hash: txHash, + metadata: { voided_by: [], first_block: [], nc_execution: 'pending' }, + }, + }, + }, + }; + const mockDbTransaction = { height: 1, voided: false }; + (getTransactionById as jest.Mock).mockResolvedValue(mockDbTransaction); + // Return nano-created tokens (token_id != tx_id) + (getTokensCreatedByTx as jest.Mock).mockResolvedValue(['nano-token-001', 'nano-token-002']); + + const result = await metadataDiff({} as any, event as any); + expect(result.type).toBe('NC_EXEC_VOIDED'); + expect(getTokensCreatedByTx).toHaveBeenCalledWith(expect.anything(), txHash); + }); + + it('should return IGNORE when nc_execution is not success but only traditional tokens exist', async () => { + const txHash = 'create-token-tx-hash'; + const event = { + event: { + event: { + data: { + hash: txHash, + metadata: { voided_by: [], first_block: [], nc_execution: 'pending' }, }, }, }, }; const mockDbTransaction = { height: 1, voided: false }; (getTransactionById as jest.Mock).mockResolvedValue(mockDbTransaction); + // Return only traditional token (token_id = tx_id) + (getTokensCreatedByTx as jest.Mock).mockResolvedValue([txHash]); const result = await metadataDiff({} as any, event as any); expect(result.type).toBe('IGNORE'); }); + it('should return NC_EXEC_VOIDED for hybrid tx with both traditional and nano tokens', async () => { + const txHash = 'hybrid-tx-hash'; + const event = { + event: { + event: { + data: { + hash: txHash, + metadata: { voided_by: [], first_block: [], nc_execution: 'pending' }, + }, + }, + }, + }; + const mockDbTransaction = { height: 1, voided: false }; + (getTransactionById as jest.Mock).mockResolvedValue(mockDbTransaction); + // Return both traditional (token_id = tx_id) and nano tokens + (getTokensCreatedByTx as jest.Mock).mockResolvedValue([txHash, 'nano-token-001']); + + const result = await metadataDiff({} as any, event as any); + expect(result.type).toBe('NC_EXEC_VOIDED'); + }); + + it('should return NC_EXEC_VOIDED when nc_execution is null and 
nano tokens exist', async () => { + const txHash = 'nano-tx-hash'; + const event = { + event: { + event: { + data: { + hash: txHash, + metadata: { voided_by: [], first_block: [], nc_execution: null }, + }, + }, + }, + }; + const mockDbTransaction = { height: 1, voided: false }; + (getTransactionById as jest.Mock).mockResolvedValue(mockDbTransaction); + (getTokensCreatedByTx as jest.Mock).mockResolvedValue(['nano-token-001']); + + const result = await metadataDiff({} as any, event as any); + expect(result.type).toBe('NC_EXEC_VOIDED'); + }); + it('should handle errors and destroy the database connection', async () => { const event = { event: { @@ -1229,3 +1340,135 @@ describe('checkForMissedEvents', () => { ); }); }); + +describe('handleNcExecVoided', () => { + const mockDb = { + beginTransaction: jest.fn(), + commit: jest.fn(), + rollback: jest.fn(), + destroy: jest.fn(), + }; + + beforeEach(() => { + jest.clearAllMocks(); + (getDbConnection as jest.Mock).mockResolvedValue(mockDb); + }); + + it('should not delete any tokens when no tokens exist for the transaction', async () => { + const txHash = 'tx-without-tokens'; + const context = { + event: { + event: { + id: 100, + data: { + hash: txHash, + }, + }, + }, + }; + + (getTokensCreatedByTx as jest.Mock).mockResolvedValue([]); + + await handleNcExecVoided(context as any); + + expect(getTokensCreatedByTx).toHaveBeenCalledWith(mockDb, txHash); + expect(deleteTokens).not.toHaveBeenCalled(); + expect(mockDb.commit).toHaveBeenCalled(); + }); + + it('should delete only nano-created tokens when tokens exist', async () => { + const txHash = 'nano-tx-hash'; + const nanoToken1 = 'nano-token-001'; + const nanoToken2 = 'nano-token-002'; + const context = { + event: { + event: { + id: 100, + data: { + hash: txHash, + }, + }, + }, + }; + + // Nano tokens have token_id != tx_id + (getTokensCreatedByTx as jest.Mock).mockResolvedValue([nanoToken1, nanoToken2]); + + await handleNcExecVoided(context as any); + + 
expect(getTokensCreatedByTx).toHaveBeenCalledWith(mockDb, txHash); + expect(deleteTokens).toHaveBeenCalledWith(mockDb, [nanoToken1, nanoToken2]); + expect(mockDb.commit).toHaveBeenCalled(); + }); + + it('should NOT delete traditional CREATE_TOKEN_TX tokens (where token_id = tx_id)', async () => { + const txHash = 'create-token-tx-hash'; + const context = { + event: { + event: { + id: 100, + data: { + hash: txHash, + }, + }, + }, + }; + + // Traditional CREATE_TOKEN_TX has token_id = tx_id + (getTokensCreatedByTx as jest.Mock).mockResolvedValue([txHash]); + + await handleNcExecVoided(context as any); + + expect(getTokensCreatedByTx).toHaveBeenCalledWith(mockDb, txHash); + expect(deleteTokens).not.toHaveBeenCalled(); // Should NOT delete traditional token + expect(mockDb.commit).toHaveBeenCalled(); + }); + + it('should delete nano tokens but keep traditional token in hybrid transaction', async () => { + const txHash = 'hybrid-tx-hash'; + const nanoToken = 'nano-created-token'; + const context = { + event: { + event: { + id: 100, + data: { + hash: txHash, + }, + }, + }, + }; + + // Hybrid tx has both: traditional (token_id = tx_id) and nano (token_id != tx_id) + (getTokensCreatedByTx as jest.Mock).mockResolvedValue([txHash, nanoToken]); + + await handleNcExecVoided(context as any); + + expect(getTokensCreatedByTx).toHaveBeenCalledWith(mockDb, txHash); + // Should only delete nano token, not the traditional one + expect(deleteTokens).toHaveBeenCalledWith(mockDb, [nanoToken]); + expect(mockDb.commit).toHaveBeenCalled(); + }); + + it('should rollback on error and rethrow', async () => { + const txHash = 'error-tx-hash'; + const context = { + event: { + event: { + id: 100, + data: { + hash: txHash, + }, + }, + }, + }; + + const error = new Error('Database error'); + (getTokensCreatedByTx as jest.Mock).mockRejectedValue(error); + + await expect(handleNcExecVoided(context as any)).rejects.toThrow('Database error'); + + expect(mockDb.rollback).toHaveBeenCalled(); + 
expect(mockDb.commit).not.toHaveBeenCalled(); + expect(logger.error).toHaveBeenCalledWith('handleNcExecVoided error: ', error); + }); +}); diff --git a/packages/daemon/__tests__/services/services_with_db.test.ts b/packages/daemon/__tests__/services/services_with_db.test.ts index 5ad79238..04319680 100644 --- a/packages/daemon/__tests__/services/services_with_db.test.ts +++ b/packages/daemon/__tests__/services/services_with_db.test.ts @@ -6,7 +6,7 @@ */ import * as db from '../../src/db'; -import { handleVoidedTx, voidTx } from '../../src/services'; +import { handleVoidedTx, voidTx, handleTokenCreated } from '../../src/services'; import { LRU } from '../../src/utils'; import { addOrUpdateTx, @@ -950,4 +950,800 @@ describe('wallet balance voiding bug', () => { expect(utxo2AfterVoid!.txProposalIndex).toBeNull(); expect(utxo2AfterVoid!.spentBy).toBeNull(); }); + + it('should delete tokens when voiding transaction that created them', async () => { + expect.hasAssertions(); + await cleanDatabase(mysql); + + const txId = 'nano-tx-001'; + const tokenId1 = 'token001'; + const tokenId2 = 'token002'; + const tokenId3 = 'token003'; + + // Add tokens to database + await db.storeTokenInformation(mysql, tokenId1, 'Token 1', 'TK1'); + await db.storeTokenInformation(mysql, tokenId2, 'Token 2', 'TK2'); + await db.storeTokenInformation(mysql, tokenId3, 'Token 3', 'TK3'); + + // Create mappings (simulate nano contract creating multiple tokens) + await db.insertTokenCreation(mysql, tokenId1, txId, 'block-001'); + await db.insertTokenCreation(mysql, tokenId2, txId, 'block-001'); + await db.insertTokenCreation(mysql, tokenId3, txId, 'block-001'); + + // Verify tokens and mappings exist + let token1 = await db.getTokenInformation(mysql, tokenId1); + expect(token1).not.toBeNull(); + let tokens = await db.getTokensCreatedByTx(mysql, txId); + expect(tokens).toHaveLength(3); + + // Void the transaction with empty inputs/outputs/tokens + await voidTx(mysql, txId, [], [], [], [], 1); + + // 
Verify all tokens created by this tx were deleted + token1 = await db.getTokenInformation(mysql, tokenId1); + expect(token1).toBeNull(); + + const token2 = await db.getTokenInformation(mysql, tokenId2); + expect(token2).toBeNull(); + + const token3 = await db.getTokenInformation(mysql, tokenId3); + expect(token3).toBeNull(); + + // Verify mappings were also deleted + tokens = await db.getTokensCreatedByTx(mysql, txId); + expect(tokens).toHaveLength(0); + }); +}); + +describe('handleTokenCreated (db)', () => { + beforeEach(async () => { + await cleanDatabase(mysql); + jest.clearAllMocks(); + }); + + it('should store token and create mapping', async () => { + expect.hasAssertions(); + + const tokenId = 'token-uid-001'; + const txId = 'tx-001'; + const tokenName = 'My Token'; + const tokenSymbol = 'MTK'; + + const context = { + socket: expect.any(Object), + healthcheck: expect.any(Object), + retryAttempt: 0, + initialEventId: null, + txCache: new LRU(100), + event: { + stream_id: 'stream-id', + peer_id: 'peer-id', + network: 'testnet', + type: 'FULLNODE_EVENT', + latest_event_id: 10, + event: { + id: 11, + timestamp: 1234567890.123, + type: 'TOKEN_CREATED', + data: { + token_uid: tokenId, + nc_exec_info: { + nc_tx: txId, + nc_block: 'block-001', + }, + token_name: tokenName, + token_symbol: tokenSymbol, + token_version: 'TOKEN_VERSION_1', + initial_amount: 1000000, + }, + group_id: null, + }, + }, + }; + + await handleTokenCreated(context as any); + + // Verify token was stored + const token = await db.getTokenInformation(mysql, tokenId); + expect(token).not.toBeNull(); + expect(token?.name).toBe(tokenName); + expect(token?.symbol).toBe(tokenSymbol); + + // Verify mapping was created + const tokensCreated = await db.getTokensCreatedByTx(mysql, txId); + expect(tokensCreated).toHaveLength(1); + expect(tokensCreated[0]).toBe(tokenId); + + // Verify last synced event was updated + const lastEvent = await db.getLastSyncedEvent(mysql); + expect(lastEvent).not.toBeNull(); + 
expect(lastEvent?.last_event_id).toBe(11); + }); + + it('should handle multiple tokens from same nano contract', async () => { + expect.hasAssertions(); + + const txId = 'nano-tx-001'; + const tokenId1 = 'token-uid-001'; + const tokenId2 = 'token-uid-002'; + + // Create first TOKEN_CREATED event + const context1 = { + socket: expect.any(Object), + healthcheck: expect.any(Object), + retryAttempt: 0, + initialEventId: null, + txCache: new LRU(100), + event: { + stream_id: 'stream-id', + peer_id: 'peer-id', + network: 'testnet', + type: 'FULLNODE_EVENT', + latest_event_id: 10, + event: { + id: 11, + timestamp: 1234567890.123, + type: 'TOKEN_CREATED', + data: { + token_uid: tokenId1, + nc_exec_info: { + nc_tx: txId, + nc_block: 'block-001', + }, + token_name: 'Token 1', + token_symbol: 'TK1', + token_version: 'TOKEN_VERSION_1', + initial_amount: 1000000, + }, + group_id: null, + }, + }, + }; + + // Create second TOKEN_CREATED event + const context2 = { + ...context1, + event: { + ...context1.event, + event: { + ...context1.event.event, + id: 12, + data: { + token_uid: tokenId2, + nc_exec_info: { + nc_tx: txId, + nc_block: 'block-001', + }, + token_name: 'Token 2', + token_symbol: 'TK2', + token_version: 'TOKEN_VERSION_1', + initial_amount: 2000000, + }, + }, + }, + }; + + await handleTokenCreated(context1 as any); + await handleTokenCreated(context2 as any); + + // Verify both tokens were stored + const token1 = await db.getTokenInformation(mysql, tokenId1); + expect(token1).not.toBeNull(); + expect(token1?.name).toBe('Token 1'); + + const token2 = await db.getTokenInformation(mysql, tokenId2); + expect(token2).not.toBeNull(); + expect(token2?.name).toBe('Token 2'); + + // Verify both mappings point to same tx + const tokensCreated = await db.getTokensCreatedByTx(mysql, txId); + expect(tokensCreated).toHaveLength(2); + expect(tokensCreated).toContain(tokenId1); + expect(tokensCreated).toContain(tokenId2); + }); + + it('should add new token alongside existing token with 
different token_id from same tx', async () => { + expect.hasAssertions(); + + const txId = 'nano-tx-reorg'; + const oldTokenId = 'old-token-uid-001'; + const newTokenId = 'new-token-uid-001'; + const tokenName = 'NC Token'; + const tokenSymbol = 'NCT'; + + // First, create an existing token (simulating previous nano execution) + await db.storeTokenInformation(mysql, oldTokenId, tokenName, tokenSymbol); + await db.insertTokenCreation(mysql, oldTokenId, txId, 'block-001'); + + // Verify old token exists + let oldToken = await db.getTokenInformation(mysql, oldTokenId); + expect(oldToken).not.toBeNull(); + expect(oldToken?.name).toBe(tokenName); + + // Verify mapping exists for old token + let tokensCreated = await db.getTokensCreatedByTx(mysql, txId); + expect(tokensCreated).toHaveLength(1); + expect(tokensCreated[0]).toBe(oldTokenId); + + // Now simulate a TOKEN_CREATED event with a new token_id (due to reorg) + const context = { + socket: expect.any(Object), + healthcheck: expect.any(Object), + retryAttempt: 0, + initialEventId: null, + txCache: new LRU(100), + event: { + stream_id: 'stream-id', + peer_id: 'peer-id', + network: 'testnet', + type: 'FULLNODE_EVENT', + latest_event_id: 20, + event: { + id: 21, + timestamp: 1234567890.123, + type: 'TOKEN_CREATED', + data: { + token_uid: newTokenId, + nc_exec_info: { + nc_tx: txId, + nc_block: 'block-002', + }, + token_name: tokenName, + token_symbol: tokenSymbol, + token_version: 'TOKEN_VERSION_1', + initial_amount: 1000000, + }, + group_id: 0, + }, + }, + }; + + await handleTokenCreated(context as any); + + // Old token is DELETED because first_block changed from 'block-001' to 'block-002' + // getReexecNanoTokens finds tokens with different first_block and deletes them + oldToken = await db.getTokenInformation(mysql, oldTokenId); + expect(oldToken).toBeNull(); + + // Verify new token was created + const newToken = await db.getTokenInformation(mysql, newTokenId); + expect(newToken).not.toBeNull(); + 
expect(newToken?.name).toBe(tokenName); + expect(newToken?.symbol).toBe(tokenSymbol); + + // Only new token is mapped to the tx (old one was deleted) + tokensCreated = await db.getTokensCreatedByTx(mysql, txId); + expect(tokensCreated).toHaveLength(1); + expect(tokensCreated).toContain(newTokenId); + + // Verify last synced event was updated + const lastEvent = await db.getLastSyncedEvent(mysql); + expect(lastEvent).not.toBeNull(); + expect(lastEvent?.last_event_id).toBe(21); + }); + + it('should delete old tokens when new TOKEN_CREATED arrives with different first_block', async () => { + expect.hasAssertions(); + + const txId = 'nano-tx-multiple-reorg'; + const oldTokenId1 = 'old-token-001'; + const oldTokenId2 = 'old-token-002'; + const newTokenId = 'new-token-001'; + + // Create two existing tokens from previous nano execution + await db.storeTokenInformation(mysql, oldTokenId1, 'Old Token 1', 'OT1'); + await db.insertTokenCreation(mysql, oldTokenId1, txId, 'block-001'); + + await db.storeTokenInformation(mysql, oldTokenId2, 'Old Token 2', 'OT2'); + await db.insertTokenCreation(mysql, oldTokenId2, txId, 'block-001'); + + // Verify both old tokens exist + let oldToken1 = await db.getTokenInformation(mysql, oldTokenId1); + let oldToken2 = await db.getTokenInformation(mysql, oldTokenId2); + expect(oldToken1).not.toBeNull(); + expect(oldToken2).not.toBeNull(); + + // Verify both mappings exist + let tokensCreated = await db.getTokensCreatedByTx(mysql, txId); + expect(tokensCreated).toHaveLength(2); + expect(tokensCreated).toContain(oldTokenId1); + expect(tokensCreated).toContain(oldTokenId2); + + // Now simulate a TOKEN_CREATED event with a new token_id + const context = { + socket: expect.any(Object), + healthcheck: expect.any(Object), + retryAttempt: 0, + initialEventId: null, + txCache: new LRU(100), + event: { + stream_id: 'stream-id', + peer_id: 'peer-id', + network: 'testnet', + type: 'FULLNODE_EVENT', + latest_event_id: 30, + event: { + id: 31, + timestamp: 
1234567890.123, + type: 'TOKEN_CREATED', + data: { + token_uid: newTokenId, + nc_exec_info: { + nc_tx: txId, + nc_block: 'block-003', + }, + token_name: 'New Token', + token_symbol: 'NT', + token_version: 'TOKEN_VERSION_1', + initial_amount: 3000000, + }, + group_id: 0, + }, + }, + }; + + await handleTokenCreated(context as any); + + // Old tokens are DELETED because first_block changed from 'block-001' to 'block-003' + // getReexecNanoTokens finds tokens with different first_block and deletes them + oldToken1 = await db.getTokenInformation(mysql, oldTokenId1); + oldToken2 = await db.getTokenInformation(mysql, oldTokenId2); + expect(oldToken1).toBeNull(); + expect(oldToken2).toBeNull(); + + // Verify new token was created + const newToken = await db.getTokenInformation(mysql, newTokenId); + expect(newToken).not.toBeNull(); + expect(newToken?.name).toBe('New Token'); + expect(newToken?.symbol).toBe('NT'); + + // Only new token is mapped to the tx (old ones were deleted) + tokensCreated = await db.getTokensCreatedByTx(mysql, txId); + expect(tokensCreated).toHaveLength(1); + expect(tokensCreated).toContain(newTokenId); + }); + + it('should handle TOKEN_CREATED when no existing tokens exist', async () => { + expect.hasAssertions(); + + const txId = 'nano-tx-fresh'; + const tokenId = 'token-uid-fresh'; + const tokenName = 'Fresh Token'; + const tokenSymbol = 'FRT'; + + // Verify no tokens exist for this tx initially + let tokensCreated = await db.getTokensCreatedByTx(mysql, txId); + expect(tokensCreated).toHaveLength(0); + + // Simulate a TOKEN_CREATED event + const context = { + socket: expect.any(Object), + healthcheck: expect.any(Object), + retryAttempt: 0, + initialEventId: null, + txCache: new LRU(100), + event: { + stream_id: 'stream-id', + peer_id: 'peer-id', + network: 'testnet', + type: 'FULLNODE_EVENT', + latest_event_id: 40, + event: { + id: 41, + timestamp: 1234567890.123, + type: 'TOKEN_CREATED', + data: { + token_uid: tokenId, + nc_exec_info: { + nc_tx: 
txId, + nc_block: 'block-004', + }, + token_name: tokenName, + token_symbol: tokenSymbol, + token_version: 'TOKEN_VERSION_1', + initial_amount: 4000000, + }, + group_id: null, + }, + }, + }; + + await handleTokenCreated(context as any); + + // Verify token was created + const token = await db.getTokenInformation(mysql, tokenId); + expect(token).not.toBeNull(); + expect(token?.name).toBe(tokenName); + expect(token?.symbol).toBe(tokenSymbol); + + // Verify mapping was created + tokensCreated = await db.getTokensCreatedByTx(mysql, txId); + expect(tokensCreated).toHaveLength(1); + expect(tokensCreated[0]).toBe(tokenId); + }); + + it('should store first_block when token is created via nano contract', async () => { + expect.hasAssertions(); + + const txId = 'nano-tx-with-block'; + const tokenId = 'token-with-first-block'; + const firstBlock = 'block-hash-123'; + const tokenName = 'Block Token'; + const tokenSymbol = 'BLK'; + + const context = { + socket: expect.any(Object), + healthcheck: expect.any(Object), + retryAttempt: 0, + initialEventId: null, + txCache: new LRU(100), + event: { + stream_id: 'stream-id', + peer_id: 'peer-id', + network: 'testnet', + type: 'FULLNODE_EVENT', + latest_event_id: 50, + event: { + id: 51, + timestamp: 1234567890.123, + type: 'TOKEN_CREATED', + data: { + token_uid: tokenId, + nc_exec_info: { + nc_tx: txId, + nc_block: firstBlock, + }, + token_name: tokenName, + token_symbol: tokenSymbol, + token_version: 'TOKEN_VERSION_1', + initial_amount: 5000000, + }, + group_id: null, + }, + }, + }; + + await handleTokenCreated(context as any); + + // Verify token was created + const token = await db.getTokenInformation(mysql, tokenId); + expect(token).not.toBeNull(); + + // Verify first_block was stored in token_creation table + const [rows] = await mysql.query( + 'SELECT * FROM `token_creation` WHERE `token_id` = ?', + [tokenId] + ); + expect(rows).toHaveLength(1); + expect(rows[0].tx_id).toBe(txId); + expect(rows[0].first_block).toBe(firstBlock); 
+ }); + + it('should store null first_block for traditional CREATE_TOKEN_TX tokens', async () => { + expect.hasAssertions(); + + const tokenId = 'create-token-tx-001'; + const tokenName = 'Traditional Token'; + const tokenSymbol = 'TRD'; + + const context = { + socket: expect.any(Object), + healthcheck: expect.any(Object), + retryAttempt: 0, + initialEventId: null, + txCache: new LRU(100), + event: { + stream_id: 'stream-id', + peer_id: 'peer-id', + network: 'testnet', + type: 'FULLNODE_EVENT', + latest_event_id: 60, + event: { + id: 61, + timestamp: 1234567890.123, + type: 'TOKEN_CREATED', + data: { + token_uid: tokenId, + nc_exec_info: null, // Traditional CREATE_TOKEN_TX has no nc_exec_info + token_name: tokenName, + token_symbol: tokenSymbol, + token_version: 'TOKEN_VERSION_1', + initial_amount: 6000000, + }, + group_id: null, + }, + }, + }; + + await handleTokenCreated(context as any); + + // Verify token was created + const token = await db.getTokenInformation(mysql, tokenId); + expect(token).not.toBeNull(); + + // Verify first_block is null for traditional tokens + const [rows] = await mysql.query( + 'SELECT * FROM `token_creation` WHERE `token_id` = ?', + [tokenId] + ); + expect(rows).toHaveLength(1); + expect(rows[0].tx_id).toBe(tokenId); // For CREATE_TOKEN_TX, tx_id = token_id + expect(rows[0].first_block).toBeNull(); + }); + + it('should handle reorg by deleting tokens with old first_block and inserting with new first_block', async () => { + expect.hasAssertions(); + + const txId = 'nano-tx-reorg-blocks'; + const tokenId = 'token-changing-blocks'; + const oldBlock = 'block-old-123'; + const newBlock = 'block-new-456'; + const tokenName = 'Reorg Token'; + const tokenSymbol = 'RGT'; + + // First, create token with old block + await db.storeTokenInformation(mysql, tokenId, tokenName, tokenSymbol); + await db.insertTokenCreation(mysql, tokenId, txId, oldBlock); + + // Verify token exists with old block + let [rows] = await mysql.query( + 'SELECT * FROM 
`token_creation` WHERE `token_id` = ?', + [tokenId] + ); + expect(rows).toHaveLength(1); + expect(rows[0].first_block).toBe(oldBlock); + + // Simulate token deletion (what would happen when block is voided) + await db.deleteTokens(mysql, [tokenId]); + + // Verify token was deleted + let token = await db.getTokenInformation(mysql, tokenId); + expect(token).toBeNull(); + + // Now simulate TOKEN_CREATED event with new block + const context = { + socket: expect.any(Object), + healthcheck: expect.any(Object), + retryAttempt: 0, + initialEventId: null, + txCache: new LRU(100), + event: { + stream_id: 'stream-id', + peer_id: 'peer-id', + network: 'testnet', + type: 'FULLNODE_EVENT', + latest_event_id: 70, + event: { + id: 71, + timestamp: 1234567890.123, + type: 'TOKEN_CREATED', + data: { + token_uid: tokenId, + nc_exec_info: { + nc_tx: txId, + nc_block: newBlock, + }, + token_name: tokenName, + token_symbol: tokenSymbol, + token_version: 'TOKEN_VERSION_1', + initial_amount: 7000000, + }, + group_id: 0, + }, + }, + }; + + await handleTokenCreated(context as any); + + // Verify token was recreated + token = await db.getTokenInformation(mysql, tokenId); + expect(token).not.toBeNull(); + + // Verify first_block is now the new block + [rows] = await mysql.query( + 'SELECT * FROM `token_creation` WHERE `token_id` = ?', + [tokenId] + ); + expect(rows).toHaveLength(1); + expect(rows[0].first_block).toBe(newBlock); + }); +}); + +describe('Nano contract token deletion on nc_execution change', () => { + beforeEach(async () => { + await cleanDatabase(mysql); + jest.clearAllMocks(); + }); + + it('should delete nano-created tokens when nc_execution changes from success to pending', async () => { + const txId = 'nano-tx-001'; + const tokenId = 'token-from-nano-001'; + + // First, create the token (simulating when nc_execution was SUCCESS) + await db.storeTokenInformation(mysql, tokenId, 'NC Token', 'NCT'); + await db.insertTokenCreation(mysql, tokenId, txId, 'block-001'); + + // Verify 
token exists + let token = await db.getTokenInformation(mysql, tokenId); + expect(token).not.toBeNull(); + + // Verify mapping exists + let tokensCreated = await db.getTokensCreatedByTx(mysql, txId); + expect(tokensCreated).toHaveLength(1); + expect(tokensCreated[0]).toBe(tokenId); + + // Now delete tokens (simulating nc_execution changing to PENDING) + await db.deleteTokens(mysql, [tokenId]); + + // Verify token was deleted + token = await db.getTokenInformation(mysql, tokenId); + expect(token).toBeNull(); + + // Verify mapping was deleted + tokensCreated = await db.getTokensCreatedByTx(mysql, txId); + expect(tokensCreated).toHaveLength(0); + }); + + it('should delete multiple nano-created tokens when nc_execution changes', async () => { + const txId = 'nano-tx-002'; + const tokenId1 = 'token-from-nano-002-1'; + const tokenId2 = 'token-from-nano-002-2'; + + // Create two tokens from the same nano contract execution + await db.storeTokenInformation(mysql, tokenId1, 'NC Token 1', 'NCT1'); + await db.insertTokenCreation(mysql, tokenId1, txId, 'block-001'); + + await db.storeTokenInformation(mysql, tokenId2, 'NC Token 2', 'NCT2'); + await db.insertTokenCreation(mysql, tokenId2, txId, 'block-001'); + + // Verify both tokens exist + let token1 = await db.getTokenInformation(mysql, tokenId1); + let token2 = await db.getTokenInformation(mysql, tokenId2); + expect(token1).not.toBeNull(); + expect(token2).not.toBeNull(); + + // Verify both mappings exist + let tokensCreated = await db.getTokensCreatedByTx(mysql, txId); + expect(tokensCreated).toHaveLength(2); + + // Delete all tokens created by this nano contract + // Note: cascade handles token_creation cleanup + await db.deleteTokens(mysql, tokensCreated); + + // Verify both tokens were deleted + token1 = await db.getTokenInformation(mysql, tokenId1); + token2 = await db.getTokenInformation(mysql, tokenId2); + expect(token1).toBeNull(); + expect(token2).toBeNull(); + + // Verify both mappings were deleted + tokensCreated 
= await db.getTokensCreatedByTx(mysql, txId); + expect(tokensCreated).toHaveLength(0); + }); + + it('should allow token re-creation after deletion (idempotency test)', async () => { + const txId = 'nano-tx-003'; + const tokenId = 'token-from-nano-003'; + const tokenName = 'NC Token Recreated'; + const tokenSymbol = 'NCTR'; + + // Create token first time + await db.storeTokenInformation(mysql, tokenId, tokenName, tokenSymbol); + await db.insertTokenCreation(mysql, tokenId, txId, 'block-001'); + + // Delete it (simulating nc_execution change to PENDING) + await db.deleteTokens(mysql, [tokenId]); + + // Verify it's deleted + let token = await db.getTokenInformation(mysql, tokenId); + expect(token).toBeNull(); + + // Re-create it (simulating nano execution again after reorg) + await db.storeTokenInformation(mysql, tokenId, tokenName, tokenSymbol); + await db.insertTokenCreation(mysql, tokenId, txId, 'block-002'); + + // Verify token was re-created + token = await db.getTokenInformation(mysql, tokenId); + expect(token).not.toBeNull(); + expect(token?.name).toBe(tokenName); + expect(token?.symbol).toBe(tokenSymbol); + + // Verify mapping was re-created + const tokensCreated = await db.getTokensCreatedByTx(mysql, txId); + expect(tokensCreated).toHaveLength(1); + expect(tokensCreated[0]).toBe(tokenId); + }); +}); + +describe('Hybrid transaction token deletion scenarios', () => { + beforeEach(async () => { + await cleanDatabase(mysql); + jest.clearAllMocks(); + }); + + it('should handle hybrid transaction - keep CREATE_TOKEN_TX token when only nc_execution changes', async () => { + const txId = 'hybrid-tx-001'; + const createTokenTxTokenId = txId; // CREATE_TOKEN_TX token has same ID as tx + const nanoTokenId = 'nano-created-token-001'; + + // Step 1: CREATE_TOKEN_TX token arrives (immediately when tx hits mempool) + await db.storeTokenInformation(mysql, createTokenTxTokenId, 'Hybrid Token', 'HYB'); + await db.insertTokenCreation(mysql, createTokenTxTokenId, txId, null); + 
+ // Step 2: Nano executes successfully and creates additional token + await db.storeTokenInformation(mysql, nanoTokenId, 'NC Token', 'NCT'); + await db.insertTokenCreation(mysql, nanoTokenId, txId, 'block-001'); + + // Verify both tokens exist + let createTokenTxToken = await db.getTokenInformation(mysql, createTokenTxTokenId); + let nanoToken = await db.getTokenInformation(mysql, nanoTokenId); + expect(createTokenTxToken).not.toBeNull(); + expect(nanoToken).not.toBeNull(); + + // Verify both mappings exist + let tokensCreated = await db.getTokensCreatedByTx(mysql, txId); + expect(tokensCreated).toHaveLength(2); + + // Step 3: Reorg happens - nc_execution changes to PENDING + // Only delete nano-created token, not the CREATE_TOKEN_TX token + await db.deleteTokens(mysql, [nanoTokenId]); + + // Verify: nano token deleted, CREATE_TOKEN_TX token remains + createTokenTxToken = await db.getTokenInformation(mysql, createTokenTxTokenId); + nanoToken = await db.getTokenInformation(mysql, nanoTokenId); + expect(createTokenTxToken).not.toBeNull(); + expect(nanoToken).toBeNull(); + + // Verify only CREATE_TOKEN_TX token mapping remains + tokensCreated = await db.getTokensCreatedByTx(mysql, txId); + expect(tokensCreated).toHaveLength(1); + expect(tokensCreated[0]).toBe(createTokenTxTokenId); + + // Step 4: Nano executes again - token re-created + await db.storeTokenInformation(mysql, nanoTokenId, 'NC Token', 'NCT'); + await db.insertTokenCreation(mysql, nanoTokenId, txId, 'block-002'); + + // Verify both tokens exist again + createTokenTxToken = await db.getTokenInformation(mysql, createTokenTxTokenId); + nanoToken = await db.getTokenInformation(mysql, nanoTokenId); + expect(createTokenTxToken).not.toBeNull(); + expect(nanoToken).not.toBeNull(); + + // Verify both mappings exist again + tokensCreated = await db.getTokensCreatedByTx(mysql, txId); + expect(tokensCreated).toHaveLength(2); + }); + + it('should handle hybrid transaction - delete all tokens when transaction is 
voided', async () => { + const txId = 'hybrid-tx-002'; + const createTokenTxTokenId = txId; + const nanoTokenId = 'nano-created-token-002'; + + // Create both tokens (CREATE_TOKEN_TX token + nano-created token) + await db.storeTokenInformation(mysql, createTokenTxTokenId, 'Hybrid Token 2', 'HYB2'); + await db.insertTokenCreation(mysql, createTokenTxTokenId, txId, null); + + await db.storeTokenInformation(mysql, nanoTokenId, 'NC Token 2', 'NCT2'); + await db.insertTokenCreation(mysql, nanoTokenId, txId, 'block-001'); + + // Verify both tokens exist + let createTokenTxToken = await db.getTokenInformation(mysql, createTokenTxTokenId); + let nanoToken = await db.getTokenInformation(mysql, nanoTokenId); + expect(createTokenTxToken).not.toBeNull(); + expect(nanoToken).not.toBeNull(); + + // Transaction becomes voided - delete ALL tokens + const tokensCreated = await db.getTokensCreatedByTx(mysql, txId); + await db.deleteTokens(mysql, tokensCreated); + + // Verify both tokens were deleted + createTokenTxToken = await db.getTokenInformation(mysql, createTokenTxTokenId); + nanoToken = await db.getTokenInformation(mysql, nanoTokenId); + expect(createTokenTxToken).toBeNull(); + expect(nanoToken).toBeNull(); + + // Verify all mappings were deleted + const tokensAfterVoid = await db.getTokensCreatedByTx(mysql, txId); + expect(tokensAfterVoid).toHaveLength(0); + }); }); diff --git a/packages/daemon/__tests__/utils.ts b/packages/daemon/__tests__/utils.ts index 8a4e35f9..d032fa3b 100644 --- a/packages/daemon/__tests__/utils.ts +++ b/packages/daemon/__tests__/utils.ts @@ -191,6 +191,7 @@ export const cleanDatabase = async (mysql: MysqlConnection): Promise => { 'address_balance', 'address_tx_history', 'token', + 'token_creation', 'tx_proposal', 'transaction', 'tx_output', diff --git a/packages/daemon/src/db/index.ts b/packages/daemon/src/db/index.ts index 68190df3..2bf24ef8 100644 --- a/packages/daemon/src/db/index.ts +++ b/packages/daemon/src/db/index.ts @@ -1149,6 +1149,94 @@ 
export const storeTokenInformation = async ( ); }; +/** + * Store the mapping between a token and the transaction that created it + * + * @param mysql - Database connection + * @param tokenId - The token UID + * @param txId - Transaction ID that created the token (regular or nano contract) + * @param firstBlock - First block hash that confirmed the nano contract execution (null for traditional CREATE_TOKEN_TX) + */ +export const insertTokenCreation = async ( + mysql: MysqlConnection, + tokenId: string, + txId: string, + firstBlock: string | null = null, +): Promise => { + const entry = { + token_id: tokenId, + tx_id: txId, + first_block: firstBlock, + }; + await mysql.query( + 'INSERT INTO `token_creation` SET ?', + [entry], + ); +}; + +/** + * Get all token IDs created by a specific transaction + * + * @param mysql - Database connection + * @param txId - The transaction ID (regular or nano contract) + * @returns Array of token IDs created by this transaction + */ +export const getTokensCreatedByTx = async ( + mysql: MysqlConnection, + txId: string, +): Promise => { + const [rows] = await mysql.query( + 'SELECT `token_id` FROM `token_creation` WHERE `tx_id` = ?', + [txId], + ); + return rows.map((row) => row.token_id); +}; + +/** + * Get all token IDs created by a transaction that have a different first_block than expected. + * + * This is used to detect nano-created tokens that need to be deleted during a reorg. + * When the first_block changes, the token_id might also change (even though tx_id stays the same), + * so we need to delete tokens with the old first_block and let new TOKEN_CREATED events create new ones. + * + * IMPORTANT: Excludes tokens where token_id = tx_id. These are traditional CREATE_TOKEN_TX tokens + * which should not be affected by nano reorg logic. 
+ * + * @param mysql - Database connection + * @param txId - The transaction ID + * @param currentFirstBlock - The current first_block from the TOKEN_CREATED event + * @returns Array of nano-created token IDs that have a different first_block + */ +export const getReexecNanoTokens = async ( + mysql: MysqlConnection, + txId: string, + currentFirstBlock: string | null, +): Promise => { + const [rows] = await mysql.query( + 'SELECT `token_id` FROM `token_creation` WHERE `tx_id` = ? AND `token_id` != `tx_id` AND NOT (`first_block` <=> ?)', + [txId, currentFirstBlock], + ); + return rows.map((row) => row.token_id); +}; + +/** + * Delete tokens from the token table + * + * @param mysql - Database connection + * @param tokenIds - Array of token IDs to delete + */ +export const deleteTokens = async ( + mysql: MysqlConnection, + tokenIds: string[], +): Promise => { + if (tokenIds.length === 0) return; + + await mysql.query( + 'DELETE FROM `token` WHERE `id` IN (?)', + [tokenIds], + ); +}; + /** * Get tx inputs that are still marked as locked. * diff --git a/packages/daemon/src/guards/index.ts b/packages/daemon/src/guards/index.ts index ba850ae0..5d3a08a0 100644 --- a/packages/daemon/src/guards/index.ts +++ b/packages/daemon/src/guards/index.ts @@ -74,6 +74,20 @@ export const metadataFirstBlock = (_context: Context, event: Event) => { return event.event.type === METADATA_DIFF_EVENT_TYPES.TX_FIRST_BLOCK; }; +/* + * This guard is used during the `handlingMetadataChanged` to check if + * the result was a NC_EXEC_VOIDED event, which means nc_execution changed + * from 'success' to something else (pending, null, etc.) during a reorg. + * We need to delete any nano-created tokens for this transaction. 
+ */ +export const metadataNcExecVoided = (_context: Context, event: Event) => { + if (event.type !== EventTypes.METADATA_DECIDED) { + throw new Error(`Invalid event type on metadataNcExecVoided guard: ${event.type}`); + } + + return event.event.type === METADATA_DIFF_EVENT_TYPES.NC_EXEC_VOIDED; +}; + /* * This guard is used on the `idle` state when an event is received * from the fullnode to detect if this event is a VERTEX_METADATA_CHANGED @@ -264,3 +278,14 @@ export const hasNewEvents = (_context: Context, event: any) => { return event.data.hasNewEvents === true; }; + +/* + * This guard is used to detect if the event is a TOKEN_CREATED event + */ +export const tokenCreated = (_context: Context, event: Event) => { + if (event.type !== EventTypes.FULLNODE_EVENT) { + throw new Error(`Invalid event type on tokenCreated guard: ${event.type}`); + } + + return event.event.event.type === FullNodeEventTypes.TOKEN_CREATED; +}; diff --git a/packages/daemon/src/machines/SyncMachine.ts b/packages/daemon/src/machines/SyncMachine.ts index 2d507ff6..150ec56b 100644 --- a/packages/daemon/src/machines/SyncMachine.ts +++ b/packages/daemon/src/machines/SyncMachine.ts @@ -22,10 +22,12 @@ import { metadataDiff, handleVoidedTx, handleTxFirstBlock, + handleNcExecVoided, updateLastSyncedEvent, fetchInitialState, handleUnvoidedTx, handleReorgStarted, + handleTokenCreated, checkForMissedEvents, } from '../services'; import { @@ -34,6 +36,7 @@ import { metadataUnvoided, metadataNewTx, metadataFirstBlock, + metadataNcExecVoided, metadataChanged, vertexAccepted, invalidPeerId, @@ -44,6 +47,7 @@ import { unchanged, vertexRemoved, reorgStarted, + tokenCreated, hasNewEvents, } from '../guards'; import { @@ -80,7 +84,9 @@ export const CONNECTED_STATES = { handlingVoidedTx: 'handlingVoidedTx', handlingUnvoidedTx: 'handlingUnvoidedTx', handlingFirstBlock: 'handlingFirstBlock', + handlingNcExecVoided: 'handlingNcExecVoided', handlingReorgStarted: 'handlingReorgStarted', + handlingTokenCreated: 
'handlingTokenCreated', checkingForMissedEvents: 'checkingForMissedEvents', }; @@ -182,6 +188,10 @@ export const SyncMachine = Machine({ actions: ['storeEvent'], cond: 'reorgStarted', target: CONNECTED_STATES.handlingReorgStarted, + }, { + actions: ['storeEvent'], + cond: 'tokenCreated', + target: CONNECTED_STATES.handlingTokenCreated, }, { actions: ['storeEvent'], target: CONNECTED_STATES.handlingUnhandledEvent, @@ -215,6 +225,7 @@ export const SyncMachine = Machine({ { target: `#${CONNECTED_STATES.handlingUnvoidedTx}`, cond: 'metadataUnvoided', actions: ['unwrapEvent'] }, { target: `#${CONNECTED_STATES.handlingVertexAccepted}`, cond: 'metadataNewTx', actions: ['unwrapEvent'] }, { target: `#${CONNECTED_STATES.handlingFirstBlock}`, cond: 'metadataFirstBlock', actions: ['unwrapEvent'] }, + { target: `#${CONNECTED_STATES.handlingNcExecVoided}`, cond: 'metadataNcExecVoided', actions: ['unwrapEvent'] }, { target: `#${CONNECTED_STATES.handlingUnhandledEvent}`, cond: 'metadataIgnore' }, ], }, @@ -285,6 +296,18 @@ export const SyncMachine = Machine({ onError: `#${SYNC_MACHINE_STATES.ERROR}`, }, }, + [CONNECTED_STATES.handlingNcExecVoided]: { + id: CONNECTED_STATES.handlingNcExecVoided, + invoke: { + src: 'handleNcExecVoided', + data: (_context: Context, event: Event) => event, + onDone: { + target: 'idle', + actions: ['storeEvent', 'sendAck'], + }, + onError: `#${SYNC_MACHINE_STATES.ERROR}`, + }, + }, [CONNECTED_STATES.handlingReorgStarted]: { id: CONNECTED_STATES.handlingReorgStarted, invoke: { @@ -297,6 +320,18 @@ export const SyncMachine = Machine({ onError: `#${SYNC_MACHINE_STATES.ERROR}`, }, }, + [CONNECTED_STATES.handlingTokenCreated]: { + id: CONNECTED_STATES.handlingTokenCreated, + invoke: { + src: 'handleTokenCreated', + data: (_context: Context, event: Event) => event, + onDone: { + target: 'idle', + actions: ['sendAck', 'storeEvent'], + }, + onError: `#${SYNC_MACHINE_STATES.ERROR}`, + }, + }, [CONNECTED_STATES.checkingForMissedEvents]: { id: 
CONNECTED_STATES.checkingForMissedEvents, invoke: { @@ -333,8 +368,10 @@ export const SyncMachine = Machine({ handleVertexRemoved, handleVoidedTx, handleTxFirstBlock, + handleNcExecVoided, handleUnvoidedTx, handleReorgStarted, + handleTokenCreated, fetchInitialState, metadataDiff, updateLastSyncedEvent, @@ -346,6 +383,7 @@ export const SyncMachine = Machine({ metadataUnvoided, metadataNewTx, metadataFirstBlock, + metadataNcExecVoided, metadataChanged, vertexAccepted, invalidPeerId, @@ -356,6 +394,7 @@ export const SyncMachine = Machine({ unchanged, vertexRemoved, reorgStarted, + tokenCreated, hasNewEvents, }, delays: { BACKOFF_DELAYED_RECONNECT, ACK_TIMEOUT }, diff --git a/packages/daemon/src/services/index.ts b/packages/daemon/src/services/index.ts index 151733ca..f8683f17 100644 --- a/packages/daemon/src/services/index.ts +++ b/packages/daemon/src/services/index.ts @@ -59,6 +59,11 @@ import { getUtxosLockedAtHeight, addMiner, storeTokenInformation, + getTokenInformation, + insertTokenCreation, + getTokensCreatedByTx, + getReexecNanoTokens, + deleteTokens, getLockedUtxoFromInputs, incrementTokensTxCount, getAddressWalletInfo, @@ -91,6 +96,7 @@ export const METADATA_DIFF_EVENT_TYPES = { TX_UNVOIDED: 'TX_UNVOIDED', TX_NEW: 'TX_NEW', TX_FIRST_BLOCK: 'TX_FIRST_BLOCK', + NC_EXEC_VOIDED: 'NC_EXEC_VOIDED', }; const DUPLICATE_TX_ALERT_GRACE_PERIOD = 10; // seconds @@ -104,7 +110,7 @@ export const metadataDiff = async (_context: Context, event: Event) => { const fullNodeEvent = event.event as StandardFullNodeEvent; const { hash, - metadata: { voided_by, first_block }, + metadata: { voided_by, first_block, nc_execution }, } = fullNodeEvent.event.data; const dbTx: DbTransaction | null = await getTransactionById(mysql, hash); @@ -163,6 +169,22 @@ export const metadataDiff = async (_context: Context, event: Event) => { }; } + // Check if nc_execution changed from 'success' to something else. 
+ // If the tx has nano-created tokens in the database (tokens where token_id != tx_id), + // those tokens were created when nc_execution was 'success'. + // If nc_execution is now NOT 'success', we should delete those tokens. + if (nc_execution !== 'success') { + const tokensCreated = await getTokensCreatedByTx(mysql, hash); + const nanoTokens = tokensCreated.filter(tokenId => tokenId !== hash); + + if (nanoTokens.length > 0) { + return { + type: METADATA_DIFF_EVENT_TYPES.NC_EXEC_VOIDED, + originalEvent: event, + }; + } + } + return { type: METADATA_DIFF_EVENT_TYPES.IGNORE, originalEvent: event, @@ -190,6 +212,46 @@ export function isNanoContract(headers: EventTxHeader[]) { return false; } +/** + * Handles a vertex (transaction or block) being accepted by the fullnode. + * + * This function processes VERTEX_METADATA_CHANGED and NEW_VERTEX_ACCEPTED events. + * It stores the transaction in the database, updates wallet balances, and handles + * various edge cases related to token creation and nano contract execution. + * + * Token Deletion Edge Cases: + * + * Tokens can be created in three different ways, each requiring different deletion rules: + * + * 1. **Pure CREATE_TOKEN_TX (no nano headers)** + * - Token created immediately when transaction hits mempool + * - Token deletion rule: Delete ONLY when transaction becomes voided + * - Example: Standard custom token creation + * + * 2. **Pure Nano Contract Transaction** + * - Token created via nano contract syscall when nc_execution = 'success' + * - Token deletion rules: + * a) Delete when first_block changes (handled in handleTokenCreated) + * - The token_id might change between reorgs even though tx_id stays the same + * - handleTokenCreated deletes old tokens before inserting new ones + * b) Delete when nc_execution changes from 'success' to something else + * (handled in handleNcExecVoided) - this occurs during reorgs + * - Token can be re-created if nano executes successfully again after reorg + * + * 3. 
**Hybrid Transaction (CREATE_TOKEN_TX + Nano Contract)** + * - Creates TWO sets of tokens: + * a) CREATE_TOKEN_TX token: Received immediately when tx hits mempool (token_id = tx_id) + * b) Nano-created tokens: Received when nano executes successfully (token_id ≠ tx_id) + * - Token deletion rules: + * - CREATE_TOKEN_TX token: Delete ONLY when transaction becomes voided + * - Nano-created tokens: Delete when first_block changes (in handleTokenCreated) OR + * nc_execution changes from 'success' to something else (in handleNcExecVoided) + * - During reorg: Only nano-created tokens are deleted, CREATE_TOKEN_TX token remains + * - When voided: BOTH sets of tokens are deleted + * + * @param context - The context containing the event and other metadata + * @param _event - The event being processed (unused, context.event is used instead) + */ export const handleVertexAccepted = async (context: Context, _event: Event) => { const mysql = await getDbConnection(); await mysql.beginTransaction(); @@ -289,13 +351,6 @@ export const handleVertexAccepted = async (context: Context, _event: Event) => { await unlockTimelockedUtxos(mysql, now); } - if (version === hathorLib.constants.CREATE_TOKEN_TX_VERSION) { - if (!token_name || !token_symbol) { - throw new Error('Processed a token creation event but it did not come with token name and symbol'); - } - await storeTokenInformation(mysql, hash, token_name, token_symbol); - } - // check if any of the inputs are still marked as locked and update tables accordingly. // See remarks on getLockedUtxoFromInputs for more explanation. It's important to perform this // before updating the balances @@ -527,6 +582,43 @@ export const handleVertexRemoved = async (context: Context, _event: Event) => { } }; +/** + * Voids a transaction and all its associated data. 
+ * + * This function handles the complete voiding process including: + * - Marking transaction as voided in database + * - Marking all UTXOs as voided + * - Unspending inputs that were spent by this transaction + * - Updating wallet and address balances + * - Clearing tx_proposal marks + * - Deleting ALL tokens created by this transaction + * + * Token Deletion Behavior: + * + * When a transaction is voided, ALL tokens created by that transaction are deleted, + * regardless of how they were created: + * + * 1. **Pure CREATE_TOKEN_TX**: Deletes the CREATE_TOKEN_TX token (token_id = tx_id) + * + * 2. **Pure Nano Contract**: Deletes all tokens created by nano syscalls + * + * 3. **Hybrid Transaction (CREATE_TOKEN_TX + Nano)**: Deletes BOTH: + * - The CREATE_TOKEN_TX token (token_id = tx_id) + * - All nano-created tokens (token_id ≠ tx_id) + * + * Important: This deletion is INDEPENDENT of nano contract execution state: + * - A voided transaction might still have nc_execution = 'success' + * - Voiding applies to the ENTIRE transaction, so all tokens are deleted + * - This is different from nano execution state changes, which only delete nano-created tokens + * + * @param mysql - Database connection (must be in transaction) + * @param hash - Transaction hash + * @param inputs - Transaction inputs + * @param outputs - Transaction outputs + * @param tokens - Token UIDs in the transaction + * @param headers - Transaction headers (for nano contracts) + * @param version - Transaction version + */ export const voidTx = async ( mysql: MysqlConnection, hash: string, @@ -603,6 +695,33 @@ export const voidTx = async ( // This ensures the UTXOs can be used in new transactions after the void await clearTxProposalForVoidedTx(mysql, txInputs); + /** + * Delete ALL tokens created by this voided transaction. + * + * This handles all three token creation scenarios: + * + * 1. Pure CREATE_TOKEN_TX (no nano): + * - Deletes the single CREATE_TOKEN_TX token (token_id = tx_id) + * + * 2. 
Pure nano contract: + * - Deletes all tokens created by nano syscalls (token_id ≠ tx_id) + * + * 3. Hybrid (CREATE_TOKEN_TX + nano): + * - Deletes BOTH the CREATE_TOKEN_TX token AND all nano-created tokens + * + * Note: This is INDEPENDENT of nano execution state (nc_execution). + * Even if nc_execution = 'success', we delete all tokens because the + * ENTIRE transaction is being voided. + * + * See handleNcExecVoided (and its detection in metadataDiff) for the nano execution + * state change logic, which ONLY deletes nano-created tokens when nc_execution becomes non-SUCCESS. + */ + const tokensCreated = await getTokensCreatedByTx(mysql, hash); + if (tokensCreated.length > 0) { + logger.debug(`Voiding transaction ${hash} created ${tokensCreated.length} token(s), deleting them`); + await deleteTokens(mysql, tokensCreated); + } + const addresses = Object.keys(addressBalanceMap); await validateAddressBalances(mysql, addresses); }; @@ -707,6 +826,51 @@ export const handleTxFirstBlock = async (context: Context) => { } }; +/** + * Handle NC_EXEC_VOIDED event - nc_execution changed from 'success' to something else. + * + * This happens during reorgs when a transaction goes back to mempool and nc_execution + * changes from 'success' to 'pending' or null. When this occurs, any tokens created + * by the nano contract execution are no longer valid. + * + * This handler deletes all nano-created tokens for the transaction. Traditional + * CREATE_TOKEN_TX tokens (token_id = tx_id) are NOT affected - they remain valid + * because the token creation is inherent to the transaction itself, not dependent + * on nano contract execution. 
+ */ +export const handleNcExecVoided = async (context: Context) => { + const mysql = await getDbConnection(); + await mysql.beginTransaction(); + + try { + const fullNodeEvent = context.event as StandardFullNodeEvent; + const { hash } = fullNodeEvent.event.data; + + // Get all tokens created by this transaction + const tokensCreated = await getTokensCreatedByTx(mysql, hash); + + if (tokensCreated.length > 0) { + // Filter out traditional CREATE_TOKEN_TX tokens (where token_id = tx_id) + // These should NOT be deleted because they're inherent to the transaction + const nanoTokens = tokensCreated.filter(tokenId => tokenId !== hash); + + if (nanoTokens.length > 0) { + logger.debug(`NC execution voided for tx ${hash}, deleting ${nanoTokens.length} nano-created tokens`); + await deleteTokens(mysql, nanoTokens); + } + } + + await dbUpdateLastSyncedEvent(mysql, fullNodeEvent.event.id); + await mysql.commit(); + } catch (e) { + logger.error('handleNcExecVoided error: ', e); + await mysql.rollback(); + throw e; + } finally { + mysql.destroy(); + } +}; + export const updateLastSyncedEvent = async (context: Context) => { const mysql = await getDbConnection(); @@ -817,6 +981,73 @@ export const handleReorgStarted = async (context: Context): Promise => { } }; +export const handleTokenCreated = async (context: Context) => { + const mysql = await getDbConnection(); + await mysql.beginTransaction(); + + try { + const fullNodeEvent = context.event; + if (!fullNodeEvent) { + throw new Error('No event in context'); + } + + if (fullNodeEvent.event.type !== FullNodeEventTypes.TOKEN_CREATED) { + throw new Error('Invalid event type for TOKEN_CREATED'); + } + + const { + token_uid, + token_name, + token_symbol, + nc_exec_info, + } = fullNodeEvent.event.data; + + logger.debug(`Handling TOKEN_CREATED event for token ${token_uid}: ${token_name} (${token_symbol})`); + + // Store the mapping between token and the transaction that created it + // For regular CREATE_TOKEN_TX: nc_exec_info is 
null, token_uid equals tx_id + // For nano contract tokens: nc_exec_info.nc_tx contains the transaction hash + const txId = nc_exec_info?.nc_tx ?? token_uid; + const firstBlock = nc_exec_info?.nc_block ?? null; + + /** + * Handle reorg scenario: first_block changed + * + * When a nano contract re-executes in a different block during a reorg, + * the token_id might change even though tx_id stays the same. + * Delete tokens with old first_block before inserting the new one. + */ + const tokensWithOldBlock = await getReexecNanoTokens(mysql, txId, firstBlock); + if (tokensWithOldBlock.length > 0) { + logger.debug(`First block changed for tx ${txId}, deleting ${tokensWithOldBlock.length} tokens with old first_block`); + await deleteTokens(mysql, tokensWithOldBlock); + } + + // Check if this exact token already exists + const existingToken = await getTokenInformation(mysql, token_uid); + + if (!existingToken) { + // Insert the new token + await storeTokenInformation(mysql, token_uid, token_name, token_symbol); + await insertTokenCreation(mysql, token_uid, txId, firstBlock); + logger.debug(`Inserted new token ${token_uid} with first_block=${firstBlock}`); + } else { + logger.debug(`Token ${token_uid} already exists, skipping insertion`); + } + + await dbUpdateLastSyncedEvent(mysql, fullNodeEvent.event.id); + + await mysql.commit(); + logger.debug(`Successfully stored token ${token_uid} created by tx ${txId}`); + } catch (e) { + logger.error('Error handling TOKEN_CREATED event', e); + await mysql.rollback(); + throw e; + } finally { + mysql.destroy(); + } +}; + /** * Checks the HTTP API for missed events after the last ACK * This is used to detect if we lost an event due to network packet loss diff --git a/packages/daemon/src/types/event.ts b/packages/daemon/src/types/event.ts index fea3afd6..3a93723c 100644 --- a/packages/daemon/src/types/event.ts +++ b/packages/daemon/src/types/event.ts @@ -45,6 +45,7 @@ export enum FullNodeEventTypes { REORG_STARTED = 'REORG_STARTED', 
REORG_FINISHED = 'REORG_FINISHED', NC_EVENT = 'NC_EVENT', + TOKEN_CREATED = 'TOKEN_CREATED', FULL_NODE_CRASHED = 'FULL_NODE_CRASHED', } @@ -69,7 +70,7 @@ const EmptyDataFullNodeEvents = z.union([ export const FullNodeEventTypesSchema = z.nativeEnum(FullNodeEventTypes); export type MetadataDecidedEvent = { - type: 'TX_VOIDED' | 'TX_UNVOIDED' | 'TX_NEW' | 'TX_FIRST_BLOCK' | 'IGNORE'; + type: 'TX_VOIDED' | 'TX_UNVOIDED' | 'TX_NEW' | 'TX_FIRST_BLOCK' | 'IGNORE' | 'NC_EXEC_VOIDED'; originalEvent: FullNodeEvent; } @@ -161,6 +162,34 @@ export const TxEventDataSchema = TxEventDataWithoutMetaSchema.extend({ voided_by: z.string().array(), first_block: z.string().nullable(), height: z.number(), + /** + * Nano contract execution state. + * + * This field indicates the execution status of nano contracts in this transaction: + * - 'pending': Nano contract is waiting to be executed (before first_block) + * - 'success': Nano contract executed successfully + * - 'failure': Nano contract execution failed + * - 'skipped': Nano contract execution was skipped + * - null/undefined: Not a nano contract transaction, or execution state not available + * + * Important: This field is INDEPENDENT of transaction voiding (voided_by): + * - A voided transaction might still have nc_execution = 'success' + * - A non-voided transaction might have nc_execution = 'failure' + * + * Token Creation Implications: + * - Tokens created by nano syscalls are only valid when nc_execution = 'success' + * - When nc_execution changes from 'success' to any other state (e.g., during reorg), + * any tokens created by that nano execution must be deleted + * - This is separate from CREATE_TOKEN_TX tokens, which are deleted only on void + * + * See handleNcExecVoided (detected in metadataDiff) in services/index.ts for the token deletion logic. 
+ */ + nc_execution: z.union([ + z.literal('pending'), + z.literal('success'), + z.literal('failure'), + z.literal('skipped'), + ]).nullable().optional(), }), }); @@ -231,12 +260,34 @@ export const NcEventSchema = FullNodeEventBaseSchema.extend({ }); export type NcEvent = z.infer; +export const TokenCreatedEventSchema = FullNodeEventBaseSchema.extend({ + event: z.object({ + id: z.number(), + timestamp: z.number(), + type: z.literal('TOKEN_CREATED'), + data: z.object({ + token_uid: z.string(), + nc_exec_info: z.object({ + nc_tx: z.string(), + nc_block: z.string(), + }).nullable(), + token_name: z.string(), + token_symbol: z.string(), + token_version: z.number(), + initial_amount: z.number().optional(), + }), + group_id: z.number().nullable(), + }), +}); +export type TokenCreatedEvent = z.infer; + export const FullNodeEventSchema = z.union([ TxDataWithoutMetaFullNodeEventSchema, StandardFullNodeEventSchema, ReorgFullNodeEventSchema, EmptyDataFullNodeEventSchema, NcEventSchema, + TokenCreatedEventSchema, ]); export type FullNodeEvent = z.infer;