Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 1 addition & 4 deletions hathor/cli/events_simulator/scenario.py
Original file line number Diff line number Diff line change
Expand Up @@ -171,9 +171,6 @@ def simulate_invalid_mempool_transaction(simulator: 'Simulator', manager: 'Hatho
assert manager.propagate_tx(b0, fails_silently=False)
simulator.run(60)

# the transaction should have been removed from the mempool
# the transaction should have been removed from the mempool and the storage after the re-org
assert tx not in manager.tx_storage.iter_mempool_from_best_index()

# additionally the transaction should have been marked as invalid and removed from the storage after the re-org
assert tx.get_metadata().validation.is_invalid()
assert not manager.tx_storage.transaction_exists(tx.hash)
2 changes: 1 addition & 1 deletion hathor/consensus/consensus.py
Original file line number Diff line number Diff line change
Expand Up @@ -159,7 +159,7 @@ def _unsafe_update(self, base: BaseTransaction) -> None:

# And emit events for txs that were removed
for tx_removed in txs_to_remove:
context.pubsub.publish(HathorEvents.CONSENSUS_TX_REMOVED, vertex_id=tx_removed.hash)
context.pubsub.publish(HathorEvents.CONSENSUS_TX_REMOVED, tx=tx_removed)

# and also emit the reorg finished event if needed
if context.reorg_common_block is not None:
Expand Down
15 changes: 5 additions & 10 deletions hathor/event/model/event_data.py
Original file line number Diff line number Diff line change
Expand Up @@ -97,7 +97,7 @@ def from_event_arguments(cls, args: EventArguments) -> 'EmptyData':
return cls()


class TxData(BaseEventData, extra=Extra.ignore):
class TxDataWithoutMeta(BaseEventData, extra=Extra.ignore):
"""Class that represents transaction data on an event."""
hash: str
nonce: Optional[int] = None
Expand All @@ -112,11 +112,10 @@ class TxData(BaseEventData, extra=Extra.ignore):
# TODO: Token name and symbol could be in a different class because they're only used by TokenCreationTransaction
token_name: Optional[str]
token_symbol: Optional[str]
metadata: 'TxMetadata'
aux_pow: Optional[str] = None

@classmethod
def from_event_arguments(cls, args: EventArguments) -> 'TxData':
def from_event_arguments(cls, args: EventArguments) -> Self:
from hathor.transaction.resources.transaction import get_tx_extra_data
tx_extra_data_json = get_tx_extra_data(args.tx, detail_tokens=False)
tx_json = tx_extra_data_json['tx']
Expand All @@ -138,12 +137,8 @@ def from_event_arguments(cls, args: EventArguments) -> 'TxData':
return cls(**tx_json)


class VertexIdData(BaseEventData):
vertex_id: str

@classmethod
def from_event_arguments(cls, args: EventArguments) -> Self:
return cls(vertex_id=args.vertex_id.hex())
class TxData(TxDataWithoutMeta):
metadata: 'TxMetadata'


class ReorgData(BaseEventData):
Expand All @@ -164,4 +159,4 @@ def from_event_arguments(cls, args: EventArguments) -> 'ReorgData':


# Union type to encompass BaseEventData polymorphism
EventData: TypeAlias = EmptyData | TxData | ReorgData | VertexIdData
EventData: TypeAlias = EmptyData | TxData | TxDataWithoutMeta | ReorgData
4 changes: 2 additions & 2 deletions hathor/event/model/event_type.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@

from enum import Enum

from hathor.event.model.event_data import BaseEventData, EmptyData, ReorgData, TxData, VertexIdData
from hathor.event.model.event_data import BaseEventData, EmptyData, ReorgData, TxData, TxDataWithoutMeta
from hathor.pubsub import HathorEvents


Expand Down Expand Up @@ -56,6 +56,6 @@ def data_type(self) -> type[BaseEventData]:
EventType.REORG_STARTED: ReorgData,
EventType.REORG_FINISHED: EmptyData,
EventType.VERTEX_METADATA_CHANGED: TxData,
EventType.VERTEX_REMOVED: VertexIdData,
EventType.VERTEX_REMOVED: TxDataWithoutMeta,
EventType.FULL_NODE_CRASHED: EmptyData,
}
4 changes: 1 addition & 3 deletions hathor/pubsub.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,6 @@
from twisted.python.threadable import isInIOThread

from hathor.reactor import ReactorProtocol as Reactor
from hathor.types import VertexId
from hathor.utils.zope import verified_cast

if TYPE_CHECKING:
Expand Down Expand Up @@ -62,7 +61,7 @@ class HathorEvents(Enum):

CONSENSUS_TX_REMOVED:
Triggered when a tx is removed because it became invalid (due to a reward lock check)
Publishes the tx hash
Publishes the tx object

WALLET_OUTPUT_RECEIVED:
Triggered when a wallet receives a new output
Expand Down Expand Up @@ -146,7 +145,6 @@ class EventArguments:

# XXX: add these as needed, these attributes don't always exist, but when they do these are their types
tx: 'BaseTransaction'
vertex_id: VertexId
reorg_size: int
old_best_block: 'Block'
new_best_block: 'Block'
Expand Down
9 changes: 7 additions & 2 deletions hathor/transaction/resources/transaction.py
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,12 @@ def update_serialized_tokens_array(tx: BaseTransaction, serialized: dict[str, An
serialized['tokens'] = [h.hex() for h in tx.tokens]


def get_tx_extra_data(tx: BaseTransaction, *, detail_tokens: bool = True) -> dict[str, Any]:
def get_tx_extra_data(
tx: BaseTransaction,
*,
detail_tokens: bool = True,
force_reload_metadata: bool = True,
) -> dict[str, Any]:
""" Get the data of a tx to be returned to the frontend
Returns success, the serialized tx, its metadata, and spent outputs
"""
Expand All @@ -61,7 +66,7 @@ def get_tx_extra_data(tx: BaseTransaction, *, detail_tokens: bool = True) -> dic

# Update tokens array
update_serialized_tokens_array(tx, serialized)
meta = tx.get_metadata(force_reload=True)
meta = tx.get_metadata(force_reload=force_reload_metadata)
# To get the updated accumulated weight just need to call the
# TransactionAccumulatedWeightResource (/transaction_acc_weight)

Expand Down
4 changes: 2 additions & 2 deletions tests/event/test_event_simulation_scenarios.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,10 +20,10 @@
ReorgData,
SpentOutput,
TxData,
TxDataWithoutMeta,
TxInput,
TxMetadata,
TxOutput,
VertexIdData,
)
from hathor.event.model.event_type import EventType
from hathor.event.websocket.request import StartStreamRequest
Expand Down Expand Up @@ -340,7 +340,7 @@ def test_invalid_mempool(self) -> None:
EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=37, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', 'fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', 'eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', 'f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', '2e3122412eb129c7f0d03e37d8a5637da9354df980a2259332b2b14e7a340d94'], twins=[], accumulated_weight=2.0, score=2.0, first_block='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', height=0, validation='full'), aux_pow=None), group_id=0), latest_event_id=41, stream_id=stream_id), # noqa: E501
EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=38, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', 'fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', 'eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', 'f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', '2e3122412eb129c7f0d03e37d8a5637da9354df980a2259332b2b14e7a340d94'], twins=[], accumulated_weight=2.0, score=2.0, first_block='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', height=0, validation='full'), aux_pow=None), group_id=0), latest_event_id=41, stream_id=stream_id), # noqa: E501
# One VERTEX_REMOVED for the tx above
EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=39, timestamp=0, type=EventType.VERTEX_REMOVED, data=VertexIdData(vertex_id='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650'), group_id=0), latest_event_id=41, stream_id=stream_id), # noqa: E501
EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=39, timestamp=0, type=EventType.VERTEX_REMOVED, data=TxDataWithoutMeta(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=18.656776158409354, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None), group_id=0), latest_event_id=41, stream_id=stream_id), # noqa: E501
# REORG_FINISHED
EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=40, timestamp=0, type=EventType.REORG_FINISHED, data=EmptyData(), group_id=0), latest_event_id=41, stream_id=stream_id), # noqa: E501
# One NEW_VERTEX_ACCEPTED for the block that caused the reorg
Expand Down
4 changes: 2 additions & 2 deletions tests/event/websocket/test_protocol.py
Original file line number Diff line number Diff line change
Expand Up @@ -101,10 +101,10 @@ def test_send_event_response() -> None:
b'"timestamp":123.0,"type":"VERTEX_METADATA_CHANGED","data":{"hash":"abc","nonce":123,'
b'"timestamp":456,"signal_bits":0,"version":1,"weight":10.0,"inputs":[],"outputs":[],'
b'"parents":[],'
b'"tokens":[],"token_name":null,"token_symbol":null,"metadata":{"hash":"abc",'
b'"tokens":[],"token_name":null,"token_symbol":null,"aux_pow":null,"metadata":{"hash":"abc",'
b'"spent_outputs":[],"conflict_with":[],"voided_by":[],"received_by":[],"children":[],'
b'"twins":[],"accumulated_weight":10.0,"score":20.0,"first_block":null,"height":100,'
b'"validation":"validation"},"aux_pow":null},"group_id":null},"latest_event_id":10,'
b'"validation":"validation"}},"group_id":null},"latest_event_id":10,'
b'"stream_id":"stream_id"}')

protocol.sendMessage.assert_called_once_with(expected_payload)
Expand Down