Skip to content

Commit

Permalink
Added type decomposition for storage functions (#70)
Browse files Browse the repository at this point in the history
* Added type decomposition for storage functions

* Added unit tests and updated README.md
  • Loading branch information
arjanz authored Feb 17, 2022
1 parent ee72e6a commit d80aa8e
Show file tree
Hide file tree
Showing 4 changed files with 109 additions and 13 deletions.
40 changes: 28 additions & 12 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ pip install scalecodec

## Examples (MetadataV14 runtimes and higher)

Encode a Call
### Encode a Call

```python

Expand All @@ -46,24 +46,40 @@ call.encode({
})
```

Decode the result of a `state_getStorageAt` RPC call
### Decode the result of a `state_getStorageAt` RPC call

```python
event_data = "0x2000000000000000b0338609000000000200000001000000000080b2e60e0000000002000000020000000003be1957935299d0be2f35b8856751feab95fc7089239366b52b72ca98249b94300000020000000500be1957935299d0be2f35b8856751feab95fc7089239366b52b72ca98249b943000264d2823000000000000000000000000000200000005027a9650a6bd43f1e0b4546affb88f8c14213e1fb60512692c2b39fbfcfc56b703be1957935299d0be2f35b8856751feab95fc7089239366b52b72ca98249b943000264d2823000000000000000000000000000200000013060c4c700700000000000000000000000000000200000005047b8441d5110c178c29be709793a41d73ae8b3119a971b18fbd20945ea5d622f00313dc01000000000000000000000000000002000000000010016b0b00000000000000"

system_pallet = [p for p in metadata.pallets if p['name'] == 'System'][0]
event_storage_function = [s for s in system_pallet['storage']['entries'] if s['name'] == "Events"][0]

system_pallet = metadata.get_metadata_pallet("System")
event_storage_function = system_pallet.get_storage_function("Events")

event = runtime_config.create_scale_object(
event_storage_function.get_value_type_string(), data=ScaleBytes(event_data), metadata=metadata
event_storage_function.get_value_type_string(), metadata=metadata
)
print(event.decode())
print(event.decode(ScaleBytes(event_data)))
```

### Retrieve type decomposition information of a `RegistryType`

```python
pallet = metadata.get_metadata_pallet("System")
storage_function = pallet.get_storage_function("BlockHash")

param_type_string = storage_function.get_params_type_string()
param_type_obj = runtime_config.create_scale_object(param_type_string[0])

type_info = param_type_obj.scale_info_type.retrieve_type_decomposition()

print(type_info)
# {'primitive': 'u32'}
```



## Examples (prior to MetadataV14)

Decode a SCALE-encoded Compact\<Balance\>
### Decode a SCALE-encoded Compact\<Balance\>

```python
RuntimeConfiguration().update_type_registry(load_type_registry_preset("default"))
Expand All @@ -73,7 +89,7 @@ obj.decode()
print(obj.value)
```

Encode to Compact\<Balance\>
### Encode to Compact\<Balance\>

```python
RuntimeConfiguration().update_type_registry(load_type_registry_preset("default"))
Expand All @@ -82,7 +98,7 @@ scale_data = obj.encode(2503000000000000000)
print(scale_data)
```

Encode to Vec\<Bytes\>
### Encode to Vec\<Bytes\>

```python
RuntimeConfiguration().update_type_registry(load_type_registry_preset("default"))
Expand All @@ -92,7 +108,7 @@ scale_data = obj.encode(value)
print(scale_data)
```

Add custom types to type registry
### Add custom types to type registry

```python
RuntimeConfiguration().update_type_registry(load_type_registry_preset("default"))
Expand All @@ -113,7 +129,7 @@ custom_types = {
RuntimeConfiguration().update_type_registry(custom_types)
```

Or from a custom JSON file
### Or from a custom JSON file

```python
RuntimeConfiguration().update_type_registry(load_type_registry_preset("default"))
Expand Down
37 changes: 37 additions & 0 deletions scalecodec/types.py
Original file line number Diff line number Diff line change
Expand Up @@ -1996,6 +1996,32 @@ def process_encode(self, value):
return super().process_encode(value)

def retrieve_type_decomposition(self):
    """Recursively decompose this scale-info type definition into a plain dict.

    Walks ``self.value['def']`` and, for variant, composite, array, sequence
    and tuple definitions, resolves the referenced inner type ids via the
    runtime config and attaches their decomposition under a ``'value'`` key
    (tuples get their elements replaced in place). Leaf definitions (e.g.
    ``{'primitive': 'u32'}``) are returned unchanged.

    Returns:
        dict: the (mutated) ``self.value['def']`` with nested type info.
    """
    type_def = self.value['def']

    if 'variant' in type_def:
        for variant in type_def['variant']['variants']:
            # NOTE(review): for variants with multiple fields each iteration
            # overwrites variant['value'], so only the last field's
            # decomposition survives — confirm this is intended.
            for field in variant['fields']:
                field_obj = self.runtime_config.create_scale_object(f"scale_info::{field['type']}")
                variant['value'] = field_obj.scale_info_type.retrieve_type_decomposition()

    elif 'composite' in type_def:
        for field in type_def['composite']['fields']:
            field_obj = self.runtime_config.create_scale_object(f"scale_info::{field['type']}")
            field['value'] = field_obj.scale_info_type.retrieve_type_decomposition()

    elif 'array' in type_def:
        array_def = type_def['array']
        element_obj = self.runtime_config.create_scale_object(f"scale_info::{array_def['type']}")
        array_def['value'] = element_obj.scale_info_type.retrieve_type_decomposition()

    elif 'sequence' in type_def:
        sequence_def = type_def['sequence']
        element_obj = self.runtime_config.create_scale_object(f"scale_info::{sequence_def['type']}")
        sequence_def['value'] = element_obj.scale_info_type.retrieve_type_decomposition()

    elif 'tuple' in type_def:
        # BUG FIX: original code did `for idx, type_def in self.value['def']['tuple']:`
        # which tries to unpack each tuple element (a scale-info type id, an int)
        # into two names and raises TypeError. enumerate() supplies the index.
        for idx, element_type in enumerate(type_def['tuple']):
            element_obj = self.runtime_config.create_scale_object(f"scale_info::{element_type}")
            type_def['tuple'][idx] = element_obj.scale_info_type.retrieve_type_decomposition()

    return self.value['def']


Expand Down Expand Up @@ -2300,6 +2326,9 @@ def get_param_hashers(self):
else:
raise NotImplementedError()

def get_param_info(self) -> list:
    # Abstract hook on the generic (pre-V14) storage entry metadata: concrete
    # subclasses (e.g. ScaleInfoStorageEntryMetadata) override this to return
    # one type decomposition dict per storage key parameter.
    raise NotImplementedError()


class ScaleInfoStorageEntryMetadata(GenericStorageEntryMetadata):

Expand Down Expand Up @@ -2349,6 +2378,14 @@ def get_param_hashers(self):
else:
raise NotImplementedError()

def get_param_info(self) -> list:
    """Return the type decomposition for each storage-key parameter.

    Resolves every type string reported by ``get_params_type_string()``
    through the runtime config and decomposes its scale-info type.
    """
    return [
        self.runtime_config.create_scale_object(type_string).scale_info_type.retrieve_type_decomposition()
        for type_string in self.get_params_type_string()
    ]


class GenericEventMetadata(Struct):

Expand Down
3 changes: 2 additions & 1 deletion test/fixtures/metadata_hex.json

Large diffs are not rendered by default.

42 changes: 42 additions & 0 deletions test/test_metadata.py
Original file line number Diff line number Diff line change
Expand Up @@ -128,4 +128,46 @@ def test_metadata_registry_decode_v14(self):
# pickle_data = pickle.dumps(metadata_obj)


class TestMetadataTypes(unittest.TestCase):
    """Tests for scale-info type decomposition of storage function parameters."""

    # Key into the metadata_hex.json fixture selecting which runtime metadata
    # blob to decode for these tests.
    metadata_version = 'karura_test'

    @classmethod
    def setUpClass(cls):
        # Decode the fixture metadata once for the whole class (it is
        # read-only for these tests) and register its portable type registry.
        cls.runtime_config = RuntimeConfigurationObject()
        cls.runtime_config.update_type_registry(load_type_registry_preset("metadata_types"))

        module_path = os.path.dirname(__file__)
        cls.metadata_fixture_dict = load_type_registry_file(
            os.path.join(module_path, 'fixtures', 'metadata_hex.json')
        )

        cls.metadata_obj = cls.runtime_config.create_scale_object(
            "MetadataVersioned", data=ScaleBytes(cls.metadata_fixture_dict[cls.metadata_version])
        )
        cls.metadata_obj.decode()

        # Makes the decoded metadata's scale-info types resolvable through the
        # runtime config (required for "scale_info::<id>" object creation).
        cls.runtime_config.add_portable_registry(cls.metadata_obj)

    def test_storage_function_type_decomposition_simple(self):
        # A storage map keyed by a primitive decomposes to a single leaf dict.
        pallet = self.metadata_obj.get_metadata_pallet("System")
        storage_function = pallet.get_storage_function("BlockHash")

        param_type_string = storage_function.get_params_type_string()
        param_type_obj = self.runtime_config.create_scale_object(param_type_string[0])

        type_info = param_type_obj.scale_info_type.retrieve_type_decomposition()
        self.assertDictEqual({'primitive': 'u32'}, type_info)

    def test_storage_function_type_decomposition_complex(self):
        # A storage key whose type is a nested variant (enum-in-enum)
        # decomposes recursively: the outer variant's first entry carries the
        # inner variant's decomposition under 'value'.
        pallet = self.metadata_obj.get_metadata_pallet("Tokens")
        storage_function = pallet.get_storage_function("TotalIssuance")

        param_type_string = storage_function.get_params_type_string()
        param_type_obj = self.runtime_config.create_scale_object(param_type_string[0])

        type_info = param_type_obj.scale_info_type.retrieve_type_decomposition()
        self.assertEqual('Token', type_info['variant']['variants'][0]['name'])
        self.assertEqual('ACA', type_info['variant']['variants'][0]['value']['variant']['variants'][0]['name'])


0 comments on commit d80aa8e

Please sign in to comment.