Skip to content
Merged
Show file tree
Hide file tree
Changes from 17 commits
Commits
Show all changes
51 commits
Select commit Hold shift + click to select a range
e28b949
Added macro generation in MLF export
fzi-peccia Sep 15, 2022
ff40652
Fixed tests
fzi-peccia Sep 15, 2022
cd33ca7
Fixed black formatting
fzi-peccia Sep 15, 2022
7589796
Small lint fix
fzi-peccia Sep 15, 2022
d755cf0
Last lint fix
fzi-peccia Sep 15, 2022
323500a
cpp lint fix
fzi-peccia Sep 15, 2022
fcb46ec
Fixed cpp tests
fzi-peccia Sep 15, 2022
7014604
cpp lint improvements
fzi-peccia Sep 15, 2022
e312d8e
Fixed cpp tests
fzi-peccia Sep 15, 2022
c22f03d
cpp lint
fzi-peccia Sep 15, 2022
de52850
Update src/target/source/interface_c.cc
fPecc Sep 16, 2022
475a0ab
Update src/target/source/interface_c.cc
fPecc Sep 16, 2022
8bf30d4
Update src/target/source/interface_c.cc
fPecc Sep 16, 2022
a544a26
SanitizeName added and tests fixed
fzi-peccia Sep 16, 2022
df0854f
lint fixes
fzi-peccia Sep 16, 2022
7e432e8
lint fix
fzi-peccia Sep 16, 2022
f6d707e
lint
fzi-peccia Sep 16, 2022
80dc8e6
Merge branch 'main' into mlf-macro-gen
fzi-peccia Oct 21, 2022
4e3a07f
requested improvements and tests changes
fzi-peccia Oct 21, 2022
f645a33
Improved format
fzi-peccia Oct 21, 2022
d25c4c8
Merge remote-tracking branch 'upstream/main' into mlf-macro-gen
fzi-peccia Nov 22, 2022
91cf25a
Fixed failing tests
fzi-peccia Nov 22, 2022
b5bc50e
lint fix
fzi-peccia Nov 22, 2022
97f0b5d
Removed unnecessary check
fzi-peccia Nov 22, 2022
3931d0b
Removed unnecessary checks
fzi-peccia Nov 23, 2022
107e482
Added macro generation in MLF export
fzi-peccia Sep 15, 2022
a210129
Fixed tests
fzi-peccia Sep 15, 2022
ef9c6a8
Fixed black formatting
fzi-peccia Sep 15, 2022
d768723
Small lint fix
fzi-peccia Sep 15, 2022
e6d8f08
Last lint fix
fzi-peccia Sep 15, 2022
da23330
cpp lint fix
fzi-peccia Sep 15, 2022
62a6714
Fixed cpp tests
fzi-peccia Sep 15, 2022
d2f217d
cpp lint improvements
fzi-peccia Sep 15, 2022
52f67fc
Fixed cpp tests
fzi-peccia Sep 15, 2022
c4233fc
cpp lint
fzi-peccia Sep 15, 2022
4a863c3
Update src/target/source/interface_c.cc
fPecc Sep 16, 2022
2802c1b
Update src/target/source/interface_c.cc
fPecc Sep 16, 2022
8e8a4b2
Update src/target/source/interface_c.cc
fPecc Sep 16, 2022
c27c359
SanitizeName added and tests fixed
fzi-peccia Sep 16, 2022
3391884
lint fixes
fzi-peccia Sep 16, 2022
17b0cab
lint fix
fzi-peccia Sep 16, 2022
7723957
lint
fzi-peccia Sep 16, 2022
6c54eec
requested improvements and tests changes
fzi-peccia Oct 21, 2022
1fd4b70
Improved format
fzi-peccia Oct 21, 2022
f782c9e
Fixed failing tests
fzi-peccia Nov 22, 2022
0790d45
lint fix
fzi-peccia Nov 22, 2022
307c724
Removed unnecessary check
fzi-peccia Nov 22, 2022
c9be298
Removed unnecessary checks
fzi-peccia Nov 23, 2022
8468ade
Merge branch 'mlf-macro-gen' of github.com:fPecc/tvm into mlf-macro-gen
fzi-peccia Dec 6, 2022
fa16303
Merge remote-tracking branch 'upstream/main' into mlf-macro-gen
fzi-peccia Dec 6, 2022
94934a6
Merge branch 'main' into mlf-macro-gen
fzi-peccia Dec 7, 2022
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
68 changes: 65 additions & 3 deletions python/tvm/micro/model_library_format.py
Original file line number Diff line number Diff line change
Expand Up @@ -47,15 +47,32 @@ class UnsupportedInModelLibraryFormatError(Exception):


def generate_c_interface_header(
module_name, inputs, outputs, pools, io_pool_allocations, devices, workspace_size, include_path
module_name,
inputs,
outputs,
pools,
io_pool_allocations,
devices,
workspace_size,
include_path,
input_sizes,
output_sizes,
):
"""Generate C Interface header to be included in MLF"""
mangled_name = to_c_variable_style(prefix_generated_name(module_name))
metadata_header = os.path.join(include_path, f"{mangled_name}.h")

interface_c_create = tvm._ffi.get_global_func("runtime.InterfaceCCreate")
interface_c_module = interface_c_create(
module_name, inputs, outputs, pools, io_pool_allocations, devices, workspace_size
module_name,
inputs,
outputs,
pools,
io_pool_allocations,
devices,
workspace_size,
input_sizes,
output_sizes,
)

with open(metadata_header, "w") as header_file:
Expand Down Expand Up @@ -277,6 +294,37 @@ def _create_empty_entry(target_device_type):
main_func_metadata.io_sizes[target]
)

# Now, we also add the information about the size of each input and output of the main
# function (in bytes)
input_dict = {}
for input_param in main_func_metadata.relay_primfuncs[target].params:
if hasattr(input_param, "checked_type"):
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I don't see a test case for when this is not set? How do we reproduce it?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Hi @Mousius . I reviewed the code and noticed that the parameters of the primfuncs in the main_func_metadata always seem to have the checked_type set, so I removed the check here.

input_dict[input_param.name_hint] = int(
_shape_to_size(input_param.checked_type.shape, input_param.checked_type.dtype)
)
else:
# TODO: maybe fill checked_type here?
input_dict[input_param.name_hint] = 0
target_main_entries[int(target.kind.device_type)]["inputs"] = input_dict

output_dict = {}
    # For outputs, we don't have names, so we enumerate them
if isinstance(main_func_metadata.relay_primfuncs[target].ret_type, tvm.ir.type.TupleType):
for i, output_type in enumerate(
main_func_metadata.relay_primfuncs[target].ret_type.fields
):
if hasattr(output_type, "shape"):
output_dict[i] = int(_shape_to_size(output_type.shape, output_type.dtype))
else:
output_dict[i] = 0
else:
output_type = main_func_metadata.relay_primfuncs[target].ret_type
if hasattr(output_type, "shape"):
output_dict[0] = int(_shape_to_size(output_type.shape, output_type.dtype))
else:
output_dict[0] = 0
target_main_entries[int(target.kind.device_type)]["outputs"] = output_dict

ret = {
"operator_functions": func_entries,
"main": list(target_main_entries.values()),
Expand Down Expand Up @@ -427,6 +475,18 @@ def _export_graph_model_library_format(
"workspace_size_bytes"
]
)
inputs_sizes = metadata["modules"][mod.libmod_name]["memory"]["functions"]["main"][0][
"inputs"
]
# Here, we merge the output sizes with the actual output names
output_sizes = {}
for key in metadata["modules"][mod.libmod_name]["memory"]["functions"]["main"][0][
"outputs"
].keys():
output_sizes[outputs[key]] = metadata["modules"][mod.libmod_name]["memory"][
"functions"
]["main"][0]["outputs"][key]

generate_c_interface_header(
mod.libmod_name,
inputs,
Expand All @@ -436,6 +496,8 @@ def _export_graph_model_library_format(
devices,
workspace_size,
include_path,
inputs_sizes,
output_sizes,
)

is_aot = isinstance(mod, executor_factory.AOTExecutorFactoryModule)
Expand All @@ -459,7 +521,7 @@ class NonStaticShapeError(Exception):

def _shape_to_size(shape, dtype):
bits_per_item = int(
re.match(r"((float)|(int))(?P<width_bits>[0-9]+)", dtype).group("width_bits")
re.match(r"((float)|(int)|(uint))(?P<width_bits>[0-9]+)", dtype).group("width_bits")
)
assert bits_per_item is not None, f"don't know how to compute size of type {dtype}"
total_bits = bits_per_item
Expand Down
34 changes: 30 additions & 4 deletions src/target/source/interface_c.cc
Original file line number Diff line number Diff line change
Expand Up @@ -46,20 +46,42 @@ class InterfaceCNode : public runtime::ModuleNode {
InterfaceCNode(std::string module_name, Array<String> inputs, Array<String> outputs,
Array<tir::usmp::AllocatedPoolInfo> pools,
Map<String, tir::usmp::PoolAllocation> io_pool_allocations, Array<String> devices,
int workspace_size)
int workspace_size, Map<String, IntImm> input_sizes,
Map<String, IntImm> output_sizes)
: module_name_(module_name),
inputs_(inputs),
outputs_(outputs),
devices_(devices),
pools_(FilterExternalPools(pools)),
io_pool_allocations_(io_pool_allocations),
workspace_size_(workspace_size) {}
workspace_size_(workspace_size),
input_sizes_(input_sizes),
output_sizes_(output_sizes) {}
const char* type_key() const final { return "h"; }

std::string GetSource(const std::string& format) final {
std::stringstream code;

EmitUpperHeaderGuard(code);

// Emit macros for input sizes
for (auto const& it : input_sizes_) {
std::string input_name = SanitizeName(it.first);
std::string input_macro_name = input_name + "_size";
int input_size = it.second->value;
EmitIntegerValueMacro(code, "Input tensor " + input_name + " size (in bytes)",
input_macro_name, input_size);
}

// Emit macros for output sizes
for (auto const& it : output_sizes_) {
std::string output_name = SanitizeName(it.first);
std::string output_macro_name = output_name + "_size";
int output_size = it.second->value;
EmitIntegerValueMacro(code, "Output tensor " + output_name + " size (in bytes)",
output_macro_name, output_size);
}

EmitBrief(code, "Input tensor pointers");
EmitStruct(code, "inputs", inputs_);
EmitBrief(code, "Output tensor pointers");
Expand Down Expand Up @@ -277,14 +299,18 @@ class InterfaceCNode : public runtime::ModuleNode {
Array<tir::usmp::AllocatedPoolInfo> pools_;
Map<String, tir::usmp::PoolAllocation> io_pool_allocations_;
int workspace_size_;
Map<String, IntImm> input_sizes_;
Map<String, IntImm> output_sizes_;
};

runtime::Module InterfaceCCreate(std::string module_name, Array<String> inputs,
Array<String> outputs, Array<tir::usmp::AllocatedPoolInfo> pools,
Map<String, tir::usmp::PoolAllocation> io_pool_allocations,
Array<String> devices, int workspace_size) {
Array<String> devices, int workspace_size,
Map<String, IntImm> input_sizes,
Map<String, IntImm> output_sizes) {
auto n = make_object<InterfaceCNode>(module_name, inputs, outputs, pools, io_pool_allocations,
devices, workspace_size);
devices, workspace_size, input_sizes, output_sizes);
return runtime::Module(n);
}

Expand Down
Loading