This repository was archived by the owner on May 22, 2023. It is now read-only.
Merged
1 change: 0 additions & 1 deletion include/tvm/runtime/relax_vm/executable.h
@@ -23,7 +23,6 @@
 #ifndef TVM_RUNTIME_RELAX_VM_EXECUTABLE_H_
 #define TVM_RUNTIME_RELAX_VM_EXECUTABLE_H_

-#include <tvm/ir/expr.h>
 #include <tvm/runtime/container/closure.h>
 #include <tvm/runtime/object.h>
 #include <tvm/runtime/registry.h>
40 changes: 38 additions & 2 deletions python/tvm/contrib/hexagon/session.py
@@ -23,7 +23,9 @@
 from typing import Union

 import tvm
+from tvm import relax
 from tvm import rpc as _rpc
+from tvm.contrib import utils
 import tvm.contrib.hexagon as hexagon
 from tvm.relay.backend.executor_factory import (
     ExecutorFactoryModule,
@@ -247,13 +249,13 @@ def get_graph_debug_executor(
             graph_json, graph_debug_mod, self.device, dump_root=str(dump_root)
         )

-    def get_executor_from_factory(self, module: ExecutorFactoryModule):
+    def get_executor_from_factory(self, module: Union[ExecutorFactoryModule, relax.vm.Executable]):
         """Create a local GraphModule which consumes a remote libmod.

         Parameters
         ----------

-        module : ExecutorFactoryModule
+        module : Union[ExecutorFactoryModule, relax.vm.Executable]

             The module to upload to the remote
             session and load.
@@ -262,6 +264,8 @@ def get_executor_from_factory(self, module: ExecutorFactoryModule):
             return self._aot_executor_from_factory(module)
         if isinstance(module, GraphExecutorFactoryModule):
             return self._graph_executor_from_factory(module)
+        if isinstance(module, relax.vm.Executable):
+            return self._relax_vm_executable_executor(module)

         raise TypeError(f"Unsupported executor type: {type(module)}")

@@ -313,6 +317,38 @@ def _graph_executor_from_factory(
"""
return self.get_graph_executor(module.get_graph_json(), module.get_lib())

def _relax_vm_executable_executor(
self,
vm_exec: relax.vm.Executable,
):
"""Create a local TVM module which consumes a remote vm executable.

Paramters
---------

vm_exec : relax.vm.Executable
The Relax VM Executable to upload to the remote and load. This will typically be the
output of `relax.vm.build`.

Returns
-------
TVMModule :
TVM module object
"""
assert self._rpc is not None, "Hexagon session must be started using __enter__ prior to use"

temp_dir = utils.tempdir()
path_exec = temp_dir.relpath("exec.so")

vm_exec.mod.export_library(
path_exec,
fcompile=hexagon.create_aot_shared,
hexagon_arch="v68",
)

self.upload(path_exec, "exec.so")
return self._rpc.get_function("tvm.hexagon.load_module")("exec.so")

def _aot_executor_from_factory(
self,
module: Union[str, pathlib.Path, AOTExecutorFactoryModule],
Expand Down
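For orientation, a sketch of how this new branch might be driven end to end. Only `relax.vm.build`, `get_executor_from_factory`, and the v68 arch string come from the diff itself; the launcher/session setup and the pre-built `mod` below follow tvm.contrib.hexagon test conventions and are assumptions, not part of this change.

```python
import tvm
from tvm import relax

# Hedged sketch: compile a Relax IRModule for Hexagon and hand the
# resulting Executable to the session, which dispatches to the new
# _relax_vm_executable_executor branch above.
target = tvm.target.hexagon("v68")  # matches the hexagon_arch="v68" above
ex = relax.vm.build(mod, target)    # `mod`: an existing relax.IRModule

with hexagon_launcher.start_session() as session:  # assumed launcher object
    vm_mod = session.get_executor_from_factory(ex)
```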
2 changes: 1 addition & 1 deletion python/tvm/runtime/module.py
@@ -467,7 +467,7 @@ def export_library(self, file_name, fcompile=None, addons=None, workspace_dir=No
object_format = "cu"
has_c_module = True
else:
assert module.type_key == "llvm" or module.type_key == "static_library"
assert module.is_dso_exportable
object_format = "o"
path_obj = os.path.join(workspace_dir, f"lib{index}.{object_format}")
module.save(path_obj)
Expand Down
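The old assert hard-coded two type keys; the new one asks the module itself via `is_dso_exportable` (see the HexagonModuleNode override further down, which makes Hexagon modules pass this check). A minimal host-side look at that property, assuming an LLVM-enabled build; this example is illustrative and not taken from the PR:

```python
import tvm
from tvm import te

# Build a trivial LLVM module and confirm export_library's new predicate:
# any DSO-exportable module can be saved as an object file and linked.
n = te.var("n")
A = te.placeholder((n,), name="A", dtype="float32")
B = te.compute((n,), lambda i: A[i] + 1.0, name="B")
mod = tvm.build(te.create_schedule(B.op), [A, B], target="llvm")
assert mod.is_dso_exportable  # true for "llvm"; now also for "hexagon"
mod.export_library("add_one.so")
```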
10 changes: 10 additions & 0 deletions src/relax/backend/vm/codegen_vm.cc
@@ -144,6 +144,11 @@ class CodeGenVM : public ExprFunctor<Instruction::Arg(const Expr&)> {
LOG(FATAL) << "CodeGenVM does not support calls to " << call_node->op->GetTypeKey();
}
std::vector<Instruction::Arg> args;
// TODO(prakalp): For extern function `vm.builtin.alloc_shape_heap` we must pass vm register as
// well to find the device in which shape heap must be allocated.
if (name == "vm.builtin.alloc_shape_heap") {
args.push_back(Instruction::Arg(Instruction::kRegister, Instruction::kVMRegister));
}
for (auto arg : call_node->args) {
args.push_back(this->VisitExpr(arg));
}
@@ -473,6 +478,11 @@ class CodeGenVM : public ExprFunctor<Instruction::Arg(const Expr&)> {
     shape_tuple_value = shape_tuple;
     Index index = builder_->EmitConstant(shape_tuple_value);
     return Instruction::Arg(Instruction::kConstIdx, index);
+  } else if (arg->IsInstance<ConstantNode>()) {
+    TVMRetValue constant_data;
+    constant_data = Downcast<Constant>(arg)->data;
+    Index index = builder_->EmitConstant(constant_data);
+    return Instruction::Arg(Instruction::kConstIdx, index);
   } else {
     LOG(FATAL) << "CodeGenVM does not support this argument type:\n" << arg->GetTypeKey();
   }
1 change: 1 addition & 0 deletions src/runtime/hexagon/hexagon_module.h
@@ -64,6 +64,7 @@ class HexagonModuleNode : public runtime::ModuleNode {
   const char* type_key() const final { return "hexagon"; }
   void SaveToFile(const std::string& file_name, const std::string& format) override;
   void SaveToBinary(dmlc::Stream* stream) override;
+  bool IsDSOExportable() const final { return true; }

  protected:
   std::string data_;
src/runtime/relax_vm/builtin.cc
@@ -17,7 +17,7 @@
  * under the License.
  */
 /*!
- * \file src/relax/backend/vm/builtin.cc
+ * \file src/runtime/relax_vm/builtin.cc
  */
 #include <tvm/runtime/container/adt.h>
 #include <tvm/runtime/data_type.h>
@@ -41,9 +41,11 @@ TVM_REGISTER_GLOBAL("vm.builtin.shape_of").set_body_method(&NDArray::Shape);

TVM_REGISTER_GLOBAL("vm.builtin.copy").set_body_typed([](NDArray src) { return src; });

TVM_REGISTER_GLOBAL("vm.builtin.alloc_shape_heap").set_body_typed([](ShapeTuple size) {
return NDArray::Empty(size, DLDataType{kDLInt, 64, 1}, DLDevice{kDLCPU, 0});
});
TVM_REGISTER_GLOBAL("vm.builtin.alloc_shape_heap")
.set_body_typed([](void* vm_ptr, ShapeTuple size) {
VirtualMachine* vm = static_cast<VirtualMachine*>(vm_ptr);
return NDArray::Empty(size, DLDataType{kDLInt, 64, 1}, vm->devices[0]);
});

TVM_REGISTER_GLOBAL("vm.builtin.alloc_closure").set_body([](TVMArgs args, TVMRetValue* rv) {
std::vector<ObjectRef> cap_vars;
@@ -121,9 +123,6 @@ TVM_REGISTER_GLOBAL("vm.builtin.alloc_storage")
       }

       int64_t size_imm = buffer_size[0];
-      DLOG(INFO) << "AllocStorage: allocation_size=" << size_imm << ", alignment=" << alignment
-                 << ", dtype_hint=" << runtime::DLDataType2String(dtype_hint)
-                 << ", device_index=" << device_index;

       auto storage_obj = runtime::SimpleObjAllocator().make_object<StorageObj>();
       auto* alloc = vm->allocators[device_index];
@@ -144,12 +143,8 @@ TVM_REGISTER_GLOBAL("vm.binary_broadcast_shape_infer")
for (; i <= std::min(ndim0, ndim1); ++i) {
int64_t lhs_dim = lhs_shape[ndim0 - i];
int64_t rhs_dim = rhs_shape[ndim1 - i];
if (lhs_dim == 1 || rhs_dim == 1 || lhs_dim == rhs_dim) {
output_shape.push_back(std::max(lhs_dim, rhs_dim));
} else {
LOG(FATAL) << "Incompatible shapes " << lhs_shape << " and " << rhs_shape
<< " for broadcasting";
}
ICHECK(lhs_dim == rhs_dim || lhs_dim == 1 || rhs_dim == 1);
output_shape.push_back(std::max(lhs_dim, rhs_dim));
}
size_t max_ndim = std::max(ndim0, ndim1);
ShapeTuple& longer_shape = (ndim0 > ndim1) ? lhs_shape : rhs_shape;
Expand Down
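The rewritten loop enforces the same broadcasting rule as before (corresponding dimensions must be equal, or one of them must be 1), just via ICHECK instead of an if/else with LOG(FATAL). A quick way to poke at it from Python, assuming a build of this repo so the global function is registered:

```python
import tvm

# Hedged sketch: invoke the registered builtin directly through the
# global-function registry (name taken from the registration above).
infer = tvm.get_global_func("vm.binary_broadcast_shape_infer")
lhs = tvm.runtime.ShapeTuple([2, 1, 3])
rhs = tvm.runtime.ShapeTuple([4, 3])
print(infer(lhs, rhs))  # expect [2, 4, 3]; e.g. [2, 5] vs [3] now trips ICHECK
```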
6 changes: 3 additions & 3 deletions src/runtime/relax_vm/executable.cc
@@ -166,7 +166,7 @@ void Executable::SetInstructionData(Index i, Index j, ExecWord val) {
 }

 Instruction Executable::GetInstruction(Index i) const {
-  size_t offset = instr_offset[i];
+  Index offset = instr_offset[i];
   Opcode op = static_cast<Opcode>(instr_data[offset]);
   switch (op) {
     case Opcode::Call: {
@@ -383,7 +383,7 @@ void Executable::LoadConstantSection(dmlc::Stream* strm) {
       cell = ndarray;
       this->constants.push_back(cell);
     } else if (constant_type == ConstantType::kShapeTuple) {
-      size_t size;
+      uint64_t size;
       strm->Read(&size);
       std::vector<ShapeTuple::index_type> data(size);
       for (size_t i = 0; i < size; ++i) {
@@ -398,7 +398,7 @@
       cell = dtype;
       this->constants.push_back(cell);
     } else if (constant_type == ConstantType::kString) {
-      size_t size;
+      uint64_t size;
       strm->Read(&size);
       std::vector<char> data(size);
       for (size_t i = 0; i < size; ++i) {
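A plausible motivation for the size_t to uint64_t reads here (an inference; the diff itself gives no rationale): the constant section stores sizes at a fixed 64-bit width, while size_t is only 4 bytes on 32-bit targets such as Hexagon, so reading into a size_t would leave the stream misaligned. A plain-Python illustration of that failure mode, unrelated to any TVM API:

```python
import struct

# Writer emits an 8-byte little-endian size followed by the payload.
buf = struct.pack("<Q", 3) + b"abc"

# A 32-bit reader happens to see the right value here (3), but it only
# consumes 4 of the 8 size bytes, so the payload read starts too early.
(size,) = struct.unpack_from("<I", buf, 0)
payload = buf[4:4 + size]
print(payload)  # b'\x00\x00\x00' instead of b'abc'
```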