
Commit 0126e6f

fix merge

Signed-off-by: richardhuo-nv <[email protected]>
1 parent d90d8c6 commit 0126e6f

File tree

1 file changed: 3 additions & 23 deletions


tensorrt_llm/executor/worker.py

Lines changed: 3 additions & 23 deletions
@@ -159,29 +159,9 @@ def _create_engine(executor_config):
                 executor_config=executor_config,
                 managed_weights=engine.managed_weights)
 
-            if not hasattr(executor_config, "backend"):
-                return tllm.Executor(engine, tllm.ModelType.DECODER_ONLY,
-                                     executor_config)
-            args = {
-                "executor_config": executor_config,
-                "checkpoint_dir": executor_config.hf_model_dir,
-            }
-            if executor_config.backend == "pytorch":
-                from tensorrt_llm._torch.pyexecutor.py_executor_creator import \
-                    create_py_executor
-                create_executor = create_py_executor
-                args["lora_config"] = lora_config
-                args[
-                    "garbage_collection_gen0_threshold"] = llm_args.garbage_collection_gen0_threshold
-                args["kv_connector_config"] = kv_connector_config
-            elif executor_config.backend == "_autodeploy":
-                from tensorrt_llm._torch.auto_deploy.shim.ad_executor import \
-                    create_autodeploy_executor
-                create_executor = create_autodeploy_executor
-            else:
-                raise ValueError(
-                    f"Unsupported backend config: {executor_config.backend}")
-            return create_executor(**args)
+            assert not hasattr(executor_config, "backend")
+            return tllm.Executor(engine, tllm.ModelType.DECODER_ONLY,
+                                 executor_config)
 
         self.engine = _create_py_executor(
             executor_config) if llm_args is not None else _create_engine(
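
For context, here is a minimal, self-contained sketch (my own reconstruction, not the actual TensorRT-LLM source) of the dispatch implied by the surviving context lines: when llm_args is provided, engine creation goes through _create_py_executor, so _create_engine only ever sees a plain TensorRT executor_config and can assert that no "backend" attribute is present. The wrapper name create_engine and the stand-in config objects below are hypothetical.

# Minimal sketch of the post-commit dispatch. SimpleNamespace objects stand in
# for the real executor/config classes; only _create_engine and
# _create_py_executor mirror names that appear in the diff.
from types import SimpleNamespace


def _create_py_executor(executor_config):
    # Stand-in for the PyTorch/_autodeploy path, which now owns all
    # backend-specific construction (not shown in this diff).
    return SimpleNamespace(kind="py_executor", config=executor_config)


def _create_engine(executor_config):
    # After this commit, _create_engine only handles the plain TensorRT
    # engine path, so a leaked "backend" attribute indicates a merge bug.
    assert not hasattr(executor_config, "backend")
    return SimpleNamespace(kind="trt_executor", config=executor_config)


def create_engine(executor_config, llm_args=None):
    # Mirrors the context line:
    #   self.engine = _create_py_executor(executor_config) \
    #       if llm_args is not None else _create_engine(...)
    if llm_args is not None:
        return _create_py_executor(executor_config)
    return _create_engine(executor_config)


if __name__ == "__main__":
    cfg = SimpleNamespace(max_batch_size=8)            # hypothetical config
    print(create_engine(cfg).kind)                     # -> trt_executor
    print(create_engine(cfg, llm_args=object()).kind)  # -> py_executor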
