Commit 80cbe2a

Fixed
Signed-off-by: greg-kwasniewski1 <[email protected]>
1 parent 66a0341 commit 80cbe2a

File tree

3 files changed: +3 -2 lines changed


setup.py

Lines changed: 1 addition & 1 deletion
@@ -215,7 +215,7 @@ def extract_from_precompiled(precompiled_location: str, package_data: List[str],
         precompiled_location = download_precompiled(tempdir, version)
         extract_from_precompiled(precompiled_location, package_data, tempdir)
 
-# sanity_check()
+sanity_check()
 
 # https://setuptools.pypa.io/en/latest/references/keywords.html
 setup(
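
With sanity_check() no longer commented out, the check now runs on every build that goes through the precompiled path, immediately after extraction. Below is a minimal sketch of what such a post-extraction check might look like; the artifact paths and the error handling are assumptions for illustration, not the actual body of sanity_check() in setup.py:

    from pathlib import Path

    def sanity_check() -> None:
        """Hypothetical post-extraction check: fail the build early if
        expected precompiled artifacts are missing."""
        # Assumed artifact locations; the real check may inspect other
        # files or try importing the extracted bindings instead.
        expected = [
            Path("tensorrt_llm") / "bindings",
            Path("tensorrt_llm") / "libs",
        ]
        missing = [str(p) for p in expected if not p.exists()]
        if missing:
            raise RuntimeError(
                f"Precompiled extraction incomplete, missing: {missing}")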

tensorrt_llm/_torch/auto_deploy/config/default.yaml

Lines changed: 1 addition & 1 deletion
@@ -56,7 +56,7 @@ transforms:
     stage: sharding
     simple_shard_only: false
     use_sharding_from_factory: false
-    sharding_dims: ['tp', 'ep', 'dp']
+    sharding_dims: ['tp', 'ep', 'bmm']
   # TODO: (hg) need to ensure run_shape_prop after sharding.
   sharding_transform_executor:
     stage: sharding
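
The default sharding dimensions now list 'bmm' in place of 'dp', so tensor-parallel, expert-parallel, and BMM sharding are all detected out of the box. The same settings can be expressed programmatically as a Python dict, mirroring the structure the unit test below passes to the transform pipeline (values taken from this diff):

    # Dict form of the updated detect_sharding defaults, matching the
    # transform config structure used in the auto_deploy unit tests.
    transforms_config = {
        "detect_sharding": {
            "stage": "sharding",
            "simple_shard_only": False,
            "use_sharding_from_factory": False,
            "sharding_dims": ["tp", "ep", "bmm"],  # 'dp' replaced by 'bmm'
        },
        "sharding_transform_executor": {
            "stage": "sharding",
        },
    }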

tests/unittest/_torch/auto_deploy/unit/multigpu/transformations/library/test_bmm_sharding.py

Lines changed: 1 addition & 0 deletions
@@ -67,6 +67,7 @@ def _run_job(
         "detect_sharding": {
             "stage": "sharding",
             "use_sharding_from_factory": False,
+            "sharding_dims": ["bmm"],
         },
         "sharding_transform_executor": {
             "stage": "sharding",
