quick fix (facebookresearch#634)
Summary:
Pull Request resolved: facebookresearch#634

When a KeyedJaggedTensor doesn't have weights, `.weights()` throws an assertion error. We should use `.weights_or_none()` to check whether a KJT has weights.
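
For illustration only (not part of this commit), a minimal sketch of the difference, assuming torchrec's `KeyedJaggedTensor` is importable from `torchrec.sparse.jagged_tensor`; the example data mirrors the tests below:

import torch
from torchrec.sparse.jagged_tensor import KeyedJaggedTensor

# A KJT constructed without weights.
kjt = KeyedJaggedTensor(
    keys=["Key0", "Key1"],
    values=torch.arange(7).float(),
    lengths=torch.tensor([2, 0, 1, 1, 1, 2]),
)

# weights_or_none() returns None instead of raising, so it is safe as a presence check.
assert kjt.weights_or_none() is None

# weights() asserts that weights exist, so it raises here.
try:
    kjt.weights()  # raises because no weights were provided
except AssertionError:
    pass  # expected for an unweighted KJT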

Reviewed By: BerenLuthien

Differential Revision: D36005910

fbshipit-source-id: b075ef9949b44fc1186bc124fd42a00e3c9d77f3
czxttkl authored and facebook-github-bot committed Apr 29, 2022
1 parent cc5091e commit d48968a
Showing 2 changed files with 24 additions and 2 deletions.
4 changes: 2 additions & 2 deletions reagent/core/torch_utils.py
@@ -224,7 +224,7 @@ def reorder_data_kjt(x: KeyedJaggedTensor, indices: torch.Tensor):
         [torch.cat([x[y] for y in indices.tolist()]) for x in splitted_vals_per_key]
     )
     reordered_lengths = torch.cat([x[indices] for x in val_lens_per_key])
-    if x.weights() is not None:
+    if x.weights_or_none() is not None:
         weights_per_key = torch.tensor_split(x.weights(), acc_lengths_per_key)[:-1]
         splitted_weights_per_key = [
             torch.tensor_split(x, torch.cumsum(y, dim=0))[:-1]
@@ -282,7 +282,7 @@ def shift_kjt_by_one(x: KeyedJaggedTensor):
     shifted_lengths = torch.cat(
         [torch.cat([x[1:], torch.tensor([0])]) for x in val_lens_per_key]
     )
-    if x.weights() is not None:
+    if x.weights_or_none() is not None:
         weights_per_key = torch.tensor_split(x.weights(), acc_lengths_per_key)[:-1]
         shifted_weights = torch.cat(
             [x[y[0] :] for x, y in zip(weights_per_key, val_lens_per_key)]
22 changes: 22 additions & 0 deletions reagent/test/base/test_utils.py
@@ -120,6 +120,7 @@ def test_reorder_data_kjt(self) -> None:
         weights = values / 10.0
         lengths = torch.tensor([2, 0, 1, 1, 1, 2])
 
+        # With weights
         x = KeyedJaggedTensor(
             keys=keys, values=values, lengths=lengths, weights=weights
         )
@@ -131,13 +132,23 @@ def test_reorder_data_kjt(self) -> None:
         assert torch.allclose(y.lengths(), torch.tensor([1, 0, 2, 2, 1, 1]))
         assert torch.allclose(y.weights(), y.values() / 10.0)
+
+        # Without weights
+        x = KeyedJaggedTensor(keys=keys, values=values, lengths=lengths)
+        y = reorder_data_kjt(x, torch.tensor([2, 1, 0]))
+        self.assertEqual(y.keys(), keys)
+        assert torch.allclose(
+            y.values(), torch.tensor([2.0, 0.0, 1.0, 5.0, 6.0, 4.0, 3.0])
+        )
+        assert torch.allclose(y.lengths(), torch.tensor([1, 0, 2, 2, 1, 1]))
 
     def test_shift_kjt_by_one(self) -> None:
         """Test the example in the docstring of shift_kjt_by_one"""
         keys = ["Key0", "Key1"]
         values = torch.arange(7).float()
         weights = values / 10.0
         lengths = torch.tensor([2, 0, 1, 1, 1, 2])
 
+        # With weights
         x = KeyedJaggedTensor(
             keys=keys, values=values, lengths=lengths, weights=weights
         )
@@ -146,3 +157,14 @@ def test_shift_kjt_by_one(self) -> None:
         assert torch.allclose(y.values(), torch.tensor([2.0, 4.0, 5.0, 6.0]))
         assert torch.allclose(y.lengths(), torch.tensor([0, 1, 0, 1, 2, 0]))
         assert torch.allclose(y.weights(), y.values() / 10.0)
+
+        # Without weights
+        x = KeyedJaggedTensor(
+            keys=keys,
+            values=values,
+            lengths=lengths,
+        )
+        y = shift_kjt_by_one(x)
+        self.assertEqual(y.keys(), keys)
+        assert torch.allclose(y.values(), torch.tensor([2.0, 4.0, 5.0, 6.0]))
+        assert torch.allclose(y.lengths(), torch.tensor([0, 1, 0, 1, 2, 0]))
