7 changes: 7 additions & 0 deletions bindings/python/src/decoders.rs
@@ -685,6 +685,13 @@ impl PyDecodeStream {
        ))
        .into()
    }
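    // Support Python's copy.copy and copy.deepcopy by cloning the stream's internal state.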
    fn __copy__(&self) -> Self {
        self.clone()
    }

    fn __deepcopy__(&self, _memo: &Bound<'_, PyDict>) -> Self {
        self.clone()
    }
}

#[cfg(test)]
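The two new dunder methods above simply forward to `clone()`, so a `DecodeStream` can be duplicated from Python in the middle of decoding and each copy stepped independently. A minimal usage sketch, mirroring the test below (the toy tokenizer, the token ids, and the `from tokenizers.decoders import DecodeStream` path are illustrative assumptions, not part of this diff):

import copy

from tokenizers import Tokenizer
from tokenizers.decoders import DecodeStream
from tokenizers.models import BPE

tokenizer = Tokenizer(BPE())
tokenizer.add_tokens(["my", "name", "is", "john"])

stream = DecodeStream(skip_special_tokens=False)
assert stream.step(tokenizer, 0) == "my"

# Both copies carry the prefix state accumulated so far and then
# decode independently of the original stream.
shallow = copy.copy(stream)
deep = copy.deepcopy(stream)
assert shallow.step(tokenizer, 1) == " name"
assert deep.step(tokenizer, 1) == " name"
assert stream.step(tokenizer, 1) == " name"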
22 changes: 22 additions & 0 deletions bindings/python/tests/bindings/test_tokenizer.py
@@ -1,4 +1,5 @@
import pickle
import copy
import concurrent.futures
import pytest
import numpy as np
@@ -374,6 +375,27 @@ def test_decode(self):
        stream = DecodeStream(ids=[0, 1, 2])
        assert stream.step(tokenizer, 3) == " john"

    def test_decode_stream_copy_and_prefix_ids(self):
        tokenizer = Tokenizer(BPE())
        tokenizer.add_tokens(["my", "name", "is", "john"])
        token_ids = [0, 1, 2, 3]

        stream = DecodeStream(skip_special_tokens=False)
        assert stream.step(tokenizer, token_ids[0]) == "my"
        assert stream.step(tokenizer, token_ids[1]) == " name"
        stream_copy = copy.copy(stream)
        assert stream.step(tokenizer, token_ids[2]) == " is"
        assert stream_copy.step(tokenizer, token_ids[2]) == " is"
        assert stream.step(tokenizer, token_ids[3]) == " john"
        assert stream_copy.step(tokenizer, token_ids[3]) == " john"

        stream_steps = DecodeStream([])
        last_chunk = None
        for tid in token_ids:
            last_chunk = stream_steps.step(tokenizer, tid)
        stream_prefill = DecodeStream(token_ids[:-1])
        assert stream_prefill.step(tokenizer, token_ids[-1]) == last_chunk

    def test_decode_stream_fallback(self):
        tokenizer = Tokenizer.from_pretrained("gpt2")
        # tokenizer.decode([255]) fails because it's a fallback
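The last block of the new test asserts that constructing a `DecodeStream` with prefix ids is equivalent to stepping through those ids one by one: the prefilled stream only emits chunks for ids stepped afterwards. A minimal sketch of resuming streaming after a prompt has already been processed (same illustrative tokenizer and import path as above):

from tokenizers import Tokenizer
from tokenizers.decoders import DecodeStream
from tokenizers.models import BPE

tokenizer = Tokenizer(BPE())
tokenizer.add_tokens(["my", "name", "is", "john"])

# Seed the stream with ids that were already decoded elsewhere (e.g. the
# prompt) instead of replaying them step by step.
prompt_ids = [0, 1, 2]
stream = DecodeStream(prompt_ids)

# Only ids stepped after construction produce output chunks.
assert stream.step(tokenizer, 3) == " john"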