diff --git a/.kokoro/github/ubuntu/gpu/build.sh b/.kokoro/github/ubuntu/gpu/build.sh
index 2571d0d048..f3b0095977 100644
--- a/.kokoro/github/ubuntu/gpu/build.sh
+++ b/.kokoro/github/ubuntu/gpu/build.sh
@@ -25,7 +25,7 @@ if [ "${KERAS2:-0}" == "1" ]
 then
    echo "Keras2 detected."
    pip install -r requirements-common.txt --progress-bar off
-   pip install tensorflow-text==2.14 tensorflow==2.14 keras-core
+   pip install tensorflow-text==2.15 tensorflow[and-cuda]~=2.15 keras-core
 
 elif [ "$KERAS_BACKEND" == "tensorflow" ]
 then
diff --git a/.kokoro/github/ubuntu/gpu/keras2/presubmit.cfg b/.kokoro/github/ubuntu/gpu/keras2/presubmit.cfg
index e988e3b375..7e971ac96d 100644
--- a/.kokoro/github/ubuntu/gpu/keras2/presubmit.cfg
+++ b/.kokoro/github/ubuntu/gpu/keras2/presubmit.cfg
@@ -7,5 +7,10 @@ action {
   }
 }
 
+env_vars: {
+  key: "KERAS2"
+  value: "1"
+}
+
 # Set timeout to 60 mins from default 180 mins
 timeout_mins: 60
\ No newline at end of file
diff --git a/keras_nlp/models/whisper/whisper_tokenizer.py b/keras_nlp/models/whisper/whisper_tokenizer.py
index b1406b0a04..cd4da7d15f 100644
--- a/keras_nlp/models/whisper/whisper_tokenizer.py
+++ b/keras_nlp/models/whisper/whisper_tokenizer.py
@@ -23,7 +23,7 @@
 
 def _load_dict(dict_or_path):
     if isinstance(dict_or_path, str):
-        with open(dict_or_path, "r") as f:
+        with open(dict_or_path, "r", encoding="utf-8") as f:
             dict_or_path = json.load(f)
     return dict_or_path
 
diff --git a/keras_nlp/tokenizers/byte_pair_tokenizer.py b/keras_nlp/tokenizers/byte_pair_tokenizer.py
index 133c9565b0..b799874d2a 100644
--- a/keras_nlp/tokenizers/byte_pair_tokenizer.py
+++ b/keras_nlp/tokenizers/byte_pair_tokenizer.py
@@ -292,7 +292,7 @@ def __init__(
         super().__init__(dtype=dtype, **kwargs)
 
         if isinstance(vocabulary, str):
-            with open(vocabulary, "r") as f:
+            with open(vocabulary, "r", encoding="utf-8") as f:
                 self.vocabulary = json.load(f)
         elif isinstance(vocabulary, dict):
            self.vocabulary = vocabulary.copy()
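
Note on the encoding="utf-8" additions in whisper_tokenizer.py and byte_pair_tokenizer.py: Python's open() falls back to the platform's locale encoding when no encoding is given, so a vocabulary JSON file containing non-ASCII tokens can fail to load on systems whose default encoding is not UTF-8 (common on Windows). Below is a minimal sketch of the pattern both changes adopt; the function name load_json_vocabulary and the path "vocab.json" are illustrative and not part of the diff.

import json

def load_json_vocabulary(path_or_dict):
    # Accept either an in-memory dict or a path to a JSON file, and force
    # UTF-8 when reading from disk so the result does not depend on the
    # platform's locale encoding.
    if isinstance(path_or_dict, str):
        with open(path_or_dict, "r", encoding="utf-8") as f:
            return json.load(f)
    return dict(path_or_dict)

# Illustrative usage; "vocab.json" is a placeholder path.
# vocab = load_json_vocabulary("vocab.json")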