 # limitations under the License.
 
 import doctest
+import io
 import os
 import sys
 import unittest
 
 import numpy as np
 import pytest
+import sentencepiece
 import tensorflow as tf
 from tensorflow import keras
 
 import keras_nlp
 from keras_nlp.tests.doc_tests import docstring_lib
+from keras_nlp.tests.doc_tests import fenced_docstring_lib
+from keras_nlp.tests.doc_tests.fenced_docstring_lib import (
+    astor,  # For checking conditional import.
+)
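+# `astor` may be None when it is not installed; the fenced docstring tests
+# below are skipped in that case.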
 
 PACKAGE = "keras_nlp."
 
@@ -37,9 +43,6 @@ def find_modules():
     return keras_nlp_modules
 
 
-@pytest.mark.skipif(
-    sys.platform == "win32", reason="Numpy prints differently on windows"
-)
 def test_docstrings():
     keras_nlp_modules = find_modules()
     # As of this writing, it doesn't seem like pytest supports load_tests
@@ -77,3 +80,73 @@ def test_docstrings():
     if not result.wasSuccessful():
         print(result)
     assert result.wasSuccessful()
+
+
+@pytest.mark.extra_large
+@pytest.mark.skipif(
+    astor is None,
+    reason="This test requires `astor`. Please `pip install astor` to run.",
+)
+def test_fenced_docstrings():
+    """Tests fenced code blocks in docstrings.
+
+    This can only be run manually. Run with:
+    `pytest keras_nlp/tests/doc_tests/docstring_test.py --run_extra_large`
+    """
+    keras_nlp_modules = find_modules()
+
+    runner = unittest.TextTestRunner()
+    suite = unittest.TestSuite()
+    for module in keras_nlp_modules:
+        # Temporarily stop testing gpt2 & deberta docstrings until we are
+        # exporting the symbols.
+        if "gpt2" in module.__name__ or "deberta_v3" in module.__name__:
+            continue
+        # Do not test certain modules.
+        if module.__name__ in [
+            # Base classes.
+            "keras_nlp.models.backbone",
+            "keras_nlp.models.preprocessor",
+            # Preprocessors and tokenizers which use `model.spm`.
+            "keras_nlp.models.albert.albert_preprocessor",
+            "keras_nlp.models.albert.albert_tokenizer",
+            "keras_nlp.models.xlm_roberta.xlm_roberta_preprocessor",
+            "keras_nlp.models.f_net.f_net_preprocessor",
+            "keras_nlp.models.f_net.f_net_tokenizer",
+        ]:
+            continue
+
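+        # Build a doctest suite for this module: the fenced parser collects
+        # ```python code blocks from docstrings, and `extraglobs` pre-imports
+        # common modules into each example's namespace.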
+        suite.addTest(
+            doctest.DocTestSuite(
+                module,
+                test_finder=doctest.DocTestFinder(
+                    exclude_empty=False,
+                    parser=fenced_docstring_lib.FencedCellParser(
+                        fence_label="python"
+                    ),
+                ),
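+                # `_print_if_not_none` prints a value unless it is `None`; the
+                # fenced parser relies on it so bare expressions in examples
+                # echo their output like a REPL.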
+                globs={
+                    "_print_if_not_none": fenced_docstring_lib._print_if_not_none
+                },
+                extraglobs={
+                    "tf": tf,
+                    "np": np,
+                    "os": os,
+                    "keras": keras,
+                    "keras_nlp": keras_nlp,
+                    "io": io,
+                    "sentencepiece": sentencepiece,
+                },
+                checker=docstring_lib.DoctestOutputChecker(),
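+                # ELLIPSIS lets examples elide long output with `...`, while
+                # NORMALIZE_WHITESPACE and IGNORE_EXCEPTION_DETAIL keep checks
+                # robust to formatting and exception-message differences.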
+                optionflags=(
+                    doctest.ELLIPSIS
+                    | doctest.NORMALIZE_WHITESPACE
+                    | doctest.IGNORE_EXCEPTION_DETAIL
+                    | doctest.DONT_ACCEPT_BLANKLINE
+                ),
+            )
+        )
+    result = runner.run(suite)
+    if not result.wasSuccessful():
+        print(result)
+    assert result.wasSuccessful()