 import java.util.function.Consumer;
 
 import com.fasterxml.jackson.core.JsonFactory;
-import com.fasterxml.jackson.core.JsonParser;
 import com.fasterxml.jackson.core.TreeNode;
 import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.util.TokenBuffer;
 import org.json.JSONException;
 import org.junit.Before;
 import org.junit.Test;
  */
 public class Jackson2TokenizerTests extends AbstractDataBufferAllocatingTestCase {
 
-	private JsonParser jsonParser;
-
-	private Jackson2Tokenizer tokenizer;
-
 	private ObjectMapper objectMapper;
 
+	private JsonFactory jsonFactory;
+
 	@Before
 	public void createParser() throws IOException {
-		JsonFactory factory = new JsonFactory();
-		this.jsonParser = factory.createNonBlockingByteArrayParser();
-		this.objectMapper = new ObjectMapper(factory);
+		jsonFactory = new JsonFactory();
+		this.objectMapper = new ObjectMapper(jsonFactory);
 	}
 
 	@Test
 	public void doNotTokenizeArrayElements() {
-		this.tokenizer = new Jackson2Tokenizer(this.jsonParser, false);
-
 		testTokenize(
 				singletonList("{\"foo\": \"foofoo\", \"bar\": \"barbar\"}"),
-				singletonList("{\"foo\": \"foofoo\", \"bar\": \"barbar\"}"));
+				singletonList("{\"foo\": \"foofoo\", \"bar\": \"barbar\"}"), false);
 
 		testTokenize(
 				asList("{\"foo\": \"foofoo\"",
 						", \"bar\": \"barbar\"}"),
-				singletonList("{\"foo\":\"foofoo\",\"bar\":\"barbar\"}"));
+				singletonList("{\"foo\":\"foofoo\",\"bar\":\"barbar\"}"), false);
 
 		testTokenize(
 				singletonList("[{\"foo\": \"foofoo\", \"bar\": \"barbar\"},{\"foo\": \"foofoofoo\", \"bar\": \"barbarbar\"}]"),
-				singletonList("[{\"foo\": \"foofoo\", \"bar\": \"barbar\"},{\"foo\": \"foofoofoo\", \"bar\": \"barbarbar\"}]"));
+				singletonList("[{\"foo\": \"foofoo\", \"bar\": \"barbar\"},{\"foo\": \"foofoofoo\", \"bar\": \"barbarbar\"}]"), false);
 
 		testTokenize(
 				singletonList("[{\"foo\": \"bar\"},{\"foo\": \"baz\"}]"),
-				singletonList("[{\"foo\": \"bar\"},{\"foo\": \"baz\"}]"));
+				singletonList("[{\"foo\": \"bar\"},{\"foo\": \"baz\"}]"), false);
 
 		testTokenize(
 				asList("[{\"foo\": \"foofoo\", \"bar\"",
 						": \"barbar\"},{\"foo\": \"foofoofoo\", \"bar\": \"barbarbar\"}]"),
-				singletonList("[{\"foo\": \"foofoo\", \"bar\": \"barbar\"},{\"foo\": \"foofoofoo\", \"bar\": \"barbarbar\"}]"));
+				singletonList("[{\"foo\": \"foofoo\", \"bar\": \"barbar\"},{\"foo\": \"foofoofoo\", \"bar\": \"barbarbar\"}]"), false);
 
 		testTokenize(
 				asList("[",
@@ -90,31 +85,43 @@ public void doNotTokenizeArrayElements() {
 						",",
 						"{\"id\":3,\"name\":\"Ford\"}",
 						"]"),
-				singletonList("[{\"id\":1,\"name\":\"Robert\"},{\"id\":2,\"name\":\"Raide\"},{\"id\":3,\"name\":\"Ford\"}]"));
+				singletonList("[{\"id\":1,\"name\":\"Robert\"},{\"id\":2,\"name\":\"Raide\"},{\"id\":3,\"name\":\"Ford\"}]"), false);
+
+		// SPR-16166: top-level JSON values
+		testTokenize(asList("\"foo", "bar\""),
+				singletonList("\"foobar\""), false);
+
+		testTokenize(asList("12", "34"),
+				singletonList("1234"), false);
+
+		testTokenize(asList("12.", "34"),
+				singletonList("12.34"), false);
+
+		// note that we do not test null, true, or false here: they are also valid
+		// top-level values, but JSONassert cannot compare them
+
 	}
 
 	@Test
 	public void tokenizeArrayElements() {
-		this.tokenizer = new Jackson2Tokenizer(this.jsonParser, true);
-
 		testTokenize(
 				singletonList("{\"foo\": \"foofoo\", \"bar\": \"barbar\"}"),
-				singletonList("{\"foo\": \"foofoo\", \"bar\": \"barbar\"}"));
+				singletonList("{\"foo\": \"foofoo\", \"bar\": \"barbar\"}"), true);
 
 		testTokenize(
 				asList("{\"foo\": \"foofoo\"",
 						", \"bar\": \"barbar\"}"),
-				singletonList("{\"foo\":\"foofoo\",\"bar\":\"barbar\"}"));
+				singletonList("{\"foo\":\"foofoo\",\"bar\":\"barbar\"}"), true);
 
 		testTokenize(
 				singletonList("[{\"foo\": \"foofoo\", \"bar\": \"barbar\"},{\"foo\": \"foofoofoo\", \"bar\": \"barbarbar\"}]"),
 				asList("{\"foo\": \"foofoo\", \"bar\": \"barbar\"}",
-						"{\"foo\": \"foofoofoo\", \"bar\": \"barbarbar\"}"));
+						"{\"foo\": \"foofoofoo\", \"bar\": \"barbarbar\"}"), true);
 
 		testTokenize(
 				singletonList("[{\"foo\": \"bar\"},{\"foo\": \"baz\"}]"),
 				asList("{\"foo\": \"bar\"}",
-						"{\"foo\": \"baz\"}"));
+						"{\"foo\": \"baz\"}"), true);
 
 		// SPR-15803: nested array
 		testTokenize(
@@ -126,19 +133,19 @@ public void tokenizeArrayElements() {
 				asList(
 						"{\"id\":\"0\",\"start\":[-999999999,1,1],\"end\":[999999999,12,31]}",
 						"{\"id\":\"1\",\"start\":[-999999999,1,1],\"end\":[999999999,12,31]}",
-						"{\"id\":\"2\",\"start\":[-999999999,1,1],\"end\":[999999999,12,31]}")
-		);
+						"{\"id\":\"2\",\"start\":[-999999999,1,1],\"end\":[999999999,12,31]}"),
+				true);
 
 		// SPR-15803: nested array, no top-level array
 		testTokenize(
 				singletonList("{\"speakerIds\":[\"tastapod\"],\"language\":\"ENGLISH\"}"),
-				singletonList("{\"speakerIds\":[\"tastapod\"],\"language\":\"ENGLISH\"}"));
+				singletonList("{\"speakerIds\":[\"tastapod\"],\"language\":\"ENGLISH\"}"), true);
 
 		testTokenize(
 				asList("[{\"foo\": \"foofoo\", \"bar\"",
 						": \"barbar\"},{\"foo\": \"foofoofoo\", \"bar\": \"barbarbar\"}]"),
 				asList("{\"foo\": \"foofoo\", \"bar\": \"barbar\"}",
-						"{\"foo\": \"foofoofoo\", \"bar\": \"barbarbar\"}"));
+						"{\"foo\": \"foofoofoo\", \"bar\": \"barbarbar\"}"), true);
 
 		testTokenize(
 				asList("[",
@@ -150,15 +157,27 @@
 						"]"),
 				asList("{\"id\":1,\"name\":\"Robert\"}",
 						"{\"id\":2,\"name\":\"Raide\"}",
-						"{\"id\":3,\"name\":\"Ford\"}"));
+						"{\"id\":3,\"name\":\"Ford\"}"), true);
+
+		// SPR-16166: top-level JSON values
+		testTokenize(asList("\"foo", "bar\""),
+				singletonList("\"foobar\""), true);
+
+		testTokenize(asList("12", "34"),
+				singletonList("1234"), true);
+
+		testTokenize(asList("12.", "34"),
+				singletonList("12.34"), true);
 	}
 
-	private void testTokenize(List<String> source, List<String> expected) {
+	private void testTokenize(List<String> source, List<String> expected, boolean tokenizeArrayElements) {
 		Flux<DataBuffer> sourceFlux = Flux.fromIterable(source)
 				.map(this::stringBuffer);
 
-		Flux<String> result = sourceFlux
-				.flatMap(this.tokenizer)
+		Flux<TokenBuffer> tokenBufferFlux =
+				Jackson2Tokenizer.tokenize(sourceFlux, jsonFactory, tokenizeArrayElements);
+
+		Flux<String> result = tokenBufferFlux
 				.map(tokenBuffer -> {
 					try {
 						TreeNode root = this.objectMapper.readTree(tokenBuffer.asParser());
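
For orientation, below is a minimal standalone sketch (not part of the commit) of the contract the updated test exercises: the static Jackson2Tokenizer.tokenize(Flux<DataBuffer>, JsonFactory, boolean) turns arbitrarily split JSON byte chunks into complete TokenBuffers, including the SPR-16166 top-level-value case. It assumes access to Jackson2Tokenizer (it lives alongside this test in org.springframework.http.codec.json); the class name TokenizeSketch and the use of DefaultDataBufferFactory in place of the base class's stringBuffer helper are illustrative choices, not taken from the commit.

import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.util.TokenBuffer;
import reactor.core.publisher.Flux;

import org.springframework.core.io.buffer.DataBuffer;
import org.springframework.core.io.buffer.DefaultDataBufferFactory;

public class TokenizeSketch {

	public static void main(String[] args) {
		JsonFactory jsonFactory = new JsonFactory();
		ObjectMapper mapper = new ObjectMapper(jsonFactory);
		DefaultDataBufferFactory bufferFactory = new DefaultDataBufferFactory();

		// A top-level number split across two buffers: the SPR-16166 case.
		Flux<DataBuffer> source = Flux.just("12.", "34")
				.map(s -> bufferFactory.wrap(ByteBuffer.wrap(s.getBytes(StandardCharsets.UTF_8))));

		// tokenizeArrayElements=false: emit one TokenBuffer per complete JSON document.
		Flux<TokenBuffer> tokens = Jackson2Tokenizer.tokenize(source, jsonFactory, false);

		// Replay the buffered tokens through the ObjectMapper, as testTokenize() does.
		tokens.map(tokenBuffer -> {
			try {
				return mapper.readTree(tokenBuffer.asParser()).toString();
			}
			catch (Exception ex) {
				throw new RuntimeException(ex);
			}
		}).subscribe(System.out::println);  // prints: 12.34
	}
}

As the in-test comments note, JSONassert cannot compare bare null, true, or false; a plain string comparison like the one above would cover those values as well.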