Commit bd0de70
Jackson2JsonTokenizer unwraps top level array only
When the input stream is a JSON array, the tokenizer skips over the start and end array tokens in order to decode into an array of top-level objects. However, in this mode it also skips over nested start and end array tokens, which prevents proper parsing of JSR-310 dates (represented with an array syntax) as well as of any fields that are collections.

Issue: SPR-15803
1 parent c3e3df5 commit bd0de70
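
To make the fix concrete, here is a minimal standalone sketch of the same depth-tracking idea using plain (blocking) Jackson streaming. The class name TopLevelArrayDemo and the sample payload are invented for illustration, and this is a simplified approximation of what Jackson2Tokenizer does, not the Spring implementation itself; it does, however, use the same Jackson building blocks (JsonFactory, JsonToken, TokenBuffer, copyCurrentEvent):

import java.util.ArrayList;
import java.util.List;

import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonToken;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.util.TokenBuffer;

public class TopLevelArrayDemo {

	public static void main(String[] args) throws Exception {
		// Hypothetical payload: each element has fields that are themselves
		// arrays, as a JSR-310 LocalDate is when written as a timestamp array.
		String json = "[{\"id\":\"0\",\"start\":[-999999999,1,1]},"
				+ "{\"id\":\"1\",\"start\":[999999999,12,31]}]";

		JsonParser parser = new JsonFactory().createParser(json);
		List<TokenBuffer> elements = new ArrayList<>();
		TokenBuffer buffer = new TokenBuffer(parser);
		int arrayDepth = 0;
		int objectDepth = 0;

		JsonToken token;
		while ((token = parser.nextToken()) != null) {
			if (token == JsonToken.START_ARRAY) {
				arrayDepth++;
			}
			else if (token == JsonToken.END_ARRAY) {
				arrayDepth--;
			}
			else if (token == JsonToken.START_OBJECT) {
				objectDepth++;
			}
			else if (token == JsonToken.END_OBJECT) {
				objectDepth--;
			}

			// Skip only the *top-level* array brackets; nested START_ARRAY
			// (depth > 1) and END_ARRAY (depth > 0) tokens must be copied,
			// otherwise fields such as "start" above lose their brackets.
			boolean topLevel = (token == JsonToken.START_ARRAY && arrayDepth == 1)
					|| (token == JsonToken.END_ARRAY && arrayDepth == 0);
			if (!topLevel) {
				buffer.copyCurrentEvent(parser);
			}

			// A complete top-level element ends on an END_OBJECT that brings
			// us back to object depth 0 inside the top-level array.
			if (token == JsonToken.END_OBJECT && objectDepth == 0 && arrayDepth == 1) {
				elements.add(buffer);
				buffer = new TokenBuffer(parser);
			}
		}

		ObjectMapper mapper = new ObjectMapper();
		for (TokenBuffer element : elements) {
			// Prints {"id":"0","start":[-999999999,1,1]} and
			// {"id":"1","start":[999999999,12,31]} as separate elements.
			System.out.println(mapper.readTree(element.asParser()));
		}
	}
}

Before the fix, the equivalent logic skipped every START_ARRAY and END_ARRAY token, so the "start" field above would have lost its brackets and the buffered element would no longer have been valid JSON.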

File tree: 2 files changed (+19, −1 lines)


spring-web/src/main/java/org/springframework/http/codec/json/Jackson2Tokenizer.java
6 additions, 1 deletion

@@ -140,7 +140,7 @@ private void processTokenNormal(JsonToken token, List<TokenBuffer> result) throw
 	}
 
 	private void processTokenArray(JsonToken token, List<TokenBuffer> result) throws IOException {
-		if (token != JsonToken.START_ARRAY && token != JsonToken.END_ARRAY) {
+		if (!isTopLevelArrayToken(token)) {
 			this.tokenBuffer.copyCurrentEvent(this.parser);
 		}
 
@@ -151,6 +151,11 @@ private void processTokenArray(JsonToken token, List<TokenBuffer> result) throws
 		}
 	}
 
+	private boolean isTopLevelArrayToken(JsonToken token) {
+		return (token == JsonToken.START_ARRAY && this.arrayDepth == 1) ||
+				(token == JsonToken.END_ARRAY && this.arrayDepth == 0);
+	}
+
 	public void endOfInput() {
 		this.inputFeeder.endOfInput();
 	}
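
The asymmetry in the two depth values is deliberate: the surrounding tokenize loop (not shown in this diff) updates arrayDepth before dispatching each token, so the top-level START_ARRAY is observed after the increment, at depth 1, while the matching END_ARRAY is observed after the decrement, at depth 0.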

spring-web/src/test/java/org/springframework/http/codec/json/Jackson2TokenizerTests.java
13 additions, 0 deletions

@@ -116,6 +116,19 @@ public void tokenizeArrayElements() {
 				asList("{\"foo\": \"bar\"}",
 						"{\"foo\": \"baz\"}"));
 
+		// SPR-15803
+		testTokenize(
+				singletonList("[" +
+						"{\"id\":\"0\",\"start\":[-999999999,1,1],\"end\":[999999999,12,31]}," +
+						"{\"id\":\"1\",\"start\":[-999999999,1,1],\"end\":[999999999,12,31]}," +
+						"{\"id\":\"2\",\"start\":[-999999999,1,1],\"end\":[999999999,12,31]}" +
+						"]"),
+				asList(
+						"{\"id\":\"0\",\"start\":[-999999999,1,1],\"end\":[999999999,12,31]}",
+						"{\"id\":\"1\",\"start\":[-999999999,1,1],\"end\":[999999999,12,31]}",
+						"{\"id\":\"2\",\"start\":[-999999999,1,1],\"end\":[999999999,12,31]}")
+		);
+
 		testTokenize(
 				asList("[{\"foo\": \"foofoo\", \"bar\"",
 						": \"barbar\"},{\"foo\": \"foofoofoo\", \"bar\": \"barbarbar\"}]"),
