Commit 91de8d2

Fix ProtobufDecoder handling of split message size
This commit introduces a new readMessageSize(DataBuffer input) private method, inspired by CodedInputStream#readRawVarint32(int, InputStream) and adapted for DataBuffer, using MessageDecoderFunction fields in order to support use cases where the message size is split across distinct chunks.

It also fixes handling of end of stream by using DataBuffer#readableByteCount instead of -1, which is only relevant for InputStream.

Issue: SPR-17429
1 parent 77ab88b commit 91de8d2
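
For context, the message size prefix written ahead of each delimited protobuf message is a base-128 varint, so it can itself be cut in half when the stream is re-chunked. The sketch below (not part of the commit) illustrates the stateful parsing approach the commit message describes: the partially accumulated size and the current bit offset are kept in fields so that parsing can resume on the next chunk. It uses java.nio.ByteBuffer in place of Spring's DataBuffer and, for simplicity, rejects varints longer than 32 bits rather than reading and discarding up to 64 bits as the real method does.

import java.nio.ByteBuffer;

public class SplitVarintSketch {

	private int value;   // partially accumulated message size
	private int offset;  // next bit position to fill; 0 means a new varint starts here

	/** Returns true when the varint is complete, false when more input is needed. */
	boolean readVarint(ByteBuffer input) {
		if (this.offset == 0) {
			this.value = 0;  // starting a fresh varint
		}
		while (input.hasRemaining()) {  // analogous to DataBuffer#readableByteCount() > 0
			int b = input.get() & 0xff;
			this.value |= (b & 0x7f) << this.offset;
			this.offset += 7;
			if ((b & 0x80) == 0) {  // no continuation bit: varint is complete
				this.offset = 0;
				return true;
			}
			if (this.offset >= 32) {
				throw new IllegalStateException("Malformed varint");
			}
		}
		return false;  // chunk exhausted: keep value/offset and resume on the next chunk
	}

	public static void main(String[] args) {
		// 300 encodes as the two varint bytes 0xAC 0x02; feed them as separate "chunks".
		SplitVarintSketch sketch = new SplitVarintSketch();
		System.out.println(sketch.readVarint(ByteBuffer.wrap(new byte[] {(byte) 0xAC})));  // false
		System.out.println(sketch.readVarint(ByteBuffer.wrap(new byte[] {0x02})));         // true
		System.out.println(sketch.value);  // 300
	}
}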

File tree

2 files changed: +96 -5 lines


spring-web/src/main/java/org/springframework/http/codec/protobuf/ProtobufDecoder.java

Lines changed: 55 additions & 4 deletions
@@ -172,6 +172,8 @@ private class MessageDecoderFunction implements Function<DataBuffer, Iterable<?
 
 		private int messageBytesToRead;
 
+		private int offset;
+
 
 		public MessageDecoderFunction(ResolvableType elementType, int maxMessageSize) {
 			this.elementType = elementType;
@@ -188,11 +190,9 @@ public Iterable<? extends Message> apply(DataBuffer input) {
 
 			do {
 				if (this.output == null) {
-					int firstByte = input.read();
-					if (firstByte == -1) {
-						throw new DecodingException("Cannot parse message size");
+					if (!readMessageSize(input)) {
+						return messages;
 					}
-					this.messageBytesToRead = CodedInputStream.readRawVarint32(firstByte, input.asInputStream());
 					if (this.messageBytesToRead > this.maxMessageSize) {
 						throw new DecodingException(
 								"The number of bytes to read from the incoming stream " +
@@ -235,6 +235,57 @@ public Iterable<? extends Message> apply(DataBuffer input) {
 				DataBufferUtils.release(input);
 			}
 		}
+
+		/**
+		 * Parse the message size as a varint from the input stream, updating the
+		 * {@code messageBytesToRead} and {@code offset} fields if needed to allow
+		 * processing of upcoming chunks.
+		 * Inspired by {@link CodedInputStream#readRawVarint32(int, java.io.InputStream)}.
+		 * @return {@code true} when the message size is parsed successfully,
+		 * {@code false} when the message size is truncated
+		 * @see <a href="https://developers.google.com/protocol-buffers/docs/encoding#varints">Base 128 Varints</a>
+		 */
+		private boolean readMessageSize(DataBuffer input) {
+			if (this.offset == 0) {
+				if (input.readableByteCount() == 0) {
+					return false;
+				}
+				int firstByte = input.read();
+				if ((firstByte & 0x80) == 0) {
+					this.messageBytesToRead = firstByte;
+					return true;
+				}
+				this.messageBytesToRead = firstByte & 0x7f;
+				this.offset = 7;
+			}
+
+			if (this.offset < 32) {
+				for (; this.offset < 32; this.offset += 7) {
+					if (input.readableByteCount() == 0) {
+						return false;
+					}
+					final int b = input.read();
+					this.messageBytesToRead |= (b & 0x7f) << offset;
+					if ((b & 0x80) == 0) {
+						this.offset = 0;
+						return true;
+					}
+				}
+			}
+			// Keep reading up to 64 bits.
+			for (; this.offset < 64; this.offset += 7) {
+				if (input.readableByteCount() == 0) {
+					return false;
+				}
+				final int b = input.read();
+				if ((b & 0x80) == 0) {
+					this.offset = 0;
+					return true;
+				}
+			}
+			this.offset = 0;
+			throw new DecodingException("Cannot parse message size: malformed varint");
+		}
 	}
 
 }

spring-web/src/test/java/org/springframework/http/codec/protobuf/ProtobufDecoderTests.java

Lines changed: 41 additions & 1 deletion
@@ -128,7 +128,9 @@ public void decode() {
 	}
 
 	@Test
-	public void decodeSplitChunks() throws IOException {
+	public void decodeSplitChunks() {
+
+
 		Flux<DataBuffer> input = Flux.just(this.testMsg1, this.testMsg2)
 				.flatMap(msg -> Mono.defer(() -> {
 					DataBuffer buffer = this.bufferFactory.allocateBuffer();
@@ -158,6 +160,44 @@ public void decodeSplitChunks() throws IOException {
 				.verifyComplete());
 	}
 
+	@Test  // SPR-17429
+	public void decodeSplitMessageSize() {
+		this.decoder.setMaxMessageSize(100009);
+		StringBuilder builder = new StringBuilder();
+		for (int i = 0; i < 10000; i++) {
+			builder.append("azertyuiop");
+		}
+		Msg bigMessage = Msg.newBuilder().setFoo(builder.toString()).setBlah(secondMsg2).build();
+
+		Flux<DataBuffer> input = Flux.just(bigMessage, bigMessage)
+				.flatMap(msg -> Mono.defer(() -> {
+					DataBuffer buffer = this.bufferFactory.allocateBuffer();
+					try {
+						msg.writeDelimitedTo(buffer.asOutputStream());
+						return Mono.just(buffer);
+					}
+					catch (IOException e) {
+						release(buffer);
+						return Mono.error(e);
+					}
+				}))
+				.flatMap(buffer -> {
+					int len = 2;
+					Flux<DataBuffer> result = Flux.just(
+							DataBufferUtils.retain(buffer.slice(0, len)),
+							DataBufferUtils
+									.retain(buffer.slice(len, buffer.readableByteCount() - len))
+					);
+					release(buffer);
+					return result;
+				});
+
+		testDecode(input, Msg.class, step -> step
+				.expectNext(bigMessage)
+				.expectNext(bigMessage)
+				.verifyComplete());
+	}
+
 	@Test
 	public void decodeMergedChunks() throws IOException {
 		DataBuffer buffer = this.bufferFactory.allocateBuffer();
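
A note on the new test: the delimited form prefixes each message with its serialized size as a varint. For a message of roughly 100,000 bytes that prefix is 3 bytes long, so slicing each delimited buffer after len = 2 bytes cuts the size prefix itself in two, which is exactly the situation SPR-17429 describes. A quick way to check the prefix length (a sketch assuming protobuf-java 3.x on the classpath; the size value 100009 is illustrative, not the test's exact serialized size):

import com.google.protobuf.CodedOutputStream;

public class SizePrefixSketch {

	public static void main(String[] args) throws Exception {
		int size = 100009;  // close to the maxMessageSize used in the test
		byte[] prefix = new byte[CodedOutputStream.computeUInt32SizeNoTag(size)];
		CodedOutputStream out = CodedOutputStream.newInstance(prefix);
		out.writeUInt32NoTag(size);
		out.flush();
		// prefix.length == 3; the bytes are 0xA9 0x8D 0x06, with the continuation bit set on the
		// first two, so cutting the delimited buffer after 2 bytes lands inside the varint prefix.
		System.out.println(prefix.length);
	}
}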
