Fix infinite loop when calling skipValue at the end of an object or array.
Fail with a clear message. This is useful when iterating through an array and making assumptions about its size without using hasNext. Also, disallow calling skipValue at the end of a document. This is a behavior change.
This commit is contained in:
parent
5f5631e34f
commit
5bf14098d1
3 changed files with 56 additions and 2 deletions
|
@ -957,11 +957,19 @@ final class JsonUtf8Reader extends JsonReader {
|
|||
pushScope(JsonScope.EMPTY_OBJECT);
|
||||
count++;
|
||||
} else if (p == PEEKED_END_ARRAY) {
|
||||
stackSize--;
|
||||
count--;
|
||||
if (count < 0) {
|
||||
throw new JsonDataException(
|
||||
"Expected a value but was " + peek() + " at path " + getPath());
|
||||
}
|
||||
stackSize--;
|
||||
} else if (p == PEEKED_END_OBJECT) {
|
||||
stackSize--;
|
||||
count--;
|
||||
if (count < 0) {
|
||||
throw new JsonDataException(
|
||||
"Expected a value but was " + peek() + " at path " + getPath());
|
||||
}
|
||||
stackSize--;
|
||||
} else if (p == PEEKED_UNQUOTED_NAME || p == PEEKED_UNQUOTED) {
|
||||
skipUnquotedValue();
|
||||
} else if (p == PEEKED_DOUBLE_QUOTED || p == PEEKED_DOUBLE_QUOTED_NAME) {
|
||||
|
@ -970,6 +978,9 @@ final class JsonUtf8Reader extends JsonReader {
|
|||
skipQuotedValue(SINGLE_QUOTE_OR_SLASH);
|
||||
} else if (p == PEEKED_NUMBER) {
|
||||
buffer.skip(peekedNumberLength);
|
||||
} else if (p == PEEKED_EOF) {
|
||||
throw new JsonDataException(
|
||||
"Expected a value but was " + peek() + " at path " + getPath());
|
||||
}
|
||||
peeked = PEEKED_NONE;
|
||||
} while (count != 0);
|
||||
|
|
|
@ -308,6 +308,9 @@ final class JsonValueReader extends JsonReader {
|
|||
|
||||
Object skipped = stackSize != 0 ? stack[stackSize - 1] : null;
|
||||
|
||||
if (skipped instanceof JsonIterator) {
|
||||
throw new JsonDataException("Expected a value but was " + peek() + " at path " + getPath());
|
||||
}
|
||||
if (skipped instanceof Map.Entry) {
|
||||
// We're skipping a name. Promote the map entry's value.
|
||||
Map.Entry<?, ?> entry = (Map.Entry<?, ?>) stack[stackSize - 1];
|
||||
|
@ -315,6 +318,8 @@ final class JsonValueReader extends JsonReader {
|
|||
} else if (stackSize > 0) {
|
||||
// We're skipping a value.
|
||||
remove();
|
||||
} else {
|
||||
throw new JsonDataException("Expected a value but was " + peek() + " at path " + getPath());
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -988,6 +988,44 @@ public final class JsonReaderTest {
|
|||
assertThat(reader.hasNext()).isFalse();
|
||||
}
|
||||
|
||||
@Test public void skipValueAtEndOfObjectFails() throws IOException {
|
||||
JsonReader reader = newReader("{}");
|
||||
reader.beginObject();
|
||||
try {
|
||||
reader.skipValue();
|
||||
fail();
|
||||
} catch (JsonDataException expected) {
|
||||
assertThat(expected).hasMessage("Expected a value but was END_OBJECT at path $.");
|
||||
}
|
||||
reader.endObject();
|
||||
assertThat(reader.peek()).isEqualTo(JsonReader.Token.END_DOCUMENT);
|
||||
}
|
||||
|
||||
@Test public void skipValueAtEndOfArrayFails() throws IOException {
|
||||
JsonReader reader = newReader("[]");
|
||||
reader.beginArray();
|
||||
try {
|
||||
reader.skipValue();
|
||||
fail();
|
||||
} catch (JsonDataException expected) {
|
||||
assertThat(expected).hasMessage("Expected a value but was END_ARRAY at path $[0]");
|
||||
}
|
||||
reader.endArray();
|
||||
assertThat(reader.peek()).isEqualTo(JsonReader.Token.END_DOCUMENT);
|
||||
}
|
||||
|
||||
@Test public void skipValueAtEndOfDocumentFails() throws IOException {
|
||||
JsonReader reader = newReader("1");
|
||||
reader.nextInt();
|
||||
try {
|
||||
reader.skipValue();
|
||||
fail();
|
||||
} catch (JsonDataException expected) {
|
||||
assertThat(expected).hasMessage("Expected a value but was END_DOCUMENT at path $");
|
||||
}
|
||||
assertThat(reader.peek()).isEqualTo(JsonReader.Token.END_DOCUMENT);
|
||||
}
|
||||
|
||||
@Test public void basicPeekJson() throws IOException {
|
||||
JsonReader reader = newReader("{\"a\":12,\"b\":[34,56],\"c\":78}");
|
||||
reader.beginObject();
|
||||
|
|
Loading…
Reference in a new issue