Skip to content

Commit

Permalink
Add Document limits, validation (#278)
Browse files Browse the repository at this point in the history
  • Loading branch information
tatu-at-datastax authored Mar 20, 2023
1 parent bb48523 commit 45d700f
Show file tree
Hide file tree
Showing 6 changed files with 470 additions and 6 deletions.
Original file line number Diff line number Diff line change
@@ -0,0 +1,48 @@
package io.stargate.sgv2.jsonapi.config;

import io.smallrye.config.ConfigMapping;
import io.smallrye.config.WithDefault;
import javax.validation.constraints.Positive;

/**
 * Configuration Object that defines limits on Documents managed by JSON API.
 *
 * <p>Limits are enforced during shredding of incoming documents; violations are reported
 * as {@code SHRED_DOC_LIMIT_VIOLATION} errors.
 */
@ConfigMapping(prefix = "stargate.jsonapi.doc-limits")
public interface DocumentLimitsConfig {
/**
* @return Defines the maximum document size, defaults to {@code 1 meg} (1 million
*     characters), measured as the length of the document serialized as JSON.
*/
@Positive
@WithDefault("1000000")
int maxDocSize();

/** @return Defines the maximum document depth (nesting), defaults to {@code 8 levels} */
@Positive
@WithDefault("8")
int maxDocDepth();

/**
* @return Defines the maximum length of property names in JSON documents, defaults to {@code 48
* characters} (note: length is for individual name segments; full dotted names can be longer)
*/
@Positive
@WithDefault("48")
int maxNameLength();

/**
* @return Defines the maximum number of properties any single Object in JSON document can
* contain, defaults to {@code 64} (note: this is not the total number of properties in the
* whole document, only on individual main or sub-document)
*/
@Positive
@WithDefault("64")
int maxObjectProperties();

/**
* @return Defines the maximum length of individual String values, defaults to {@code 16,000
*     characters}
*/
@Positive
@WithDefault("16000")
int maxStringLength();

/**
* @return Defines the maximum number of elements a single Array may contain, defaults to
*     {@code 100 elements}
*/
@Positive
@WithDefault("100")
int maxArrayLength();
}
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,8 @@ public enum ErrorCode {

SHRED_UNRECOGNIZED_NODE_TYPE("Unrecognized JSON node type in input document"),

SHRED_DOC_LIMIT_VIOLATION("Document size limitation violated"),

UNSUPPORTED_FILTER_DATA_TYPE("Unsupported filter data type"),

UNSUPPORTED_FILTER_OPERATION("Unsupported filter operator"),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import io.stargate.sgv2.jsonapi.config.DocumentLimitsConfig;
import io.stargate.sgv2.jsonapi.config.constants.DocumentConstants;
import io.stargate.sgv2.jsonapi.exception.ErrorCode;
import io.stargate.sgv2.jsonapi.exception.JsonApiException;
Expand All @@ -15,6 +16,7 @@
import java.util.Map;
import java.util.UUID;
import javax.enterprise.context.ApplicationScoped;
import javax.inject.Inject;

/**
* Shred an incoming JSON document into the data we need to store in the DB, and then de-shred.
Expand All @@ -30,8 +32,12 @@
public class Shredder {
private final ObjectMapper objectMapper;

public Shredder(ObjectMapper objectMapper) {
// Limits enforced on incoming documents during shredding (see validateDocument())
private final DocumentLimitsConfig documentLimits;

/**
 * Constructor used for dependency injection.
 *
 * @param objectMapper Jackson mapper used for document serialization
 * @param documentLimits Configured limits to enforce on documents being shredded
 */
@Inject
public Shredder(ObjectMapper objectMapper, DocumentLimitsConfig documentLimits) {
this.objectMapper = objectMapper;
this.documentLimits = documentLimits;
}

/**
Expand Down Expand Up @@ -81,6 +87,10 @@ public WritableShreddedDocument shred(JsonNode doc, UUID txId) {
} catch (IOException e) { // never happens but signature exposes it
throw new RuntimeException(e);
}
// Now that we have both the traversable document and serialization, verify
// it does not violate document limits:
validateDocument(documentLimits, docWithId, docJson);

final WritableShreddedDocument.Builder b =
WritableShreddedDocument.builder(new DocValueHasher(), docId, txId, docJson);

Expand Down Expand Up @@ -155,4 +165,103 @@ private void traverseValue(JsonNode value, ShredListener callback, JsonPath.Buil
"%s: %s", ErrorCode.SHRED_UNRECOGNIZED_NODE_TYPE.getMessage(), value.getNodeType()));
}
}

/**
 * Validates the given document against configured limits. Called with both the traversable
 * document and its serialization so the total-size check can use the exact JSON produced.
 *
 * @param limits Limits to enforce
 * @param doc Document (with id) to validate
 * @param docJson Serialized JSON of {@code doc}
 * @throws JsonApiException with {@code SHRED_DOC_LIMIT_VIOLATION} if any limit is exceeded
 */
private void validateDocument(DocumentLimitsConfig limits, ObjectNode doc, String docJson) {
  // Cheapest check first: overall serialized document size
  final int serializedLength = docJson.length();
  if (serializedLength > limits.maxDocSize()) {
    throw new JsonApiException(
        ErrorCode.SHRED_DOC_LIMIT_VIOLATION,
        String.format(
            "%s: document size (%d chars) exceeds maximum allowed (%d)",
            ErrorCode.SHRED_DOC_LIMIT_VIOLATION.getMessage(),
            serializedLength,
            limits.maxDocSize()));
  }

  // Then traverse the document tree for structural constraints (depth, counts, lengths)
  validateObjectValue(limits, doc, 0);
}

/**
 * Dispatches limit validation based on the type of the given JSON value: container types
 * (Object, Array) are recursed into; of scalar types only Strings have a limit to check.
 */
private void validateDocValue(DocumentLimitsConfig limits, JsonNode value, int depth) {
  if (value.isObject()) {
    validateObjectValue(limits, value, depth);
    return;
  }
  if (value.isArray()) {
    validateArrayValue(limits, value, depth);
    return;
  }
  if (value.isTextual()) {
    validateStringValue(limits, value);
  }
  // Other scalars (numbers, booleans, nulls) have no per-value limits
}

/**
 * Validates limits for a JSON Array value: nesting depth and element count; recursively
 * validates contained elements.
 */
private void validateArrayValue(DocumentLimitsConfig limits, JsonNode arrayValue, int depth) {
  // Entering an Array adds one nesting level
  final int childDepth = depth + 1;
  validateDocDepth(limits, childDepth);

  final int elementCount = arrayValue.size();
  if (elementCount > limits.maxArrayLength()) {
    throw new JsonApiException(
        ErrorCode.SHRED_DOC_LIMIT_VIOLATION,
        String.format(
            "%s: number of elements an Array has (%d) exceeds maximum allowed (%s)",
            ErrorCode.SHRED_DOC_LIMIT_VIOLATION.getMessage(),
            elementCount,
            limits.maxArrayLength()));
  }

  for (int i = 0; i < elementCount; ++i) {
    validateDocValue(limits, arrayValue.get(i), childDepth);
  }
}

/**
 * Validates limits for a JSON Object value: nesting depth, number of properties and
 * property name lengths; recursively validates contained property values.
 *
 * @param limits Limits to enforce
 * @param objectValue Object value to validate
 * @param depth Nesting depth of the enclosing context (0 for the document root)
 * @throws JsonApiException with {@code SHRED_DOC_LIMIT_VIOLATION} if any limit is exceeded
 */
private void validateObjectValue(DocumentLimitsConfig limits, JsonNode objectValue, int depth) {
  // Entering an Object adds one nesting level
  ++depth;
  validateDocDepth(limits, depth);

  if (objectValue.size() > limits.maxObjectProperties()) {
    throw new JsonApiException(
        ErrorCode.SHRED_DOC_LIMIT_VIOLATION,
        String.format(
            "%s: number of properties an Object has (%d) exceeds maximum allowed (%s)",
            ErrorCode.SHRED_DOC_LIMIT_VIOLATION.getMessage(),
            objectValue.size(),
            limits.maxObjectProperties()));
  }

  var it = objectValue.fields();
  while (it.hasNext()) {
    var entry = it.next();
    final String key = entry.getKey();
    // Fix: check against the passed-in "limits" parameter (previously read the
    // "documentLimits" field here, inconsistently with the rest of this method;
    // same instance today, but the parameter is the contract)
    if (key.length() > limits.maxNameLength()) {
      throw new JsonApiException(
          ErrorCode.SHRED_DOC_LIMIT_VIOLATION,
          String.format(
              "%s: Property name length (%d) exceeds maximum allowed (%s)",
              ErrorCode.SHRED_DOC_LIMIT_VIOLATION.getMessage(),
              key.length(),
              limits.maxNameLength()));
    }
    validateDocValue(limits, entry.getValue(), depth);
  }
}

/** Validates that the length of a JSON String value does not exceed the configured maximum. */
private void validateStringValue(DocumentLimitsConfig limits, JsonNode stringValue) {
  final String text = stringValue.textValue();
  final int length = text.length();
  if (length <= limits.maxStringLength()) {
    return;
  }
  throw new JsonApiException(
      ErrorCode.SHRED_DOC_LIMIT_VIOLATION,
      String.format(
          "%s: String value length (%d) exceeds maximum allowed (%s)",
          ErrorCode.SHRED_DOC_LIMIT_VIOLATION.getMessage(),
          length,
          limits.maxStringLength()));
}

/** Validates that the given nesting depth does not exceed the configured maximum. */
private void validateDocDepth(DocumentLimitsConfig limits, int depth) {
  if (depth <= limits.maxDocDepth()) {
    return;
  }
  throw new JsonApiException(
      ErrorCode.SHRED_DOC_LIMIT_VIOLATION,
      String.format(
          "%s: document depth exceeds maximum allowed (%s)",
          ErrorCode.SHRED_DOC_LIMIT_VIOLATION.getMessage(), limits.maxDocDepth()));
}
}
8 changes: 4 additions & 4 deletions src/main/resources/application.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -3,16 +3,16 @@

stargate:

# disable all sgv2 exception mappers, handled differently
exception-mappers:
enabled: false

# metrics properties
# see io.stargate.sgv2.api.common.config.MetricsConfig for all config properties and options
metrics:
global-tags:
module: sgv2-jsonapi

# disable all sgv2 exception mappers, handled differently
exception-mappers:
enabled: false

quarkus:

# general app properties
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,9 @@
import static org.hamcrest.Matchers.nullValue;
import static org.hamcrest.Matchers.startsWith;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import io.quarkus.test.common.QuarkusTestResource;
import io.quarkus.test.junit.QuarkusIntegrationTest;
import io.restassured.http.ContentType;
Expand All @@ -26,7 +29,6 @@
@QuarkusIntegrationTest
@QuarkusTestResource(DseTestResource.class)
public class InsertIntegrationTest extends CollectionResourceBaseIntegrationTest {

@AfterEach
public void cleanUpData() {
deleteAllDocuments();
Expand Down Expand Up @@ -268,6 +270,71 @@ public void notValidDocumentMissing() {
.body("errors[0].message", is(not(blankString())))
.body("errors[0].exceptionClass", is("ConstraintViolationException"));
}

// Verifies that insertOne rejects a document containing an Array with more elements
// than the configured maximum (default 100), returning a SHRED_DOC_LIMIT_VIOLATION error.
@Test
public void tryInsertTooBigArray() {
final ObjectMapper mapper = new ObjectMapper();
// Max array elements allowed is 100; add a few more
ObjectNode doc = mapper.createObjectNode();
ArrayNode arr = doc.putArray("arr");
for (int i = 0; i < 500; ++i) {
arr.add(i);
}
final String json =
"""
{
"insertOne": {
"document": %s
}
}
"""
.formatted(doc);
given()
.header(HttpConstants.AUTHENTICATION_TOKEN_HEADER_NAME, getAuthToken())
.contentType(ContentType.JSON)
.body(json)
.when()
.post(CollectionResource.BASE_PATH, keyspaceId.asInternal(), collectionName)
.then()
// API reports limit violations as errors in a 200 response, not as HTTP error status
.statusCode(200)
.body("errors[0].errorCode", is("SHRED_DOC_LIMIT_VIOLATION"))
.body(
"errors[0].message",
is(
"Document size limitation violated: number of elements an Array has (500) exceeds maximum allowed (100)"));
}

// Verifies that insertOne rejects a document containing a property name longer than the
// configured maximum (default 48 characters), returning a SHRED_DOC_LIMIT_VIOLATION error.
@Test
public void tryInsertTooLongName() {
final ObjectMapper mapper = new ObjectMapper();
// Max property name: 48 characters, let's try 100
ObjectNode doc = mapper.createObjectNode();
doc.put(
"prop_12345_123456789_123456789_123456789_123456789_123456789_123456789_123456789_123456789_123456789",
72);
final String json =
"""
{
"insertOne": {
"document": %s
}
}
"""
.formatted(doc);
given()
.header(HttpConstants.AUTHENTICATION_TOKEN_HEADER_NAME, getAuthToken())
.contentType(ContentType.JSON)
.body(json)
.when()
.post(CollectionResource.BASE_PATH, keyspaceId.asInternal(), collectionName)
.then()
// API reports limit violations as errors in a 200 response, not as HTTP error status
.statusCode(200)
.body("errors[0].errorCode", is("SHRED_DOC_LIMIT_VIOLATION"))
.body(
"errors[0].message",
is(
"Document size limitation violated: Property name length (100) exceeds maximum allowed (48)"));
}
}

@Nested
Expand Down
Loading

0 comments on commit 45d700f

Please sign in to comment.