Skip to content
Original file line number Diff line number Diff line change
@@ -0,0 +1,98 @@
package com.netgrif.application.engine.elastic.service;


import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang.StringUtils;

import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.stream.Collectors;


/**
* The ElasticsearchQuerySanitizer class is responsible for sanitizing Elasticsearch queries
* by escaping or removing reserved characters and keywords. This is essential to ensure proper
* handling of Elasticsearch queries and to prevent syntax issues caused by special characters or
* reserved words.
* <p>
* This class provides utility methods to sanitize query strings by escaping predefined reserved
* characters, removing certain reserved characters, and excluding specific keywords if provided.
* The reserved characters and keywords are predefined and managed internally.
*/
@Slf4j
public class ElasticsearchQuerySanitizer {

public static final String[] RESERVED_CHARACTERS_TO_ESCAPE = {"\\", "+", "-", "=", "&&", "||", "!", "(", ")", "{", "}", "[", "]", "^", "\"", "~", "*", "?", ":", "/", "AND", "OR", "NOT", " "};
public static final String[] RESERVED_CHARACTERS_TO_REMOVE = {">", "<"};
public static final Map<String, String> RESERVED_KEYWORDS = prepareReservedKeywords();
Comment thread
Retoocs marked this conversation as resolved.

/**
* Sanitizes the provided Elasticsearch query string by escaping or removing certain reserved
* characters and excluding specific keywords if applicable.
* <p>
* This method applies default sanitization rules and does not consider keyword exclusions.
*
* @param query the Elasticsearch query string to sanitize, such as a search query or filter.
* It must not be null to ensure proper sanitization.
* @return the sanitized query string with reserved characters handled appropriately.
* If the input is empty or null, the behavior depends on the implemented sanitization logic.
*/
public static String sanitize(String query) {
return sanitize(query, null);
}

/**
* Sanitizes the given query string by replacing reserved keywords with their sanitized equivalents,
* excluding the specified keywords from sanitization.
*
* @param query the query string to sanitize, which may contain reserved characters and keywords.
* This string must not be null.
* @param exclude an array of keywords to exclude from sanitization. If null or empty, all reserved
* keywords will be considered for sanitization.
* @return the sanitized query string with reserved keywords appropriately replaced, and excluded
* keywords untouched.
*/
public static String sanitize(String query, String[] exclude) {
if (query == null || query.isBlank()) {
return query;
}
Map<String, String> keywordsToEscape = excludeKeywords(exclude);
String sanitized = StringUtils.replaceEach(query,
keywordsToEscape.keySet().toArray(new String[0]),
keywordsToEscape.values().toArray(new String[0]));
log.trace("Sanitized query: {}", sanitized);
return sanitized;
}
Comment thread
coderabbitai[bot] marked this conversation as resolved.

protected static Map<String, String> prepareReservedKeywords() {
Map<String, String> result = new HashMap<>();
for (String reservedString : RESERVED_CHARACTERS_TO_ESCAPE) {
String escaped = Arrays.stream(reservedString.split(""))
Comment thread
tuplle marked this conversation as resolved.
.map(c -> "\\" + c)
.collect(Collectors.joining(""));
result.put(reservedString, escaped);
}
for (String reservedString : RESERVED_CHARACTERS_TO_REMOVE) {
result.put(reservedString, "\\ ");
Comment thread
tuplle marked this conversation as resolved.
}

return Collections.unmodifiableMap(result);
}

protected static Map<String, String> excludeKeywords(String[] exclude) {
if (exclude == null || exclude.length == 0) {
return RESERVED_KEYWORDS;
}
Map<String, String> keywordsToEscape = new HashMap<>(RESERVED_KEYWORDS);
for (String toExclude : exclude) {
if (RESERVED_KEYWORDS.containsKey(toExclude)) {
keywordsToEscape.remove(toExclude);
}
}
return Collections.unmodifiableMap(keywordsToEscape);
}


}
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
package com.netgrif.application.engine.elastic.web.requestbodies;

import com.fasterxml.jackson.annotation.JsonFormat;
import com.netgrif.application.engine.elastic.service.ElasticsearchQuerySanitizer;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
Expand Down Expand Up @@ -60,7 +61,7 @@ public CaseSearchRequest(Map<String, Object> request) {
}
if (request.containsKey("author") && request.get("author") instanceof List) {
List<Map<String, String>> authors = (List<Map<String, String>>) request.get("author");
this.author = authors.stream().map(map -> {
this.author = authors.stream().map(map -> {
Author authorRequest = new Author();
if (map.containsKey("id"))
authorRequest.id = map.get("id");
Expand All @@ -75,7 +76,8 @@ public CaseSearchRequest(Map<String, Object> request) {
this.data = (Map<String, String>) request.get("data");
}
if (request.containsKey("fullText") && request.get("fullText") instanceof String) {
this.fullText = (String) request.get("fullText");
String originalFullText = (String) request.get("fullText");
this.fullText = ElasticsearchQuerySanitizer.sanitize(originalFullText);
}
if (request.containsKey("transition") && request.get("transition") instanceof List) {
this.transition = (List<String>) request.get("transition");
Expand Down
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
package com.netgrif.application.engine.elastic.web.requestbodies;

import com.netgrif.application.engine.elastic.service.ElasticsearchQuerySanitizer;
import com.netgrif.application.engine.workflow.web.requestbodies.TaskSearchRequest;
import com.netgrif.application.engine.workflow.web.requestbodies.taskSearch.PetriNet;
import com.netgrif.application.engine.workflow.web.requestbodies.taskSearch.TaskSearchCaseRequest;
Expand All @@ -14,14 +15,14 @@
@AllArgsConstructor
public class ElasticTaskSearchRequest extends TaskSearchRequest {
public String query;

public ElasticTaskSearchRequest(Map<String, Object> request) {
if (request.containsKey("role") && request.get("role") instanceof List) {
this.role = (List<String>) request.get("role");
}
if (request.containsKey("useCase") && request.get("useCase") instanceof List) {
List<Map<String, String>> useCases = (List<Map<String, String>>) request.get("useCase");
this.useCase = useCases.stream().map(map -> {
this.useCase = useCases.stream().map(map -> {
TaskSearchCaseRequest useCase = new TaskSearchCaseRequest();
if (map.containsKey("id"))
useCase.id = map.get("id");
Expand All @@ -44,7 +45,8 @@ public ElasticTaskSearchRequest(Map<String, Object> request) {
this.transitionId = (List<String>) request.get("transitionId");
}
if (request.containsKey("fullText") && request.get("fullText") instanceof String) {
this.fullText = (String) request.get("fullText");
String originalFullText = (String) request.get("fullText");
this.fullText = ElasticsearchQuerySanitizer.sanitize(originalFullText);
}
if (request.containsKey("group") && request.get("group") instanceof List) {
this.group = (List<String>) request.get("group");
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,8 +3,8 @@
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import com.netgrif.application.engine.elastic.web.requestbodies.CaseSearchRequest;
import com.netgrif.application.engine.utils.SingleItemAsList;
import com.netgrif.application.engine.utils.SingleItemAsListDeserializer;
import com.netgrif.application.engine.workflow.utils.CaseSearchRequestSingleItemAsListDeserializer;

@JsonDeserialize(using = SingleItemAsListDeserializer.class, contentAs = CaseSearchRequest.class)
@JsonDeserialize(using = CaseSearchRequestSingleItemAsListDeserializer.class, contentAs = CaseSearchRequest.class)
public class SingleCaseSearchRequestAsList extends SingleItemAsList<CaseSearchRequest> {
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -3,8 +3,8 @@
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import com.netgrif.application.engine.elastic.web.requestbodies.ElasticTaskSearchRequest;
import com.netgrif.application.engine.utils.SingleItemAsList;
import com.netgrif.application.engine.utils.SingleItemAsListDeserializer;
import com.netgrif.application.engine.workflow.utils.TaskSearchRequestSingleItemAsListDeserializer;

@JsonDeserialize(using = SingleItemAsListDeserializer.class, contentAs = ElasticTaskSearchRequest.class)
@JsonDeserialize(using = TaskSearchRequestSingleItemAsListDeserializer.class, contentAs = ElasticTaskSearchRequest.class)
public class SingleElasticTaskSearchRequestAsList extends SingleItemAsList<ElasticTaskSearchRequest> {
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,10 @@
import org.springframework.web.server.ResponseStatusException;

import java.io.IOException;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.util.List;
import java.util.Objects;

public class SingleItemAsListDeserializer extends StdDeserializer<Object> implements ContextualDeserializer {

Expand All @@ -27,13 +30,16 @@ protected SingleItemAsListDeserializer(Class<? extends SingleItemAsList> vc) {

/**
 * Creates a contextual copy of this deserializer bound to the concrete wrapper type
 * in use at the current property, so {@code deserialize} knows which wrapper class
 * to instantiate.
 *
 * @param deserializationContext the active deserialization context (used when no bean property is available)
 * @param beanProperty           the property being deserialized, or {@code null}
 * @return a new deserializer instance bound to the resolved wrapper class
 */
@Override
public JsonDeserializer<?> createContextual(DeserializationContext deserializationContext, BeanProperty beanProperty) {
// Unchecked cast: getItemClass resolves the declared type of the target property/context,
// which is expected to be a SingleItemAsList subclass here.
return new SingleItemAsListDeserializer((Class<? extends SingleItemAsList>) getItemClass(deserializationContext, beanProperty));
}

/**
 * Resolves the raw class of the value being deserialized: the bean property's declared type
 * when a property is available, otherwise the contextual type from the deserialization context.
 *
 * @param deserializationContext the active deserialization context
 * @param beanProperty           the property being deserialized, or {@code null}
 * @return the raw class of the target type
 */
protected Class<?> getItemClass(DeserializationContext deserializationContext, BeanProperty beanProperty) {
    final JavaType type;
    if (beanProperty != null)
        type = beanProperty.getType();
    else
        type = deserializationContext.getContextualType();
    // NOTE(review): removed a stale duplicate "return new SingleItemAsListDeserializer(...)"
    // line left over from a diff artifact — only the raw class is returned here.
    return type.getRawClass();
}

@Override
Expand Down Expand Up @@ -64,4 +70,15 @@ public Object deserialize(JsonParser jsonParser, DeserializationContext deserial

return wrapper;
}

/**
 * Heuristically checks whether {@code object} is the given wrapper type carrying the given
 * wrapped item type. Used by subclasses to decide whether post-processing (e.g. sanitization)
 * should be applied to the deserialized result.
 *
 * @param object       the deserialized object to inspect (must be non-null; a null would make
 *                     {@code getClass()} throw, which is swallowed below)
 * @param wrapperClass the expected concrete wrapper class
 * @param wrappedClass the expected item type carried by the wrapper
 * @return {@code true} when the object matches either condition below, {@code false} otherwise
 *         or on any reflection failure
 */
protected boolean isWrapperClass(Object object, Class<?> wrapperClass, Class<?> wrappedClass) {
try {
// Match when either: the object is exactly the wrapper class, or its direct generic
// superclass declares wrappedClass as its first type argument
// (e.g. SingleItemAsList<CaseSearchRequest>).
// NOTE(review): the second branch does not consult wrapperClass at all — any subclass whose
// generic superclass has wrappedClass as its first type argument matches; confirm intended.
Type superClass = object.getClass().getGenericSuperclass();
return Objects.equals(object.getClass(), wrapperClass) ||
(superClass != null &&
Objects.equals(((ParameterizedType) superClass).getActualTypeArguments()[0], wrappedClass));
} catch (Exception e) {
// A raw (non-parameterized) superclass makes the ParameterizedType cast throw;
// treat any reflection failure as "not a wrapper".
return false;
}
}
Comment thread
tuplle marked this conversation as resolved.
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,69 @@
package com.netgrif.application.engine.workflow.utils;

import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.databind.BeanProperty;
import com.fasterxml.jackson.databind.DeserializationContext;
import com.fasterxml.jackson.databind.JsonDeserializer;
import com.netgrif.application.engine.elastic.service.ElasticsearchQuerySanitizer;
import com.netgrif.application.engine.elastic.web.requestbodies.CaseSearchRequest;
import com.netgrif.application.engine.elastic.web.requestbodies.singleaslist.SingleCaseSearchRequestAsList;
import com.netgrif.application.engine.utils.SingleItemAsList;
import com.netgrif.application.engine.utils.SingleItemAsListDeserializer;

import java.io.IOException;
import java.util.List;

/**
* Custom deserializer for handling JSON deserialization of objects that extend
* the {@link SingleItemAsList} class, specifically designed for handling
* {@link CaseSearchRequest} and ensuring its fields are properly sanitized.
* <p>
* This deserializer extends the functionality of {@link SingleItemAsListDeserializer}
* to additionally process the deserialized objects that represent case search requests.
* It ensures that the `fullText` field in each case search request is sanitized
* using {@link ElasticsearchQuerySanitizer}.
* <p>
* It also provides a mechanism to dynamically determine the appropriate type
* using the contextual information during deserialization.
*/
/**
 * Contextual deserializer for {@link SingleCaseSearchRequestAsList} payloads, which may arrive
 * either as a single {@link CaseSearchRequest} object or as a JSON array of them.
 * <p>
 * Parsing is delegated to {@link SingleItemAsListDeserializer}; afterwards the {@code fullText}
 * field of every contained {@link CaseSearchRequest} is passed through
 * {@link ElasticsearchQuerySanitizer#sanitize(String)} so reserved Elasticsearch characters
 * cannot break or alter the generated query.
 */
public class CaseSearchRequestSingleItemAsListDeserializer extends SingleItemAsListDeserializer {

    protected CaseSearchRequestSingleItemAsListDeserializer() {
        this(null);
    }

    protected CaseSearchRequestSingleItemAsListDeserializer(Class<? extends SingleItemAsList> vc) {
        super(vc);
    }

    /**
     * Returns a new instance of this deserializer bound to the wrapper type resolved from the
     * current bean property or deserialization context.
     */
    @Override
    public JsonDeserializer<?> createContextual(DeserializationContext deserializationContext, BeanProperty beanProperty) {
        Class<?> wrapperType = getItemClass(deserializationContext, beanProperty);
        return new CaseSearchRequestSingleItemAsListDeserializer((Class<? extends SingleItemAsList>) wrapperType);
    }

    /**
     * Delegates parsing to the parent deserializer, then sanitizes the {@code fullText} field of
     * each {@link CaseSearchRequest} when the result is a {@code SingleCaseSearchRequestAsList}.
     *
     * @param jsonParser             the parser positioned at the JSON value to read
     * @param deserializationContext the active deserialization context
     * @return the wrapper produced by the parent deserializer, with sanitized {@code fullText} fields
     * @throws IOException              if reading the JSON input fails
     * @throws IllegalArgumentException if the parent deserializer cannot instantiate the wrapper
     */
    @Override
    public Object deserialize(JsonParser jsonParser, DeserializationContext deserializationContext) throws IOException, IllegalArgumentException {
        Object deserialized = super.deserialize(jsonParser, deserializationContext);
        if (!isWrapperClass(deserialized, SingleCaseSearchRequestAsList.class, CaseSearchRequest.class)) {
            return deserialized;
        }
        for (CaseSearchRequest searchRequest : ((SingleCaseSearchRequestAsList) deserialized).getList()) {
            searchRequest.fullText = ElasticsearchQuerySanitizer.sanitize(searchRequest.fullText);
        }
        return deserialized;
    }

}
Original file line number Diff line number Diff line change
@@ -0,0 +1,74 @@
package com.netgrif.application.engine.workflow.utils;

import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.databind.BeanProperty;
import com.fasterxml.jackson.databind.DeserializationContext;
import com.fasterxml.jackson.databind.JsonDeserializer;
import com.netgrif.application.engine.elastic.service.ElasticsearchQuerySanitizer;
import com.netgrif.application.engine.elastic.web.requestbodies.ElasticTaskSearchRequest;
import com.netgrif.application.engine.elastic.web.requestbodies.singleaslist.SingleElasticTaskSearchRequestAsList;
import com.netgrif.application.engine.utils.SingleItemAsList;
import com.netgrif.application.engine.utils.SingleItemAsListDeserializer;
import com.netgrif.application.engine.workflow.web.requestbodies.TaskSearchRequest;
import com.netgrif.application.engine.workflow.web.requestbodies.singleaslist.SingleTaskSearchRequestAsList;

import java.io.IOException;
import java.util.Collections;
import java.util.List;

/**
* Custom deserializer for handling cases where single `TaskSearchRequest` items
* are sent as lists or standalone entities during JSON deserialization.
* <p>
* This class extends the `SingleItemAsListDeserializer`, enabling support for
* deserialization scenarios where JSON may represent either a single item or a list of items.
* It ensures compatibility with `SingleTaskSearchRequestAsList` by sanitizing the `fullText` field
* in each `TaskSearchRequest` instance using the `ElasticsearchQuerySanitizer`.
*/
public class TaskSearchRequestSingleItemAsListDeserializer extends SingleItemAsListDeserializer {

protected TaskSearchRequestSingleItemAsListDeserializer() {
this(null);
}

protected TaskSearchRequestSingleItemAsListDeserializer(Class<? extends SingleItemAsList> vc) {
super(vc);
}

@Override
public JsonDeserializer<?> createContextual(DeserializationContext deserializationContext, BeanProperty beanProperty) {
return new TaskSearchRequestSingleItemAsListDeserializer((Class<? extends SingleItemAsList>) getItemClass(deserializationContext, beanProperty));
}

/**
* Deserializes a JSON input into an object while handling cases where a single
* `TaskSearchRequest` or a list of `TaskSearchRequest` objects is included. If
* the object is a `SingleTaskSearchRequestAsList`, it processes each `TaskSearchRequest`
* in the list by sanitizing the `fullText` field using `ElasticsearchQuerySanitizer`.
*
* @param jsonParser the JSON parser used to parse the incoming JSON content
* @param deserializationContext the context for deserialization, providing shared
* state and configuration
* @return the deserialized object, with sanitization applied to `TaskSearchRequest.fullText`
* if applicable
* @throws IOException if an I/O error occurs during parsing
* @throws IllegalArgumentException if the deserialization process encounters an error
*/
@Override
public Object deserialize(JsonParser jsonParser, DeserializationContext deserializationContext) throws IOException, IllegalArgumentException {
Object result = super.deserialize(jsonParser, deserializationContext);
if (isWrapperClass(result, SingleTaskSearchRequestAsList.class, TaskSearchRequest.class) ||
isWrapperClass(result, SingleElasticTaskSearchRequestAsList.class, ElasticTaskSearchRequest.class)) {
List<? extends TaskSearchRequest> list = Collections.emptyList();
if (result instanceof SingleTaskSearchRequestAsList) {
list = ((SingleTaskSearchRequestAsList) result).getList();
} else if (result instanceof SingleElasticTaskSearchRequestAsList) {
list = ((SingleElasticTaskSearchRequestAsList) result).getList();
}
list.forEach(request ->
request.fullText = ElasticsearchQuerySanitizer.sanitize(request.fullText));
}
Comment thread
coderabbitai[bot] marked this conversation as resolved.
return result;
}

}
Comment thread
coderabbitai[bot] marked this conversation as resolved.
Original file line number Diff line number Diff line change
Expand Up @@ -2,9 +2,9 @@

import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import com.netgrif.application.engine.utils.SingleItemAsList;
import com.netgrif.application.engine.utils.SingleItemAsListDeserializer;
import com.netgrif.application.engine.workflow.utils.TaskSearchRequestSingleItemAsListDeserializer;
import com.netgrif.application.engine.workflow.web.requestbodies.TaskSearchRequest;

@JsonDeserialize(using = SingleItemAsListDeserializer.class, contentAs = TaskSearchRequest.class)
@JsonDeserialize(using = TaskSearchRequestSingleItemAsListDeserializer.class, contentAs = TaskSearchRequest.class)
public class SingleTaskSearchRequestAsList extends SingleItemAsList<TaskSearchRequest> {
}
}
Loading
Loading