@@ -247,21 +247,20 @@ public void validateMappings(Map<String, Map<String, FieldCapabilities>> fieldCa
if (fieldCaps.get(dateType).isAggregatable()) {
return;
} else {
validationException.addValidationError("The field [" + field + "] must be aggregatable across all indices, " +
validationException.addValidationError("The field [" + field + "] must be aggregatable, " +
"but is not.");
}
}
}
if (matchesDateType == false) {
validationException.addValidationError("The field referenced by a date_histo group must be one of type [" +
Strings.collectionToCommaDelimitedString(RollupField.DATE_FIELD_MAPPER_TYPES) + "] across all " +
"indices in the index pattern. Found: " + fieldCaps.keySet().toString() + " for field [" + field + "]");
Strings.collectionToCommaDelimitedString(RollupField.DATE_FIELD_MAPPER_TYPES) + "]." +
" Found: " + fieldCaps.keySet().toString() + " for field [" + field + "]");
}
} else {
validationException.addValidationError("Could not find one of [" +
Strings.collectionToCommaDelimitedString(RollupField.DATE_FIELD_MAPPER_TYPES) + "] fields with name [" +
field + "] in any of the indices matching " +
"the index pattern.");
field + "].");
}
}

@@ -6,17 +6,26 @@
*/
package org.elasticsearch.xpack.core.rollup;

import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.fieldcaps.FieldCapabilities;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
import org.elasticsearch.test.AbstractSerializingTestCase;
import org.elasticsearch.xpack.core.rollup.job.MetricConfig;
import org.elasticsearch.xpack.core.rollup.job.TermsGroupConfig;

import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;

import static java.util.Collections.emptyList;
import static org.hamcrest.Matchers.equalTo;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;


public class RollupActionConfigTests extends AbstractSerializingTestCase<RollupActionConfig> {
@@ -54,4 +63,49 @@ public void testEmptyMetrics() {
new RollupActionConfig(groupConfig, randomBoolean() ? null : emptyList()));
assertThat(e.getMessage(), equalTo("At least one metric must be configured"));
}

public void testValidateMapping() {
ActionRequestValidationException e = new ActionRequestValidationException();
Map<String, Map<String, FieldCapabilities>> responseMap = new HashMap<>();
String type = getRandomType();

// Have to mock fieldcaps because the ctor's aren't public...
FieldCapabilities myFieldCaps = mock(FieldCapabilities.class);
when(myFieldCaps.isAggregatable()).thenReturn(true);
responseMap.put("my_field", Collections.singletonMap(type, myFieldCaps));
responseMap.put("date_field", Collections.singletonMap("date", myFieldCaps));
responseMap.put("group_field", Collections.singletonMap("keyword", myFieldCaps));
responseMap.put("metric_field", Collections.singletonMap("short", myFieldCaps));

RollupActionConfig config = new RollupActionConfig(new RollupActionGroupConfig(
new RollupActionDateHistogramGroupConfig.FixedInterval("date_field", DateHistogramInterval.DAY),
null, new TermsGroupConfig("group_field")),
List.of(new MetricConfig("metric_field", List.of("max"))));
config.validateMappings(responseMap, e);
assertThat(e.validationErrors().size(), equalTo(0));
}

private String getRandomType() {
int n = randomIntBetween(0, 8);
if (n == 0) {
return "keyword";
} else if (n == 1) {
return "text";
} else if (n == 2) {
return "long";
} else if (n == 3) {
return "integer";
} else if (n == 4) {
return "short";
} else if (n == 5) {
return "float";
} else if (n == 6) {
return "double";
} else if (n == 7) {
return "scaled_float";
} else if (n == 8) {
return "half_float";
}
return "long";
}
}
@@ -12,7 +12,6 @@
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
import org.elasticsearch.test.AbstractSerializingTestCase;
import org.elasticsearch.xpack.core.rollup.job.DateHistogramGroupConfig;

import java.io.IOException;
import java.util.Collections;
@@ -46,11 +45,10 @@ public void testValidateNoMapping() {
ActionRequestValidationException e = new ActionRequestValidationException();
Map<String, Map<String, FieldCapabilities>> responseMap = new HashMap<>();

DateHistogramGroupConfig config = new DateHistogramGroupConfig.CalendarInterval("my_field",
new DateHistogramInterval("1d"), null, null);
RollupActionDateHistogramGroupConfig config = new RollupActionDateHistogramGroupConfig.CalendarInterval("my_field",
new DateHistogramInterval("1d"));
config.validateMappings(responseMap, e);
assertThat(e.validationErrors().get(0), equalTo("Could not find one of [date,date_nanos] fields with name [my_field] in " +
"any of the indices matching the index pattern."));
assertThat(e.validationErrors().get(0), equalTo("Could not find one of [date,date_nanos] fields with name [my_field]."));
}

public void testValidateNomatchingField() {
@@ -61,11 +59,10 @@ public void testValidateNomatchingField() {
FieldCapabilities fieldCaps = mock(FieldCapabilities.class);
responseMap.put("some_other_field", Collections.singletonMap("date", fieldCaps));

DateHistogramGroupConfig config = new DateHistogramGroupConfig.CalendarInterval("my_field",
new DateHistogramInterval("1d"), null, null);
RollupActionDateHistogramGroupConfig config = new RollupActionDateHistogramGroupConfig.CalendarInterval("my_field",
new DateHistogramInterval("1d"));
config.validateMappings(responseMap, e);
assertThat(e.validationErrors().get(0), equalTo("Could not find one of [date,date_nanos] fields with name [my_field] in " +
"any of the indices matching the index pattern."));
assertThat(e.validationErrors().get(0), equalTo("Could not find one of [date,date_nanos] fields with name [my_field]."));
}

public void testValidateFieldWrongType() {
@@ -76,11 +73,11 @@ public void testValidateFieldWrongType() {
FieldCapabilities fieldCaps = mock(FieldCapabilities.class);
responseMap.put("my_field", Collections.singletonMap("keyword", fieldCaps));

DateHistogramGroupConfig config = new DateHistogramGroupConfig.CalendarInterval("my_field",
new DateHistogramInterval("1d"), null, null);
RollupActionDateHistogramGroupConfig config = new RollupActionDateHistogramGroupConfig.CalendarInterval("my_field",
new DateHistogramInterval("1d"));
config.validateMappings(responseMap, e);
assertThat(e.validationErrors().get(0), equalTo("The field referenced by a date_histo group must be one of type " +
"[date,date_nanos] across all indices in the index pattern. Found: [keyword] for field [my_field]"));
"[date,date_nanos]. Found: [keyword] for field [my_field]"));
}

public void testValidateFieldMixtureTypes() {
@@ -94,11 +91,11 @@ public void testValidateFieldMixtureTypes() {
types.put("keyword", fieldCaps);
responseMap.put("my_field", types);

DateHistogramGroupConfig config = new DateHistogramGroupConfig.CalendarInterval("my_field",
new DateHistogramInterval("1d"), null, null);
RollupActionDateHistogramGroupConfig config = new RollupActionDateHistogramGroupConfig.CalendarInterval("my_field",
new DateHistogramInterval("1d"));
config.validateMappings(responseMap, e);
assertThat(e.validationErrors().get(0), equalTo("The field referenced by a date_histo group must be one of type " +
"[date,date_nanos] across all indices in the index pattern. Found: [date, keyword] for field [my_field]"));
"[date,date_nanos]. Found: [date, keyword] for field [my_field]"));
}

public void testValidateFieldMatchingNotAggregatable() {
@@ -110,10 +107,10 @@ public void testValidateFieldMatchingNotAggregatable() {
when(fieldCaps.isAggregatable()).thenReturn(false);
responseMap.put("my_field", Collections.singletonMap("date", fieldCaps));

DateHistogramGroupConfig config =new DateHistogramGroupConfig.CalendarInterval("my_field",
new DateHistogramInterval("1d"), null, null);
RollupActionDateHistogramGroupConfig config = new RollupActionDateHistogramGroupConfig.CalendarInterval("my_field",
new DateHistogramInterval("1d"));
config.validateMappings(responseMap, e);
assertThat(e.validationErrors().get(0), equalTo("The field [my_field] must be aggregatable across all indices, but is not."));
assertThat(e.validationErrors().get(0), equalTo("The field [my_field] must be aggregatable, but is not."));
}

public void testValidateMatchingField() {
@@ -125,8 +122,8 @@ public void testValidateMatchingField() {
when(fieldCaps.isAggregatable()).thenReturn(true);
responseMap.put("my_field", Collections.singletonMap("date", fieldCaps));

DateHistogramGroupConfig config = new DateHistogramGroupConfig.CalendarInterval("my_field",
new DateHistogramInterval("1d"), null, null);
RollupActionDateHistogramGroupConfig config = new RollupActionDateHistogramGroupConfig.CalendarInterval("my_field",
new DateHistogramInterval("1d"));
config.validateMappings(responseMap, e);
assertThat(e.validationErrors().size(), equalTo(0));
}
@@ -140,8 +137,8 @@ public void testValidateWeek() {
when(fieldCaps.isAggregatable()).thenReturn(true);
responseMap.put("my_field", Collections.singletonMap("date", fieldCaps));

DateHistogramGroupConfig config = new DateHistogramGroupConfig.CalendarInterval("my_field",
new DateHistogramInterval("1w"), null, null);
RollupActionDateHistogramGroupConfig config = new RollupActionDateHistogramGroupConfig.CalendarInterval("my_field",
new DateHistogramInterval("1w"));
config.validateMappings(responseMap, e);
assertThat(e.validationErrors().size(), equalTo(0));
}
@@ -6,13 +6,24 @@
*/
package org.elasticsearch.xpack.core.rollup;

import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.fieldcaps.FieldCapabilities;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
import org.elasticsearch.test.AbstractSerializingTestCase;
import org.elasticsearch.xpack.core.rollup.job.HistogramGroupConfig;
import org.elasticsearch.xpack.core.rollup.job.TermsGroupConfig;

import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

import static org.elasticsearch.xpack.core.rollup.ConfigTestHelpers.randomRollupActionGroupConfig;
import static org.hamcrest.Matchers.equalTo;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

public class RollupActionGroupConfigSerializingTests extends AbstractSerializingTestCase<RollupActionGroupConfig> {

@@ -30,4 +41,41 @@ protected Writeable.Reader<RollupActionGroupConfig> instanceReader() {
protected RollupActionGroupConfig createTestInstance() {
return randomRollupActionGroupConfig(random());
}

public void testValidatesDateHistogramConfig() {
ActionRequestValidationException e = new ActionRequestValidationException();
Map<String, Map<String, FieldCapabilities>> responseMap = new HashMap<>();
// Have to mock fieldcaps because the ctor's aren't public...
FieldCapabilities fieldCaps = mock(FieldCapabilities.class);
when(fieldCaps.isAggregatable()).thenReturn(true);
responseMap.put("date_field", Collections.singletonMap("not_date", fieldCaps));
RollupActionGroupConfig config = new RollupActionGroupConfig(
new RollupActionDateHistogramGroupConfig.FixedInterval("date_field", DateHistogramInterval.DAY));
config.validateMappings(responseMap, e);
assertThat(e.validationErrors().size(), equalTo(1));
}

public void testValidatesAllSubConfigs() {
ActionRequestValidationException e = new ActionRequestValidationException();
Map<String, Map<String, FieldCapabilities>> responseMap = new HashMap<>();
// Have to mock fieldcaps because the ctor's aren't public...
FieldCapabilities fieldCaps = mock(FieldCapabilities.class);
when(fieldCaps.isAggregatable()).thenReturn(false);
responseMap.put("date_field", Collections.singletonMap("date", fieldCaps));
responseMap.put("terms_field", Collections.singletonMap("keyword", fieldCaps));
responseMap.put("histogram_field", Collections.singletonMap("keyword", fieldCaps));
RollupActionGroupConfig config = new RollupActionGroupConfig(
new RollupActionDateHistogramGroupConfig.FixedInterval("date_field", DateHistogramInterval.DAY),
new HistogramGroupConfig(132, "histogram_field"), new TermsGroupConfig("terms_field"));
config.validateMappings(responseMap, e);
// all fields are non-aggregatable
assertThat(e.validationErrors().size(), equalTo(3));
assertThat(e.validationErrors().get(0),
equalTo("The field [date_field] must be aggregatable, but is not."));
assertThat(e.validationErrors().get(1),
equalTo("The field referenced by a histo group must be a [numeric] type, " +
"but found [keyword] for field [histogram_field]"));
assertThat(e.validationErrors().get(2),
equalTo("The field [terms_field] must be aggregatable across all indices, but is not."));
}
}