@@ -200,7 +200,7 @@ public void testDateNanosFormatUpgrade() throws IOException {
         Request index = new Request("POST", "/" + indexName + "/_doc/");
         XContentBuilder doc = XContentBuilder.builder(XContentType.JSON.xContent())
             .startObject()
-            .field("date", "2015-01-01T12:10:30.123456789Z")
+            .field("date", "2015-01-01T12:10:30.123Z")
             .field("date_nanos", "2015-01-01T12:10:30.123456789Z")
             .endObject();
         index.addParameter("refresh", "true");
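
Note: the `date` value above is capped at millisecond precision because the millisecond-resolution `date` field type cannot represent the extra digits; only the `date_nanos` field keeps them. A minimal java.time sketch of the loss (illustrative only, class name made up):

    import java.time.Instant;
    import java.time.temporal.ChronoUnit;

    public class ResolutionLossDemo {
        public static void main(String[] args) {
            Instant nanos = Instant.parse("2015-01-01T12:10:30.123456789Z");
            // A millisecond-resolution field can keep at most this much of the value:
            Instant millis = nanos.truncatedTo(ChronoUnit.MILLIS);
            System.out.println(nanos);  // 2015-01-01T12:10:30.123456789Z
            System.out.println(millis); // 2015-01-01T12:10:30.123Z -- the 456789 nanosecond tail is gone
        }
    }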

@@ -9,7 +9,7 @@
       index: timetest
       body:
         mappings:
-          "properties": { "my_time": {"type": "date", "format": "strict_date_optional_time_nanos"}}
+          "properties": { "my_time": {"type": "date_nanos", "format": "strict_date_optional_time_nanos"}}
 
   - do:
       ingest.put_pipeline:

@@ -8,7 +8,7 @@ setup:
         mappings:
           properties:
             mydate:
-              type: date
+              type: date_nanos
               format: "uuuu-MM-dd'T'HH:mm:ss.SSSSSSSSSZZZZZ"
 
   - do:

@@ -290,7 +290,9 @@ private Long parseNullValue(DateFieldType fieldType) {
             return null;
         }
         try {
-            return fieldType.parse(nullValue.getValue());
+            final String fieldName = fieldType.name();
+            final String indexName = "[unknown]"; // the index name is not available from the builder context here
+            return fieldType.parseWithDeprecation(nullValue.getValue(), fieldName, indexName);
         } catch (Exception e) {
             if (indexCreatedVersion.onOrAfter(Version.V_8_0_0)) {
                 throw new MapperParsingException("Error parsing [null_value] on field [" + name() + "]: " + e.getMessage(), e);
@@ -307,6 +309,7 @@ private Long parseNullValue(DateFieldType fieldType) {
     public DateFieldMapper build(MapperBuilderContext context) {
         DateFieldType ft = new DateFieldType(context.buildFullName(name()), index.getValue(), store.getValue(), docValues.getValue(),
             buildFormatter(), resolution, nullValue.getValue(), scriptValues(), meta.getValue());
+
         Long nullTimestamp = parseNullValue(ft);
         return new DateFieldMapper(name, ft, multiFieldsBuilder.build(this, context),
             copyTo.build(), nullTimestamp, resolution, this);
@@ -378,7 +381,22 @@ protected DateMathParser dateMathParser() {
 
     // Visible for testing.
     public long parse(String value) {
-        return resolution.convert(DateFormatters.from(dateTimeFormatter().parse(value), dateTimeFormatter().locale()).toInstant());
+        final Instant instant = getInstant(value);
+        return resolution.convert(instant);
     }
 
+    public long parseWithDeprecation(String value, String fieldName, String indexName) {
+        final Instant instant = getInstant(value);
+        if (resolution == Resolution.MILLISECONDS && instant.getNano() % 1000000 != 0) {
+            DEPRECATION_LOGGER.warn(DeprecationCategory.MAPPINGS, "date_field_with_nanos",
+                "You are attempting to store a nanosecond resolution on a field [{}] of type date on index [{}]. " +
+                    "The nanosecond part was lost. Use date_nanos field type.", fieldName, indexName);
+        }
+        return resolution.convert(instant);
+    }
+
+    private Instant getInstant(String value) {
+        return DateFormatters.from(dateTimeFormatter().parse(value), dateTimeFormatter().locale()).toInstant();
+    }
+
     /**
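
Note: the condition in `parseWithDeprecation` above detects sub-millisecond input by checking whether the nano-of-second component is a whole number of milliseconds. A standalone java.time sketch of the same test (illustrative only, class name made up):

    import java.time.Instant;

    public class NanoCheckDemo {
        public static void main(String[] args) {
            Instant millisOnly = Instant.parse("2015-01-01T12:10:30.123Z");      // getNano() = 123000000
            Instant subMillis = Instant.parse("2015-01-01T12:10:30.123456789Z"); // getNano() = 123456789
            System.out.println(millisOnly.getNano() % 1000000 != 0); // false -> no warning
            System.out.println(subMillis.getNano() % 1000000 != 0);  // true  -> deprecation warning fires
        }
    }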
@@ -700,7 +718,9 @@ protected void parseCreateField(DocumentParserContext context) throws IOException {
                 timestamp = nullValue;
             } else {
                 try {
-                    timestamp = fieldType().parse(dateAsString);
+                    final String fieldName = fieldType().name();
+                    final String indexName = context.indexSettings().getIndex().getName();
+                    timestamp = fieldType().parseWithDeprecation(dateAsString, fieldName, indexName);
                 } catch (IllegalArgumentException | ElasticsearchParseException | DateTimeException | ArithmeticException e) {
                     if (ignoreMalformed) {
                         context.addIgnoredField(mappedFieldType.name());

@@ -142,6 +142,18 @@ public void testIgnoreMalformed() throws IOException {
         testIgnoreMalformedForValue("-522000000", "long overflow", "date_optional_time");
     }
 
+    public void testResolutionLoss() throws Exception {
+        DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b
+            .field("type", "date")));
+
+        ParsedDocument doc = mapper.parse(source(b -> b.field("field", "2018-10-03T14:42:44.123456+0000")));
+
+        assertWarnings("You are attempting to store a nanosecond resolution " +
+            "on a field [field] of type date on index [index]. " +
+            "The nanosecond part was lost. Use date_nanos field type.");
+    }
+
     private void testIgnoreMalformedForValue(String value, String expectedCause, String dateFormat) throws IOException {
@@ -406,11 +418,11 @@ public void testFetchMillisFromIso8601() throws IOException {
     }
 
     public void testFetchMillisFromIso8601Nanos() throws IOException {
-        assertFetch(dateMapperService(), "field", randomIs8601Nanos(MAX_ISO_DATE), null);
+        assertFetch(dateNanosMapperService(), "field", randomIs8601Nanos(MAX_NANOS), null);
     }
 
     public void testFetchMillisFromIso8601NanosFormatted() throws IOException {
-        assertFetch(dateMapperService(), "field", randomIs8601Nanos(MAX_ISO_DATE), "strict_date_optional_time_nanos");
+        assertFetch(dateNanosMapperService(), "field", randomIs8601Nanos(MAX_NANOS), "strict_date_optional_time_nanos");
     }
 
     /**
@@ -421,7 +433,8 @@ public void testFetchMillisFromIso8601NanosFormatted() throws IOException {
      * way.
      */
     public void testFetchMillisFromRoundedNanos() throws IOException {
-        assertFetch(dateMapperService(), "field", randomDecimalNanos(MAX_ISO_DATE), null);
+        assertFetch(dateMapperService(), "field", randomDecimalMillis(MAX_ISO_DATE), null);
+        assertFetch(dateNanosMapperService(), "field", randomDecimalNanos(MAX_NANOS), null);
     }
 
     /**
@@ -534,7 +547,7 @@ protected Object generateRandomInputValue(MappedFieldType ft) {
         switch (((DateFieldType) ft).resolution()) {
             case MILLISECONDS:
                 if (randomBoolean()) {
-                    return randomIs8601Nanos(MAX_ISO_DATE);
+                    return randomDecimalMillis(MAX_ISO_DATE);
                 }
                 return randomLongBetween(0, Long.MAX_VALUE);
             case NANOSECONDS:
@@ -567,6 +580,10 @@ private String randomDecimalNanos(long maxMillis) {
         return Long.toString(randomLongBetween(0, maxMillis)) + "." + between(0, 999999);
     }
 
+    private String randomDecimalMillis(long maxMillis) {
+        return Long.toString(randomLongBetween(0, maxMillis));
+    }
+
     public void testScriptAndPrecludedParameters() {
         {
             Exception e = expectThrows(MapperParsingException.class, () -> createDocumentMapper(fieldMapping(b -> {

@@ -14,6 +14,7 @@
 import org.elasticsearch.client.ResponseException;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.io.Streams;
+import org.elasticsearch.common.time.DateFormatter;
 import org.elasticsearch.xcontent.XContentBuilder;
 import org.elasticsearch.xcontent.json.JsonXContent;
 import org.elasticsearch.test.rest.ESRestTestCase;
@@ -101,7 +102,9 @@ private void createTemplateWithAllowAutoCreate(Boolean allowAutoCreate) throws IOException {
 
     private Response indexDocument() throws IOException {
         final Request indexDocumentRequest = new Request("POST", "recipe_kr/_doc");
-        indexDocumentRequest.setJsonEntity("{ \"@timestamp\": \"" + Instant.now() + "\", \"name\": \"Kimchi\" }");
+        final Instant now = Instant.now();
+        final String time = DateFormatter.forPattern("strict_date_optional_time").format(now);
+        indexDocumentRequest.setJsonEntity("{ \"@timestamp\": \"" + time + "\", \"name\": \"Kimchi\" }");
         return client().performRequest(indexDocumentRequest);
     }
 }
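
Note: `Instant.now().toString()` prints as many fractional digits as the clock provides (microseconds on Java 9 and later), which would now trip the nanosecond-loss deprecation on a millisecond `date` field; `strict_date_optional_time` prints at most millisecond precision. A plain java.time sketch of the difference (illustrative only; it approximates the Elasticsearch formatter rather than calling it):

    import java.time.Instant;
    import java.time.ZoneOffset;
    import java.time.format.DateTimeFormatter;

    public class TimestampPrecisionDemo {
        public static void main(String[] args) {
            Instant now = Instant.now();
            System.out.println(now); // e.g. 2021-10-05T09:30:12.123456Z -- microsecond tail
            // Capping the fraction at three digits, roughly what strict_date_optional_time emits:
            String millisOnly = DateTimeFormatter.ofPattern("uuuu-MM-dd'T'HH:mm:ss.SSSX")
                .withZone(ZoneOffset.UTC)
                .format(now);
            System.out.println(millisOnly); // e.g. 2021-10-05T09:30:12.123Z
        }
    }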

@@ -12,6 +12,8 @@
 import org.elasticsearch.cluster.metadata.IndexMetadata;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.time.DateFormatter;
+import org.elasticsearch.common.time.DateFormatters;
 import org.elasticsearch.test.rest.ESRestTestCase;
 
 import java.time.ZoneOffset;
@@ -278,7 +280,7 @@ public void testHRDSplit() throws Exception {
 
         for (int i = 1; i <= 100; i++) {
             ZonedDateTime time = baseTime.plusHours(i);
-            String formattedTime = time.format(DateTimeFormatter.ISO_DATE_TIME);
+            String formattedTime = DateFormatter.forPattern("strict_date_optional_time").format(time);
             if (i % 50 == 0) {
                 // Anomaly has 100 docs, but we don't care about the value
                 for (int j = 0; j < 100; j++) {
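
Note: `DateTimeFormatter.ISO_DATE_TIME` prints the full fractional precision a value carries, so nanosecond-bearing timestamps would leak into the indexed documents; the switch to the `strict_date_optional_time` printer keeps them at millisecond precision. A small sketch (illustrative only, class name made up):

    import java.time.ZoneOffset;
    import java.time.ZonedDateTime;
    import java.time.format.DateTimeFormatter;

    public class IsoDateTimeFractionDemo {
        public static void main(String[] args) {
            ZonedDateTime time = ZonedDateTime.of(2021, 10, 5, 9, 30, 12, 123456789, ZoneOffset.UTC);
            // ISO_DATE_TIME emits every fractional digit the value carries:
            System.out.println(time.format(DateTimeFormatter.ISO_DATE_TIME));
            // 2021-10-05T09:30:12.123456789Z
        }
    }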

@@ -210,8 +210,8 @@ teardown:
             test_alias: {}
           mappings:
             properties:
-              time:
-                type: date
+              date:
+                type: date_nanos
               user:
                 type: keyword
               stars:

@@ -48,8 +48,8 @@ teardown:
             test_alias: {}
           mappings:
             properties:
-              time:
-                type: date
+              date:
+                type: date_nanos
               user:
                 type: keyword
               stars:
@@ -107,8 +107,8 @@ teardown:
             test_alias: {}
           mappings:
             properties:
-              time:
-                type: date
+              date:
+                type: date_nanos
               user:
                 type: keyword
               stars:

@@ -34,6 +34,7 @@
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.time.DateFormatter;
 import org.elasticsearch.common.util.concurrent.ThreadContext;
 import org.elasticsearch.xcontent.NamedXContentRegistry;
 import org.elasticsearch.xcontent.XContentBuilder;
@@ -53,6 +54,7 @@
 import java.text.DecimalFormatSymbols;
 import java.time.Instant;
 import java.time.ZoneId;
+import java.time.format.DateTimeFormatter;
 import java.util.ArrayList;
 import java.util.Base64;
 import java.util.Collections;
@@ -191,7 +193,7 @@ public void testContinousEvents() throws Exception {
         for (int i = 0; i < 100; i++) {
             dates.add(
                 // create a random date between 1/1/2001 and 1/1/2006
-                ContinuousTestCase.STRICT_DATE_OPTIONAL_TIME_PRINTER_NANOS.withZone(ZoneId.of("UTC"))
+                formatTimestamp(dateType)
                     .format(Instant.ofEpochMilli(randomLongBetween(978307200000L, 1136073600000L)))
             );
         }
@@ -246,11 +248,11 @@
             }
 
             // simulate a different timestamp that is off from the timestamp used for sync, so it can fall into the previous bucket
-            String metricDateString = ContinuousTestCase.STRICT_DATE_OPTIONAL_TIME_PRINTER_NANOS.withZone(ZoneId.of("UTC"))
+            String metricDateString = formatTimestamp(dateType)
                 .format(runDate.minusSeconds(randomIntBetween(0, 2)).plusNanos(randomIntBetween(0, 999999)));
             source.append("\"metric-timestamp\":\"").append(metricDateString).append("\",");
 
-            String dateString = ContinuousTestCase.STRICT_DATE_OPTIONAL_TIME_PRINTER_NANOS.withZone(ZoneId.of("UTC"))
+            String dateString = formatTimestamp(dateType)
                 .format(runDate.plusNanos(randomIntBetween(0, 999999)));
 
             source.append("\"timestamp\":\"").append(dateString).append("\",");
@@ -298,6 +300,14 @@ public void testContinousEvents() throws Exception {
         }
     }
 
+    private DateFormatter formatTimestamp(String dateType) {
+        if ("date_nanos".equals(dateType)) { // use equals(), not ==, for string comparison
+            return DateFormatter.forPattern("strict_date_optional_time_nanos").withZone(ZoneId.of("UTC"));
+        } else {
+            return DateFormatter.forPattern("strict_date_optional_time").withZone(ZoneId.of("UTC"));
+        }
+    }
+
     /**
      * Create the transform source index with randomized settings to increase test coverage, for example
      * index sorting, triggers query optimizations.