@@ -0,0 +1,151 @@

---
date:
- skip:
version: " - 7.99.99"
reason: introduced in 8.0.0 to be backported to 7.16.0

- do:
indices.create:
index: test
body:
settings:
index:
mode: time_series
number_of_replicas: 0
number_of_shards: 2
mappings:
properties:
"@timestamp":
type: date
metricset:
type: keyword
dimension: true

- do:
indices.get_mapping:
index: test
- match: { "[email protected]": date }

- do:
bulk:
refresh: true
        index: test
body:
- '{"index": {}}'
- '{"@timestamp": "2021-04-28T18:50:04.467Z", "metricset": "pod"}'

- do:
search:
        index: test
body:
docvalue_fields: [ '@timestamp' ]
- match: {hits.total.value: 1}
- match: { "hits.hits.0.fields.@timestamp": ["2021-04-28T18:50:04.467Z"] }

---
date_nanos:
- skip:
version: " - 7.99.99"
reason: introduced in 8.0.0 to be backported to 7.16.0

- do:
indices.create:
index: test
body:
settings:
index:
mode: time_series
number_of_replicas: 0
number_of_shards: 2
mappings:
properties:
"@timestamp":
type: date_nanos
metricset:
type: keyword
dimension: true

- do:
indices.get_mapping:
index: test
- match: { "[email protected]": date_nanos }

- do:
bulk:
refresh: true
        index: test
body:
- '{"index": {}}'
- '{"@timestamp": "2021-04-28T18:50:04.467Z", "metricset": "pod"}'

- do:
search:
        index: test
body:
docvalue_fields: [ '@timestamp' ]
- match: {hits.total.value: 1}
- match: { "hits.hits.0.fields.@timestamp": ["2021-04-28T18:50:04.467Z"] }

---
automatically add with date:
- skip:
version: " - 7.99.99"
reason: introduced in 8.0.0 to be backported to 7.16.0

- do:
indices.create:
index: test
body:
settings:
index:
mode: time_series
number_of_replicas: 0
number_of_shards: 2
mappings:
properties:
metricset:
type: keyword
dimension: true

- do:
indices.get_mapping:
index: test
- match: { 'test.mappings.properties.@timestamp': { "type": date } }

- do:
bulk:
refresh: true
        index: test
body:
- '{"index": {}}'
- '{"@timestamp": "2021-04-28T18:50:04.467Z", "metricset": "pod"}'

- do:
search:
        index: test
body:
docvalue_fields: [ '@timestamp' ]
- match: {hits.total.value: 1}
- match: { "hits.hits.0.fields.@timestamp": ["2021-04-28T18:50:04.467Z"] }

---
reject @timestamp with wrong type:
- skip:
version: " - 7.99.99"
reason: introduced in 8.0.0 to be backported to 7.16.0

- do:
catch: /@timestamp must be \[date\] or \[date_nanos\]/
indices.create:
index: test
body:
settings:
index:
mode: time_series
number_of_replicas: 0
number_of_shards: 2
mappings:
properties:
"@timestamp":
type: keyword
34 changes: 34 additions & 0 deletions server/src/main/java/org/elasticsearch/index/IndexMode.java
@@ -11,10 +11,15 @@
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.mapper.DateFieldMapper;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MappingParserContext;
import org.elasticsearch.index.mapper.RootObjectMapper;

import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.stream.Stream;

import static java.util.stream.Collectors.toSet;
@@ -26,6 +31,9 @@ public enum IndexMode {
STANDARD {
@Override
void validateWithOtherSettings(Map<Setting<?>, Object> settings) {}

@Override
public void completeMappings(MappingParserContext context, RootObjectMapper.Builder builder) {}
},
TIME_SERIES {
@Override
@@ -43,6 +51,27 @@ void validateWithOtherSettings(Map<Setting<?>, Object> settings) {
private String error(Setting<?> unsupported) {
return "[" + IndexSettings.MODE.getKey() + "=time_series] is incompatible with [" + unsupported.getKey() + "]";
}

@Override
public void completeMappings(MappingParserContext context, RootObjectMapper.Builder builder) {
Optional<Mapper.Builder> timestamp = builder.getBuilder("@timestamp");
if (timestamp.isEmpty()) {
builder.add(
new DateFieldMapper.Builder(
"@timestamp",
DateFieldMapper.Resolution.MILLISECONDS,
DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER,
context.scriptCompiler(),
DateFieldMapper.IGNORE_MALFORMED_SETTING.get(context.getSettings()),
context.getIndexSettings().getIndexVersionCreated()
)
);
return;
}
if (false == timestamp.get() instanceof DateFieldMapper.Builder) {
throw new IllegalArgumentException("@timestamp must be [date] or [date_nanos]");
}
Member Author:

Data streams also automatically add @timestamp as a date, but the way they do it seems really heavy to me. They have a special template called DataStreamTemplate and they automatically apply it if the name starts with .ds-. We could do something similar, but it'd take a bunch of extra layers of plumbing to get the setting into the template resolution layer. We'd still want the validation somewhere anyway.
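
For illustration, a minimal, hypothetical sketch of the ".ds-" prefix mechanism described above; the Template record, its contents, and builtInTemplateFor are placeholders, not the real DataStreamTemplate or Elasticsearch's template-resolution code.

// Hypothetical sketch only: apply a built-in template automatically based on an
// index-name prefix. Template is a placeholder type, not DataStreamTemplate.
import java.util.Map;
import java.util.Optional;

class BuiltInTemplateSketch {
    record Template(Map<String, Object> mappings) {}

    // Stand-in for the built-in mapping data streams apply: @timestamp as a date.
    private static final Template DATA_STREAM_TEMPLATE = new Template(
        Map.of("properties", Map.of("@timestamp", Map.of("type", "date")))
    );

    static Optional<Template> builtInTemplateFor(String indexName) {
        // Data-stream backing indices have names starting with ".ds-", so the prefix
        // alone decides whether the built-in template is applied.
        return indexName.startsWith(".ds-") ? Optional.of(DATA_STREAM_TEMPLATE) : Optional.empty();
    }

    public static void main(String[] args) {
        System.out.println(builtInTemplateFor(".ds-metrics-000001")); // template applied
        System.out.println(builtInTemplateFor("test"));               // no built-in template
    }
}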

Member:

It depends: do you want the validation outside of data streams, or as a configuration option on the data stream itself? If you want it outside of data streams (for use with aliases, for instance), then don't use the DataStreamTemplate.

Also, I notice that you add the mapper but don't actually enforce that @timestamp is filled in the way that a data stream does. I think you also need to validate that every document contains an @timestamp field, is that right? Or are you planning on filling it in automatically if it is missing?
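
For illustration, a hedged sketch of that per-document check, assuming the intent is to reject (rather than auto-fill) a document that arrives without a parseable @timestamp; the class, method, and message wording below are illustrative, not Elasticsearch APIs.

// Hypothetical sketch only: enforce that every indexed document carries a parseable
// @timestamp rather than filling one in automatically.
import java.time.Instant;
import java.util.Map;

class TimestampPresenceSketch {
    static Instant requireTimestamp(Map<String, Object> document) {
        Object value = document.get("@timestamp");
        if (value == null) {
            // Message wording is illustrative.
            throw new IllegalArgumentException("time series document is missing the [@timestamp] field");
        }
        // Throws DateTimeParseException if the value is not a valid ISO-8601 instant.
        return Instant.parse(value.toString());
    }

    public static void main(String[] args) {
        Map<String, Object> ok = Map.of("@timestamp", "2021-04-28T18:50:04.467Z", "metricset", "pod");
        System.out.println(requireTimestamp(ok));
        // requireTimestamp(Map.of("metricset", "pod")); // would throw: @timestamp missing
    }
}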

Member Author:

> It depends: do you want the validation outside of data streams, or as a configuration option on the data stream itself?

I want it totally independent of data streams. If a time_series mode index is not in a data stream, I still want there to be an @timestamp field. I was more wondering if I should make a second special template or use a mechanism like this one here.

> Also, I notice that you add the mapper but don't actually enforce that @timestamp is filled in the way that a data stream does. I think you also need to validate that every document contains an @timestamp field, is that right? Or are you planning on filling it in automatically if it is missing?

I was planning on adding that test in a follow-up change.

Member:

This approach seems fine to me then, since you want it to be independent.

> I was planning on adding that test in a follow-up change.

This should hopefully be as easy as using DataStreamTimestampFieldMapper instead of the DateFieldMapper. I think we should probably unify them as well so that they share the same infrastructure for validation; for example, it doesn't help if we add an @timestamp field but the user configures it with indexed: false, so we can't use it. The DataStreamTimestampFieldMapper does more validation here.
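
For illustration, a hedged sketch of that stricter mapping-level validation, assuming checks along the lines attributed to DataStreamTimestampFieldMapper (allowed type, indexed, doc values); the method and messages below are illustrative, not the mapper's actual code.

// Hypothetical sketch only: mapping-level checks beyond the [date]/[date_nanos] type
// check in this PR.
import java.util.Set;

class TimestampMappingValidationSketch {
    private static final Set<String> ALLOWED_TYPES = Set.of("date", "date_nanos");

    static void validateTimestampMapping(String type, boolean indexed, boolean hasDocValues) {
        if (ALLOWED_TYPES.contains(type) == false) {
            throw new IllegalArgumentException("@timestamp must be [date] or [date_nanos]");
        }
        if (indexed == false) {
            // An unindexed @timestamp can't drive time-series range queries.
            throw new IllegalArgumentException("@timestamp must be indexed");
        }
        if (hasDocValues == false) {
            // Doc values are needed for sorting and aggregating on @timestamp.
            throw new IllegalArgumentException("@timestamp must have doc values");
        }
    }

    public static void main(String[] args) {
        validateTimestampMapping("date", true, true); // passes
        // validateTimestampMapping("date", false, true);   // would throw: must be indexed
        // validateTimestampMapping("keyword", true, true); // would throw: wrong type
        System.out.println("@timestamp mapping is valid");
    }
}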

Member Author:

I can have a look at unifying them in the follow-up, yeah!

}
};

private static final List<Setting<?>> TIME_SERIES_UNSUPPORTED = List.of(
@@ -57,4 +86,9 @@ private String error(Setting<?> unsupported) {
);

abstract void validateWithOtherSettings(Map<Setting<?>, Object> settings);

/**
     * Validate and/or modify the mappings after they've been parsed.
*/
public abstract void completeMappings(MappingParserContext context, RootObjectMapper.Builder builder);
}
@@ -28,6 +28,7 @@
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Optional;

public class ObjectMapper extends Mapper implements Cloneable {
private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(ObjectMapper.class);
@@ -86,6 +87,10 @@ public Builder add(Mapper.Builder builder) {
return this;
}

public Optional<Mapper.Builder> getBuilder(String name) {
return mappersBuilders.stream().filter(b -> b.name().equals(name)).findFirst();
}

protected final Map<String, Mapper> buildMappers(boolean root, MapperBuilderContext context) {
if (root == false) {
context = context.createChildContext(name);
@@ -151,6 +151,7 @@ public RootObjectMapper.Builder parse(String name, Map<String, Object> node, Map
iterator.remove();
}
}
parserContext.getIndexSettings().getMode().completeMappings(parserContext, builder);
return builder;
}

@@ -10,11 +10,19 @@

import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.index.mapper.DateFieldMapper;
import org.elasticsearch.index.mapper.DateFieldMapper.DateFieldType;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MapperServiceTestCase;

import java.io.IOException;

import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;

-public class TimeSeriesModeTests extends ESTestCase {
+public class TimeSeriesModeTests extends MapperServiceTestCase {
public void testPartitioned() {
Settings s = Settings.builder()
.put(IndexMetadata.INDEX_ROUTING_PARTITION_SIZE_SETTING.getKey(), 2)
@@ -50,4 +58,42 @@ public void testSortOrder() {
Exception e = expectThrows(IllegalArgumentException.class, () -> IndexSettings.MODE.get(s));
assertThat(e.getMessage(), equalTo("[index.mode=time_series] is incompatible with [index.sort.order]"));
}

public void testAddsTimestamp() throws IOException {
Settings s = Settings.builder().put(IndexSettings.MODE.getKey(), "time_series").build();
DocumentMapper mapper = createMapperService(s, mapping(b -> {})).documentMapper();
MappedFieldType timestamp = mapper.mappers().getFieldType("@timestamp");
assertThat(timestamp, instanceOf(DateFieldType.class));
assertThat(((DateFieldType) timestamp).resolution(), equalTo(DateFieldMapper.Resolution.MILLISECONDS));
}

public void testTimestampMillis() throws IOException {
Settings s = Settings.builder().put(IndexSettings.MODE.getKey(), "time_series").build();
DocumentMapper mapper = createMapperService(s, mapping(b -> b.startObject("@timestamp").field("type", "date").endObject()))
.documentMapper();
MappedFieldType timestamp = mapper.mappers().getFieldType("@timestamp");
assertThat(timestamp, instanceOf(DateFieldType.class));
assertThat(((DateFieldType) timestamp).resolution(), equalTo(DateFieldMapper.Resolution.MILLISECONDS));
}

public void testTimestampNanos() throws IOException {
Settings s = Settings.builder().put(IndexSettings.MODE.getKey(), "time_series").build();
DocumentMapper mapper = createMapperService(s, mapping(b -> b.startObject("@timestamp").field("type", "date_nanos").endObject()))
.documentMapper();
MappedFieldType timestamp = mapper.mappers().getFieldType("@timestamp");
assertThat(timestamp, instanceOf(DateFieldType.class));
assertThat(((DateFieldType) timestamp).resolution(), equalTo(DateFieldMapper.Resolution.NANOSECONDS));
}

public void testBadTimestamp() throws IOException {
Settings s = Settings.builder().put(IndexSettings.MODE.getKey(), "time_series").build();
Exception e = expectThrows(
MapperParsingException.class,
() -> createMapperService(
s,
mapping(b -> b.startObject("@timestamp").field("type", randomFrom("keyword", "int", "long", "double", "text")).endObject())
)
);
assertThat(e.getMessage(), equalTo("Failed to parse mapping: @timestamp must be [date] or [date_nanos]"));
}
}