Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,7 @@
import org.elasticsearch.common.util.CollectionUtils;
import org.elasticsearch.datastreams.DataStreamsPlugin;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.mapper.SeqNoFieldMapper;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.seqno.SequenceNumbers;
import org.elasticsearch.plugins.ActionPlugin;
Expand Down Expand Up @@ -103,7 +104,7 @@ public void testUpdateByQueryOnRegularIndex() {
assumeTrue("requires disable_sequence_numbers feature flag", IndexSettings.DISABLE_SEQUENCE_NUMBERS_FEATURE_FLAG);

boolean disableSequenceNumbers = randomBoolean();
createIndex("test-index", indexSettings(1, 0).put(IndexSettings.DISABLE_SEQUENCE_NUMBERS.getKey(), disableSequenceNumbers).build());
createIndex("test-index", disableSeqNoSettings(disableSequenceNumbers));
indexDoc("test-index", "1", "field", "value");
refresh("test-index");

Expand All @@ -126,7 +127,7 @@ public void testDeleteByQueryOnRegularIndex() {
assumeTrue("requires disable_sequence_numbers feature flag", IndexSettings.DISABLE_SEQUENCE_NUMBERS_FEATURE_FLAG);

boolean disableSequenceNumbers = randomBoolean();
createIndex("test-index", indexSettings(1, 0).put(IndexSettings.DISABLE_SEQUENCE_NUMBERS.getKey(), disableSequenceNumbers).build());
createIndex("test-index", disableSeqNoSettings(disableSequenceNumbers));
indexDoc("test-index", "1", "field", "value");
refresh("test-index");

Expand All @@ -149,7 +150,7 @@ public void testDeleteByQueryOnRegularIndex() {
public void testPatternMatchingMultipleIndicesWithMixedSettingsRejects() {
assumeTrue("requires disable_sequence_numbers feature flag", IndexSettings.DISABLE_SEQUENCE_NUMBERS_FEATURE_FLAG);

createIndex("test-index-1", indexSettings(1, 0).put(IndexSettings.DISABLE_SEQUENCE_NUMBERS.getKey(), true).build());
createIndex("test-index-1", disableSeqNoSettings(true));
createIndex("test-index-2", indexSettings(1, 0).build());
indexDoc("test-index-1", "1", "field", "value");
indexDoc("test-index-2", "1", "field", "value");
Expand All @@ -167,9 +168,9 @@ public void testPatternMatchingMultipleIndicesWithSameSeqNoDisabledSetting() {
assumeTrue("requires disable_sequence_numbers feature flag", IndexSettings.DISABLE_SEQUENCE_NUMBERS_FEATURE_FLAG);

boolean disableSequenceNumbers = randomBoolean();
Settings seqNoDisabled = indexSettings(1, 0).put(IndexSettings.DISABLE_SEQUENCE_NUMBERS.getKey(), disableSequenceNumbers).build();
createIndex("test-index-1", seqNoDisabled);
createIndex("test-index-2", seqNoDisabled);
Settings seqNoSettings = disableSeqNoSettings(disableSequenceNumbers);
createIndex("test-index-1", seqNoSettings);
createIndex("test-index-2", seqNoSettings);
indexDoc("test-index-1", "1", "field", "value");
indexDoc("test-index-2", "1", "field", "value");
refresh("test-index-*");
Expand All @@ -193,8 +194,7 @@ public void testDataStreamWithSeqNoDisabledOnAllBackingIndices() throws Exceptio
assumeTrue("requires disable_sequence_numbers feature flag", IndexSettings.DISABLE_SEQUENCE_NUMBERS_FEATURE_FLAG);

String dsName = "my-data-stream";
Settings dsSettings = Settings.builder().put(IndexSettings.DISABLE_SEQUENCE_NUMBERS.getKey(), true).build();
createDataStreamWithTemplate(dsName, dsSettings);
createDataStreamWithTemplate(dsName, disableSeqNoTemplateSettings(true));

int numDocs = between(1, 5);
indexDocs(dsName, numDocs);
Expand Down Expand Up @@ -225,7 +225,7 @@ public void testDataStreamWithMixedBackingIndices() throws Exception {
int numDocs = between(1, 5);
indexDocs(dsName, numDocs);

updateDataStreamTemplate(dsName, Settings.builder().put(IndexSettings.DISABLE_SEQUENCE_NUMBERS.getKey(), true).build());
updateDataStreamTemplate(dsName, disableSeqNoTemplateSettings(true));
rolloverDataStream(dsName);
int numDocs2 = between(1, 5);
indexDocs(dsName, numDocs2);
Expand Down Expand Up @@ -256,12 +256,12 @@ public void testMixedDataStreamAndRegularIndexWithSameResolvedSetting() throws E
int numDocs = between(1, 5);
indexDocs(dsName, numDocs);

updateDataStreamTemplate(dsName, Settings.builder().put(IndexSettings.DISABLE_SEQUENCE_NUMBERS.getKey(), true).build());
updateDataStreamTemplate(dsName, disableSeqNoTemplateSettings(true));
rolloverDataStream(dsName);
int numDocs2 = between(1, 5);
indexDocs(dsName, numDocs2);

createIndex("test-regular", indexSettings(1, 0).put(IndexSettings.DISABLE_SEQUENCE_NUMBERS.getKey(), true).build());
createIndex("test-regular", disableSeqNoSettings(true));
indexDoc("test-regular", "1", "field", "value");
refresh("test-*");

Expand All @@ -288,7 +288,7 @@ public void testMixedDataStreamAndRegularIndexWithDifferentResolvedSettingReject
createDataStreamWithTemplate(dsName, Settings.EMPTY);
indexDocs(dsName, between(1, 5));

updateDataStreamTemplate(dsName, Settings.builder().put(IndexSettings.DISABLE_SEQUENCE_NUMBERS.getKey(), true).build());
updateDataStreamTemplate(dsName, disableSeqNoTemplateSettings(true));
rolloverDataStream(dsName);
indexDocs(dsName, between(1, 5));

Expand Down Expand Up @@ -402,4 +402,20 @@ private void rolloverDataStream(String dsName) {
AcknowledgedResponse response = client().admin().indices().prepareRolloverIndex(dsName).get();
assertTrue(response.isAcknowledged());
}

private Settings disableSeqNoSettings(boolean disableSequenceNumbers) {
Settings.Builder builder = indexSettings(1, 0).put(IndexSettings.DISABLE_SEQUENCE_NUMBERS.getKey(), disableSequenceNumbers);
if (disableSequenceNumbers) {
builder.put(IndexSettings.SEQ_NO_INDEX_OPTIONS_SETTING.getKey(), SeqNoFieldMapper.SeqNoIndexOptions.DOC_VALUES_ONLY);
}
return builder.build();
}

private static Settings disableSeqNoTemplateSettings(boolean disableSequenceNumbers) {
Settings.Builder builder = Settings.builder().put(IndexSettings.DISABLE_SEQUENCE_NUMBERS.getKey(), disableSequenceNumbers);
if (disableSequenceNumbers) {
builder.put(IndexSettings.SEQ_NO_INDEX_OPTIONS_SETTING.getKey(), SeqNoFieldMapper.SeqNoIndexOptions.DOC_VALUES_ONLY);
}
return builder.build();
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -38,6 +38,7 @@
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.engine.EngineTestCase;
import org.elasticsearch.index.engine.VersionConflictEngineException;
import org.elasticsearch.index.mapper.SeqNoFieldMapper;
import org.elasticsearch.index.mapper.SourceFieldMapper;
import org.elasticsearch.index.seqno.SequenceNumbers;
import org.elasticsearch.plugins.Plugin;
Expand Down Expand Up @@ -1108,7 +1109,8 @@ public void testGetWithSequenceNumbersDisabled() {
prepareCreate(index).setSettings(
Settings.builder()
.put(IndexSettings.DISABLE_SEQUENCE_NUMBERS.getKey(), true)
.put("index.refresh_interval", -1)
.put(IndexSettings.SEQ_NO_INDEX_OPTIONS_SETTING.getKey(), SeqNoFieldMapper.SeqNoIndexOptions.DOC_VALUES_ONLY)
.put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), -1)
.put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
.put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)
)
Expand Down
16 changes: 15 additions & 1 deletion server/src/main/java/org/elasticsearch/index/IndexSettings.java
Original file line number Diff line number Diff line change
Expand Up @@ -1028,12 +1028,26 @@ public void validate(Boolean enabled, Map<Setting<?>, Object> settings) {
)
);
}
// Sequence numbers cannot be trimmed for points, so we enforce doc values only usage
var seqNoIndexOptions = (SeqNoFieldMapper.SeqNoIndexOptions) settings.get(SEQ_NO_INDEX_OPTIONS_SETTING);
if (seqNoIndexOptions != SeqNoFieldMapper.SeqNoIndexOptions.DOC_VALUES_ONLY) {
throw new IllegalArgumentException(
String.format(
Locale.ROOT,
"The setting [%s] is only permitted when [%s] is set to [%s]. Current value: [%s].",
DISABLE_SEQUENCE_NUMBERS.getKey(),
SEQ_NO_INDEX_OPTIONS_SETTING.getKey(),
SeqNoFieldMapper.SeqNoIndexOptions.DOC_VALUES_ONLY,
seqNoIndexOptions
)
);
}
}
}

@Override
public Iterator<Setting<?>> settings() {
List<Setting<?>> list = List.of(SETTING_INDEX_VERSION_CREATED);
List<Setting<?>> list = List.of(SETTING_INDEX_VERSION_CREATED, SEQ_NO_INDEX_OPTIONS_SETTING);
return list.iterator();
}
},
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2805,16 +2805,21 @@ private IndexWriterConfig getIndexWriterConfig() {
MergePolicy mergePolicy = config().getMergePolicy();
// always configure soft-deletes field so an engine with soft-deletes disabled can open a Lucene index with soft-deletes.
iwc.setSoftDeletesField(Lucene.SOFT_DELETES_FIELD);
final var seqNoIndexOptions = engineConfig.getIndexSettings().seqNoIndexOptions();
// sequence numbers are trimmed when doc values only are used
final boolean pruneSeqNo = engineConfig.getIndexSettings().sequenceNumbersDisabled()
&& seqNoIndexOptions == SeqNoFieldMapper.SeqNoIndexOptions.DOC_VALUES_ONLY;
mergePolicy = new RecoverySourcePruneMergePolicy(
engineConfig.getIndexSettings().isRecoverySourceSyntheticEnabled() ? null : SourceFieldMapper.RECOVERY_SOURCE_NAME,
engineConfig.getIndexSettings().isRecoverySourceSyntheticEnabled()
? SourceFieldMapper.RECOVERY_SOURCE_SIZE_NAME
: SourceFieldMapper.RECOVERY_SOURCE_NAME,
engineConfig.getIndexSettings().getMode() == IndexMode.TIME_SERIES,
() -> softDeletesPolicy.getRetentionQuery(engineConfig.getIndexSettings().seqNoIndexOptions()),
pruneSeqNo,
() -> softDeletesPolicy.getRetentionQuery(seqNoIndexOptions),
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Maybe in a follow-up we should check whether we could execute the query once for both policies, so that we are sure both prune the same doc IDs?

Copy link
Copy Markdown
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I can take a look indeed.

new SoftDeletesRetentionMergePolicy(
Lucene.SOFT_DELETES_FIELD,
() -> softDeletesPolicy.getRetentionQuery(engineConfig.getIndexSettings().seqNoIndexOptions()),
() -> softDeletesPolicy.getRetentionQuery(seqNoIndexOptions),
useTsdbSyntheticId ? mergePolicy : new PrunePostingsMergePolicy(mergePolicy, IdFieldMapper.NAME)
),
engineConfig.getIndexSettings().useTimeSeriesSyntheticId()
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,7 @@
import org.elasticsearch.index.codec.FilterDocValuesProducer;
import org.elasticsearch.index.codec.storedfields.TSDBStoredFieldsFormat;
import org.elasticsearch.index.mapper.IdFieldMapper;
import org.elasticsearch.index.mapper.SeqNoFieldMapper;
import org.elasticsearch.search.internal.FilterStoredFieldVisitor;

import java.io.IOException;
Expand All @@ -43,6 +44,7 @@ final class RecoverySourcePruneMergePolicy extends OneMergeWrappingMergePolicy {
@Nullable String pruneStoredFieldName,
String pruneNumericDVFieldName,
boolean pruneIdField,
boolean pruneSeqNo,
Supplier<Query> retainSourceQuerySupplier,
MergePolicy in,
boolean useSyntheticId
Expand All @@ -55,6 +57,7 @@ public CodecReader wrapForMerge(CodecReader reader) throws IOException {
pruneStoredFieldName,
pruneNumericDVFieldName,
pruneIdField,
pruneSeqNo,
wrapped,
retainSourceQuerySupplier,
useSyntheticId
Expand All @@ -67,16 +70,22 @@ private static CodecReader wrapReader(
String pruneStoredFieldName,
String pruneNumericDVFieldName,
boolean pruneIdField,
boolean pruneSeqNo,
CodecReader reader,
Supplier<Query> retainSourceQuerySupplier,
boolean useSyntheticId
) throws IOException {
assert pruneSeqNo == false || reader.getPointValues(SeqNoFieldMapper.NAME) == null
: "_seq_no points must not exist when sequence number pruning is enabled";
NumericDocValues recoverySource = reader.getNumericDocValues(pruneNumericDVFieldName);
if (recoverySource == null || recoverySource.nextDoc() == DocIdSetIterator.NO_MORE_DOCS) {
final boolean hasRecoverySource = recoverySource != null && recoverySource.nextDoc() != DocIdSetIterator.NO_MORE_DOCS;
NumericDocValues seqNoDocValues = reader.getNumericDocValues(SeqNoFieldMapper.NAME);
final boolean hasSeqNo = pruneSeqNo && seqNoDocValues != null && seqNoDocValues.nextDoc() != DocIdSetIterator.NO_MORE_DOCS;
if (hasRecoverySource == false && hasSeqNo == false) {
if (useSyntheticId) {
return unwrapSyntheticIdStoredFieldsReader(reader);
}
return reader; // early terminate - nothing to do here since none of the docs has a recovery source anymore.
return reader; // early terminate - nothing to do here
}
IndexSearcher s = new IndexSearcher(reader);
s.setQueryCache(null);
Expand All @@ -92,37 +101,41 @@ private static CodecReader wrapReader(
}
return reader; // keep all source
}
return new SourcePruningFilterCodecReader(
return new PruningFilterCodecReader(
pruneStoredFieldName,
pruneNumericDVFieldName,
pruneIdField,
pruneSeqNo,
reader,
recoverySourceToKeep,
useSyntheticId
);
} else {
return new SourcePruningFilterCodecReader(
return new PruningFilterCodecReader(
pruneStoredFieldName,
pruneNumericDVFieldName,
pruneIdField,
pruneSeqNo,
reader,
null,
useSyntheticId
);
}
}

private static class SourcePruningFilterCodecReader extends FilterCodecReader {
private static class PruningFilterCodecReader extends FilterCodecReader {
private final BitSet recoverySourceToKeep;
private final String pruneStoredFieldName;
private final String pruneNumericDVFieldName;
private final boolean pruneIdField;
private final boolean pruneSeqNo;
private final boolean useSyntheticId;

SourcePruningFilterCodecReader(
PruningFilterCodecReader(
@Nullable String pruneStoredFieldName,
String pruneNumericDVFieldName,
boolean pruneIdField,
boolean pruneSeqNo,
CodecReader reader,
BitSet recoverySourceToKeep,
boolean useSyntheticId
Expand All @@ -132,21 +145,29 @@ private static class SourcePruningFilterCodecReader extends FilterCodecReader {
this.recoverySourceToKeep = recoverySourceToKeep;
this.pruneNumericDVFieldName = pruneNumericDVFieldName;
this.pruneIdField = pruneIdField;
this.pruneSeqNo = pruneSeqNo;
this.useSyntheticId = useSyntheticId;
}

private boolean shouldPruneNumericDocValues(String fieldName) {
if (fieldName.equals(pruneNumericDVFieldName)) {
return true;
}
return pruneSeqNo && fieldName.equals(SeqNoFieldMapper.NAME);
}

@Override
public DocValuesProducer getDocValuesReader() {
DocValuesProducer docValuesReader = super.getDocValuesReader();
return new FilterDocValuesProducer(docValuesReader) {
@Override
public NumericDocValues getNumeric(FieldInfo field) throws IOException {
NumericDocValues numeric = super.getNumeric(field);
if (field.name.equals(pruneNumericDVFieldName)) {
assert numeric != null : pruneNumericDVFieldName + " must have numeric DV but was null";
if (shouldPruneNumericDocValues(field.name)) {
assert numeric != null : field.name + " must have numeric doc values but was null";
final DocIdSetIterator intersection;
if (recoverySourceToKeep == null) {
// we can't return null here lucenes DocIdMerger expects an instance
                            // we can't return null here; Lucene's DocIdMerger expects an instance
intersection = DocIdSetIterator.empty();
} else {
intersection = ConjunctionUtils.intersectIterators(
Expand Down Expand Up @@ -185,13 +206,7 @@ public StoredFieldsReader getFieldsReader() {
if (pruneStoredFieldName == null && pruneIdField == false && useSyntheticId == false) {
return fieldsReader;
}
return new RecoverySourcePruningStoredFieldsReader(
fieldsReader,
recoverySourceToKeep,
pruneStoredFieldName,
pruneIdField,
useSyntheticId
);
return new PruningStoredFieldsReader(fieldsReader, recoverySourceToKeep, pruneStoredFieldName, pruneIdField, useSyntheticId);
}

@Override
Expand All @@ -204,14 +219,14 @@ public CacheHelper getReaderCacheHelper() {
return null;
}

private static class RecoverySourcePruningStoredFieldsReader extends FilterStoredFieldsReader {
private static class PruningStoredFieldsReader extends FilterStoredFieldsReader {

private final BitSet recoverySourceToKeep;
private final String recoverySourceField;
private final boolean pruneIdField;
private final boolean useSyntheticId;

RecoverySourcePruningStoredFieldsReader(
PruningStoredFieldsReader(
StoredFieldsReader in,
BitSet recoverySourceToKeep,
@Nullable String recoverySourceField,
Expand Down Expand Up @@ -252,7 +267,7 @@ public Status needsField(FieldInfo fieldInfo) throws IOException {

@Override
public StoredFieldsReader getMergeInstance() {
return new RecoverySourcePruningStoredFieldsReader(
return new PruningStoredFieldsReader(
in.getMergeInstance(),
recoverySourceToKeep,
recoverySourceField,
Expand All @@ -263,13 +278,7 @@ public StoredFieldsReader getMergeInstance() {

@Override
public StoredFieldsReader clone() {
return new RecoverySourcePruningStoredFieldsReader(
in.clone(),
recoverySourceToKeep,
recoverySourceField,
pruneIdField,
useSyntheticId
);
return new PruningStoredFieldsReader(in.clone(), recoverySourceToKeep, recoverySourceField, pruneIdField, useSyntheticId);
}
}
}
Expand Down
Loading