
Commit 7cedc3e

Make Document a top-level class (#74472)
There is no reason for Document to be an inner class of ParseContext, especially as it is public and accessed directly from many different places. This commit moves it out into its own top-level class file, which has the added advantage of simplifying ParseContext, a class that could use some love too.
1 parent 7493250 commit 7cedc3e
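The diffs below show that the change is mechanical: every reference to ParseContext.Document becomes a reference to the new top-level class org.elasticsearch.index.mapper.LuceneDocument, while the document API itself (construction, add, getField, getFields) is untouched. As a minimal sketch of the renamed type in use, assuming Elasticsearch and Lucene are on the classpath and using only calls that appear in the touched files (the field name and value are purely illustrative):

import org.apache.lucene.document.StoredField;
import org.elasticsearch.index.mapper.LuceneDocument;

public class LuceneDocumentSketch {
    public static void main(String[] args) {
        // Before this commit the same type was written as ParseContext.Document.
        LuceneDocument doc = new LuceneDocument();

        // Same document API as before the move: add Lucene fields and read them back.
        doc.add(new StoredField("query_to_string", "field:value")); // illustrative field name and value
        System.out.println(doc.getField("query_to_string").stringValue());
    }
}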

47 files changed (+358 lines, -343 lines)


modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/RankFeatureFieldMapperTests.java

Lines changed: 1 addition & 1 deletion
@@ -38,7 +38,7 @@ protected void registerParameters(ParameterChecker checker) throws IOException {
     }
 
     @Override
-    protected void assertExistsQuery(MappedFieldType fieldType, Query query, ParseContext.Document fields) {
+    protected void assertExistsQuery(MappedFieldType fieldType, Query query, LuceneDocument fields) {
         assertThat(query, instanceOf(TermQuery.class));
         TermQuery termQuery = (TermQuery) query;
         assertEquals("_feature", termQuery.getTerm().field());

modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/TokenCountFieldMapperTests.java

Lines changed: 4 additions & 4 deletions
@@ -133,19 +133,19 @@ public TokenStreamComponents createComponents(String fieldName) {
 
     public void testParseNullValue() throws Exception {
         DocumentMapper mapper = createIndexWithTokenCountField();
-        ParseContext.Document doc = parseDocument(mapper, createDocument(null));
+        LuceneDocument doc = parseDocument(mapper, createDocument(null));
         assertNull(doc.getField("test.tc"));
     }
 
     public void testParseEmptyValue() throws Exception {
         DocumentMapper mapper = createIndexWithTokenCountField();
-        ParseContext.Document doc = parseDocument(mapper, createDocument(""));
+        LuceneDocument doc = parseDocument(mapper, createDocument(""));
         assertEquals(0, doc.getField("test.tc").numericValue());
     }
 
     public void testParseNotNullValue() throws Exception {
         DocumentMapper mapper = createIndexWithTokenCountField();
-        ParseContext.Document doc = parseDocument(mapper, createDocument("three tokens string"));
+        LuceneDocument doc = parseDocument(mapper, createDocument("three tokens string"));
         assertEquals(3, doc.getField("test.tc").numericValue());
     }
 
@@ -173,7 +173,7 @@ private SourceToParse createDocument(String fieldValue) throws Exception {
         return source(b -> b.field("test", fieldValue));
     }
 
-    private ParseContext.Document parseDocument(DocumentMapper mapper, SourceToParse request) {
+    private LuceneDocument parseDocument(DocumentMapper mapper, SourceToParse request) {
         return mapper.parse(request)
             .docs().stream().findFirst().orElseThrow(() -> new IllegalStateException("Test object not parsed"));
     }

modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java

Lines changed: 4 additions & 4 deletions
@@ -36,7 +36,6 @@
 import org.elasticsearch.Version;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.get.GetRequest;
-import org.elasticsearch.common.xcontent.ParseField;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.InputStreamStreamInput;
 import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput;
@@ -46,23 +45,24 @@
 import org.elasticsearch.common.xcontent.ConstructingObjectParser;
 import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
 import org.elasticsearch.common.xcontent.NamedXContentRegistry;
+import org.elasticsearch.common.xcontent.ParseField;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentHelper;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.index.fielddata.IndexFieldData;
 import org.elasticsearch.index.fielddata.IndexFieldDataCache;
+import org.elasticsearch.index.mapper.LuceneDocument;
 import org.elasticsearch.index.mapper.MappedFieldType;
-import org.elasticsearch.index.mapper.ParseContext;
 import org.elasticsearch.index.mapper.ParsedDocument;
 import org.elasticsearch.index.mapper.SourceToParse;
 import org.elasticsearch.index.query.AbstractQueryBuilder;
 import org.elasticsearch.index.query.QueryBuilder;
 import org.elasticsearch.index.query.QueryRewriteContext;
-import org.elasticsearch.index.query.SearchExecutionContext;
 import org.elasticsearch.index.query.QueryShardException;
 import org.elasticsearch.index.query.Rewriteable;
+import org.elasticsearch.index.query.SearchExecutionContext;
 import org.elasticsearch.indices.breaker.CircuitBreakerService;
 import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
@@ -509,7 +509,7 @@ static IndexSearcher createMultiDocumentSearcher(Analyzer analyzer, Collection<P
         Directory directory = new ByteBuffersDirectory();
         try (IndexWriter indexWriter = new IndexWriter(directory, new IndexWriterConfig(analyzer))) {
             // Indexing in order here, so that the user provided order matches with the docid sequencing:
-            Iterable<ParseContext.Document> iterable = () -> docs.stream()
+            Iterable<LuceneDocument> iterable = () -> docs.stream()
                 .map(ParsedDocument::docs)
                 .flatMap(Collection::stream)
                 .iterator();

modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java

Lines changed: 2 additions & 1 deletion
@@ -46,6 +46,7 @@
 import org.elasticsearch.index.mapper.ContentPath;
 import org.elasticsearch.index.mapper.FieldMapper;
 import org.elasticsearch.index.mapper.KeywordFieldMapper;
+import org.elasticsearch.index.mapper.LuceneDocument;
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.Mapper;
 import org.elasticsearch.index.mapper.MapperParsingException;
@@ -381,7 +382,7 @@ static void createQueryBuilderField(Version indexVersion, BinaryFieldMapper qbFi
     }
 
     void processQuery(Query query, ParseContext context) {
-        ParseContext.Document doc = context.doc();
+        LuceneDocument doc = context.doc();
         PercolatorFieldType pft = (PercolatorFieldType) this.fieldType();
         QueryAnalyzer.Result result;
         result = QueryAnalyzer.analyze(query);

modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java

Lines changed: 16 additions & 15 deletions
@@ -66,7 +66,6 @@
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.Version;
-import org.elasticsearch.core.CheckedFunction;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.bytes.BytesReference;
@@ -75,8 +74,10 @@
 import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentFactory;
+import org.elasticsearch.core.CheckedFunction;
 import org.elasticsearch.index.IndexService;
 import org.elasticsearch.index.mapper.DocumentMapper;
+import org.elasticsearch.index.mapper.LuceneDocument;
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.mapper.NumberFieldMapper;
@@ -230,7 +231,7 @@ public void testDuel() throws Exception {
             return new FunctionScoreQuery(innerQuery, minScore, 1f);
         });
 
-        List<ParseContext.Document> documents = new ArrayList<>();
+        List<LuceneDocument> documents = new ArrayList<>();
         for (Supplier<Query> queryFunction : queryFunctions) {
             Query query = queryFunction.get();
             addQuery(query, documents);
@@ -326,7 +327,7 @@ public void testDuel2() throws Exception {
         ranges.add(new int[]{15, 50});
 
         SearchExecutionContext context = createSearchContext(indexService).getSearchExecutionContext();
-        List<ParseContext.Document> documents = new ArrayList<>();
+        List<LuceneDocument> documents = new ArrayList<>();
         {
             addQuery(new TermQuery(new Term("string_field", randomFrom(stringValues))), documents);
         }
@@ -490,7 +491,7 @@ public void testDuelIdBased() throws Exception {
         queryFunctions.add((id) -> new MatchNoDocsQuery("no reason at all"));
 
         int numDocs = randomIntBetween(queryFunctions.size(), queryFunctions.size() * 3);
-        List<ParseContext.Document> documents = new ArrayList<>();
+        List<LuceneDocument> documents = new ArrayList<>();
         for (int i = 0; i < numDocs; i++) {
             String id = Integer.toString(i);
             Query query = queryFunctions.get(i % queryFunctions.size()).apply(id);
@@ -520,7 +521,7 @@ public void testDuelIdBased() throws Exception {
     }
 
     public void testDuelSpecificQueries() throws Exception {
-        List<ParseContext.Document> documents = new ArrayList<>();
+        List<LuceneDocument> documents = new ArrayList<>();
 
         BlendedTermQuery blendedTermQuery = BlendedTermQuery.dismaxBlendedQuery(new Term[]{new Term("field", "quick"),
             new Term("field", "brown"), new Term("field", "fox")}, 1.0f);
@@ -567,7 +568,7 @@ public void testDuelSpecificQueries() throws Exception {
     }
 
     public void testRangeQueries() throws Exception {
-        List<ParseContext.Document> docs = new ArrayList<>();
+        List<LuceneDocument> docs = new ArrayList<>();
         addQuery(IntPoint.newRangeQuery("int_field", 0, 5), docs);
         addQuery(LongPoint.newRangeQuery("long_field", 5L, 10L), docs);
         addQuery(HalfFloatPoint.newRangeQuery("half_float_field", 10, 15), docs);
@@ -634,7 +635,7 @@ public void testRangeQueries() throws Exception {
     }
 
     public void testDuelRangeQueries() throws Exception {
-        List<ParseContext.Document> documents = new ArrayList<>();
+        List<LuceneDocument> documents = new ArrayList<>();
 
         int lowerInt = randomIntBetween(0, 256);
         int upperInt = lowerInt + randomIntBetween(0, 32);
@@ -737,7 +738,7 @@ public void testDuelRangeQueries() throws Exception {
     }
 
     public void testPercolateMatchAll() throws Exception {
-        List<ParseContext.Document> docs = new ArrayList<>();
+        List<LuceneDocument> docs = new ArrayList<>();
         addQuery(new MatchAllDocsQuery(), docs);
         BooleanQuery.Builder builder = new BooleanQuery.Builder();
         builder.add(new TermQuery(new Term("field", "value1")), Occur.MUST);
@@ -783,7 +784,7 @@ public void testPercolateMatchAll() throws Exception {
     }
 
     public void testFunctionScoreQuery() throws Exception {
-        List<ParseContext.Document> docs = new ArrayList<>();
+        List<LuceneDocument> docs = new ArrayList<>();
         addQuery(new FunctionScoreQuery(new TermQuery(new Term("field", "value")), null, 1f), docs);
         addQuery(new FunctionScoreQuery(new TermQuery(new Term("field", "value")), 10f, 1f), docs);
         addQuery(new FunctionScoreQuery(new MatchAllDocsQuery(), null, 1f), docs);
@@ -808,7 +809,7 @@ public void testFunctionScoreQuery() throws Exception {
     }
 
     public void testPercolateSmallAndLargeDocument() throws Exception {
-        List<ParseContext.Document> docs = new ArrayList<>();
+        List<LuceneDocument> docs = new ArrayList<>();
         BooleanQuery.Builder builder = new BooleanQuery.Builder();
         builder.add(new TermQuery(new Term("field", "value1")), Occur.MUST);
         builder.add(new TermQuery(new Term("field", "value2")), Occur.MUST);
@@ -901,7 +902,7 @@ public void testPercolateSmallAndLargeDocument() throws Exception {
     }
 
     public void testDuplicatedClauses() throws Exception {
-        List<ParseContext.Document> docs = new ArrayList<>();
+        List<LuceneDocument> docs = new ArrayList<>();
 
         BooleanQuery.Builder builder = new BooleanQuery.Builder();
         BooleanQuery.Builder builder1 = new BooleanQuery.Builder();
@@ -950,7 +951,7 @@ public void testDuplicatedClauses() throws Exception {
     }
 
     public void testDuplicatedClauses2() throws Exception {
-        List<ParseContext.Document> docs = new ArrayList<>();
+        List<LuceneDocument> docs = new ArrayList<>();
 
         BooleanQuery.Builder builder = new BooleanQuery.Builder();
         builder.setMinimumNumberShouldMatch(3);
@@ -1001,7 +1002,7 @@
     public void testMsmAndRanges_disjunction() throws Exception {
         // Recreates a similar scenario that made testDuel() fail randomly:
         // https://github.com/elastic/elasticsearch/issues/29393
-        List<ParseContext.Document> docs = new ArrayList<>();
+        List<LuceneDocument> docs = new ArrayList<>();
         BooleanQuery.Builder builder = new BooleanQuery.Builder();
         builder.setMinimumNumberShouldMatch(2);
 
@@ -1101,11 +1102,11 @@ private void duelRun(PercolateQuery.QueryStore queryStore, MemoryIndex memoryInd
         }
     }
 
-    private void addQuery(Query query, List<ParseContext.Document> docs) {
+    private void addQuery(Query query, List<LuceneDocument> docs) {
         ParseContext.InternalParseContext parseContext = new ParseContext.InternalParseContext(
             documentMapper.mappers(), indexService.getIndexSettings(), indexService.getIndexAnalyzers(), null, null, null);
         fieldMapper.processQuery(query, parseContext);
-        ParseContext.Document queryDocument = parseContext.doc();
+        LuceneDocument queryDocument = parseContext.doc();
         // Add to string representation of the query to make debugging easier:
         queryDocument.add(new StoredField("query_to_string", query.toString()));
         docs.add(queryDocument);

modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java

Lines changed: 6 additions & 5 deletions
@@ -34,7 +34,6 @@
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.bytes.BytesReference;
-import org.elasticsearch.core.Tuple;
 import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.hash.MurmurHash3;
 import org.elasticsearch.common.io.stream.InputStreamStreamInput;
@@ -46,8 +45,10 @@
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.core.Tuple;
 import org.elasticsearch.index.IndexService;
 import org.elasticsearch.index.mapper.DocumentMapper;
+import org.elasticsearch.index.mapper.LuceneDocument;
 import org.elasticsearch.index.mapper.MapperParsingException;
 import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.mapper.ParseContext;
@@ -173,7 +174,7 @@ public void testExtractTerms() throws Exception {
         ParseContext.InternalParseContext parseContext = new ParseContext.InternalParseContext(documentMapper.mappers(),
             mapperService.getIndexSettings(), null, null, null, null);
         fieldMapper.processQuery(bq.build(), parseContext);
-        ParseContext.Document document = parseContext.doc();
+        LuceneDocument document = parseContext.doc();
 
         PercolatorFieldMapper.PercolatorFieldType fieldType = (PercolatorFieldMapper.PercolatorFieldType) fieldMapper.fieldType();
         assertThat(document.getField(fieldType.extractionResultField.name()).stringValue(), equalTo(EXTRACTION_COMPLETE));
@@ -225,7 +226,7 @@ public void testExtractRanges() throws Exception {
         ParseContext.InternalParseContext parseContext = new ParseContext.InternalParseContext(documentMapper.mappers(),
             mapperService.getIndexSettings(), null, null, null, null);
         fieldMapper.processQuery(bq.build(), parseContext);
-        ParseContext.Document document = parseContext.doc();
+        LuceneDocument document = parseContext.doc();
 
         PercolatorFieldMapper.PercolatorFieldType fieldType = (PercolatorFieldMapper.PercolatorFieldType) fieldMapper.fieldType();
         assertThat(document.getField(fieldType.extractionResultField.name()).stringValue(), equalTo(EXTRACTION_PARTIAL));
@@ -275,7 +276,7 @@ public void testExtractTermsAndRanges_failed() throws Exception {
         ParseContext.InternalParseContext parseContext = new ParseContext.InternalParseContext(documentMapper.mappers(),
             mapperService.getIndexSettings(), null, null, null, null);
         fieldMapper.processQuery(query, parseContext);
-        ParseContext.Document document = parseContext.doc();
+        LuceneDocument document = parseContext.doc();
 
         PercolatorFieldMapper.PercolatorFieldType fieldType = (PercolatorFieldMapper.PercolatorFieldType) fieldMapper.fieldType();
         assertThat(document.getFields().size(), equalTo(1));
@@ -290,7 +291,7 @@ public void testExtractTermsAndRanges_partial() throws Exception {
         ParseContext.InternalParseContext parseContext = new ParseContext.InternalParseContext(documentMapper.mappers(),
             mapperService.getIndexSettings(), null, null, null, null);
         fieldMapper.processQuery(phraseQuery, parseContext);
-        ParseContext.Document document = parseContext.doc();
+        LuceneDocument document = parseContext.doc();
 
         PercolatorFieldMapper.PercolatorFieldType fieldType = (PercolatorFieldMapper.PercolatorFieldType) fieldMapper.fieldType();
        assertThat(document.getFields().size(), equalTo(3));

modules/percolator/src/test/java/org/elasticsearch/percolator/QueryBuilderStoreTests.java

Lines changed: 3 additions & 2 deletions
@@ -19,14 +19,15 @@
 import org.apache.lucene.store.Directory;
 import org.elasticsearch.Version;
 import org.elasticsearch.cluster.metadata.IndexMetadata;
-import org.elasticsearch.core.CheckedFunction;
 import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.NamedXContentRegistry;
+import org.elasticsearch.core.CheckedFunction;
 import org.elasticsearch.index.fielddata.plain.BytesBinaryIndexFieldData;
 import org.elasticsearch.index.mapper.BinaryFieldMapper;
 import org.elasticsearch.index.mapper.ContentPath;
 import org.elasticsearch.index.mapper.KeywordFieldMapper;
+import org.elasticsearch.index.mapper.LuceneDocument;
 import org.elasticsearch.index.mapper.ParseContext;
 import org.elasticsearch.index.query.SearchExecutionContext;
 import org.elasticsearch.index.query.TermQueryBuilder;
@@ -68,7 +69,7 @@ public void testStoringQueryBuilders() throws IOException {
         for (int i = 0; i < queryBuilders.length; i++) {
             queryBuilders[i] = new TermQueryBuilder(randomAlphaOfLength(4), randomAlphaOfLength(8));
             ParseContext parseContext = mock(ParseContext.class);
-            ParseContext.Document document = new ParseContext.Document();
+            LuceneDocument document = new LuceneDocument();
             when(parseContext.doc()).thenReturn(document);
             PercolatorFieldMapper.createQueryBuilderField(version,
                 fieldMapper, queryBuilders[i], parseContext);

server/src/main/java/org/elasticsearch/index/engine/Engine.java

Lines changed: 7 additions & 7 deletions
@@ -30,27 +30,27 @@
 import org.apache.lucene.util.SetOnce;
 import org.elasticsearch.ExceptionsHelper;
 import org.elasticsearch.action.index.IndexRequest;
-import org.elasticsearch.core.CheckedRunnable;
-import org.elasticsearch.core.Nullable;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.collect.ImmutableOpenMap;
-import org.elasticsearch.core.Releasable;
-import org.elasticsearch.core.Releasables;
 import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.common.lucene.Lucene;
 import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
 import org.elasticsearch.common.lucene.uid.Versions;
 import org.elasticsearch.common.lucene.uid.VersionsAndSeqNoResolver;
 import org.elasticsearch.common.lucene.uid.VersionsAndSeqNoResolver.DocIdAndVersion;
 import org.elasticsearch.common.metrics.CounterMetric;
-import org.elasticsearch.core.TimeValue;
 import org.elasticsearch.common.util.concurrent.ReleasableLock;
+import org.elasticsearch.core.CheckedRunnable;
+import org.elasticsearch.core.Nullable;
+import org.elasticsearch.core.Releasable;
+import org.elasticsearch.core.Releasables;
+import org.elasticsearch.core.TimeValue;
 import org.elasticsearch.index.VersionType;
 import org.elasticsearch.index.mapper.DocumentParser;
 import org.elasticsearch.index.mapper.IdFieldMapper;
+import org.elasticsearch.index.mapper.LuceneDocument;
 import org.elasticsearch.index.mapper.Mapping;
 import org.elasticsearch.index.mapper.MappingLookup;
-import org.elasticsearch.index.mapper.ParseContext.Document;
 import org.elasticsearch.index.mapper.ParsedDocument;
 import org.elasticsearch.index.mapper.Uid;
 import org.elasticsearch.index.merge.MergeStats;
@@ -1346,7 +1346,7 @@ public String routing() {
             return this.doc.routing();
         }
 
-        public List<Document> docs() {
+        public List<LuceneDocument> docs() {
             return this.doc.docs();
         }