Skip to content

Commit 7d0ef61

Browse files
authored
Make Document a top-level class (elastic#74472)
There is no reason for Document to be an inner class of ParseContext, especially as it is public and accessed directly from many different places. This commit takes it out to its own top-level class file, which has the advantage of simplifying ParseContext which could use some love too.
1 parent d851c3c commit 7d0ef61

File tree

48 files changed

+331
-318
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

48 files changed

+331
-318
lines changed

modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/RankFeatureFieldMapperTests.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -38,7 +38,7 @@ protected void registerParameters(ParameterChecker checker) throws IOException {
3838
}
3939

4040
@Override
41-
protected void assertExistsQuery(MappedFieldType fieldType, Query query, ParseContext.Document fields) {
41+
protected void assertExistsQuery(MappedFieldType fieldType, Query query, LuceneDocument fields) {
4242
assertThat(query, instanceOf(TermQuery.class));
4343
TermQuery termQuery = (TermQuery) query;
4444
assertEquals("_feature", termQuery.getTerm().field());

modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/TokenCountFieldMapperTests.java

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -139,19 +139,19 @@ public TokenStreamComponents createComponents(String fieldName) {
139139

140140
public void testParseNullValue() throws Exception {
141141
DocumentMapper mapper = createIndexWithTokenCountField();
142-
ParseContext.Document doc = parseDocument(mapper, createDocument(null));
142+
LuceneDocument doc = parseDocument(mapper, createDocument(null));
143143
assertNull(doc.getField("test.tc"));
144144
}
145145

146146
public void testParseEmptyValue() throws Exception {
147147
DocumentMapper mapper = createIndexWithTokenCountField();
148-
ParseContext.Document doc = parseDocument(mapper, createDocument(""));
148+
LuceneDocument doc = parseDocument(mapper, createDocument(""));
149149
assertEquals(0, doc.getField("test.tc").numericValue());
150150
}
151151

152152
public void testParseNotNullValue() throws Exception {
153153
DocumentMapper mapper = createIndexWithTokenCountField();
154-
ParseContext.Document doc = parseDocument(mapper, createDocument("three tokens string"));
154+
LuceneDocument doc = parseDocument(mapper, createDocument("three tokens string"));
155155
assertEquals(3, doc.getField("test.tc").numericValue());
156156
}
157157

@@ -179,7 +179,7 @@ private SourceToParse createDocument(String fieldValue) throws Exception {
179179
return source(b -> b.field("test", fieldValue));
180180
}
181181

182-
private ParseContext.Document parseDocument(DocumentMapper mapper, SourceToParse request) {
182+
private LuceneDocument parseDocument(DocumentMapper mapper, SourceToParse request) {
183183
return mapper.parse(request)
184184
.docs().stream().findFirst().orElseThrow(() -> new IllegalStateException("Test object not parsed"));
185185
}

modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -56,8 +56,8 @@
5656
import org.elasticsearch.common.xcontent.XContentType;
5757
import org.elasticsearch.index.fielddata.IndexFieldData;
5858
import org.elasticsearch.index.fielddata.IndexFieldDataCache;
59+
import org.elasticsearch.index.mapper.LuceneDocument;
5960
import org.elasticsearch.index.mapper.MappedFieldType;
60-
import org.elasticsearch.index.mapper.ParseContext;
6161
import org.elasticsearch.index.mapper.ParsedDocument;
6262
import org.elasticsearch.index.mapper.SourceToParse;
6363
import org.elasticsearch.index.query.AbstractQueryBuilder;
@@ -596,7 +596,7 @@ static IndexSearcher createMultiDocumentSearcher(Analyzer analyzer, Collection<P
596596
Directory directory = new ByteBuffersDirectory();
597597
try (IndexWriter indexWriter = new IndexWriter(directory, new IndexWriterConfig(analyzer))) {
598598
// Indexing in order here, so that the user provided order matches with the docid sequencing:
599-
Iterable<ParseContext.Document> iterable = () -> docs.stream()
599+
Iterable<LuceneDocument> iterable = () -> docs.stream()
600600
.map(ParsedDocument::docs)
601601
.flatMap(Collection::stream)
602602
.iterator();

modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -50,6 +50,7 @@
5050
import org.elasticsearch.index.mapper.ContentPath;
5151
import org.elasticsearch.index.mapper.FieldMapper;
5252
import org.elasticsearch.index.mapper.KeywordFieldMapper;
53+
import org.elasticsearch.index.mapper.LuceneDocument;
5354
import org.elasticsearch.index.mapper.MappedFieldType;
5455
import org.elasticsearch.index.mapper.Mapper;
5556
import org.elasticsearch.index.mapper.MapperParsingException;
@@ -399,7 +400,7 @@ static void createQueryBuilderField(Version indexVersion, BinaryFieldMapper qbFi
399400
}
400401

401402
void processQuery(Query query, ParseContext context) {
402-
ParseContext.Document doc = context.doc();
403+
LuceneDocument doc = context.doc();
403404
PercolatorFieldType pft = (PercolatorFieldType) this.fieldType();
404405
QueryAnalyzer.Result result;
405406
Version indexVersion = context.indexSettings().getIndexVersionCreated();

modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java

Lines changed: 15 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -78,6 +78,7 @@
7878
import org.elasticsearch.common.xcontent.XContentFactory;
7979
import org.elasticsearch.index.IndexService;
8080
import org.elasticsearch.index.mapper.DocumentMapper;
81+
import org.elasticsearch.index.mapper.LuceneDocument;
8182
import org.elasticsearch.index.mapper.MappedFieldType;
8283
import org.elasticsearch.index.mapper.MapperService;
8384
import org.elasticsearch.index.mapper.NumberFieldMapper;
@@ -230,7 +231,7 @@ public void testDuel() throws Exception {
230231
return new FunctionScoreQuery(innerQuery, minScore, 1f);
231232
});
232233

233-
List<ParseContext.Document> documents = new ArrayList<>();
234+
List<LuceneDocument> documents = new ArrayList<>();
234235
for (Supplier<Query> queryFunction : queryFunctions) {
235236
Query query = queryFunction.get();
236237
addQuery(query, documents);
@@ -326,7 +327,7 @@ public void testDuel2() throws Exception {
326327
ranges.add(new int[]{15, 50});
327328

328329
SearchExecutionContext context = createSearchContext(indexService).getSearchExecutionContext();
329-
List<ParseContext.Document> documents = new ArrayList<>();
330+
List<LuceneDocument> documents = new ArrayList<>();
330331
{
331332
addQuery(new TermQuery(new Term("string_field", randomFrom(stringValues))), documents);
332333
}
@@ -490,7 +491,7 @@ public void testDuelIdBased() throws Exception {
490491
queryFunctions.add((id) -> new MatchNoDocsQuery("no reason at all"));
491492

492493
int numDocs = randomIntBetween(queryFunctions.size(), queryFunctions.size() * 3);
493-
List<ParseContext.Document> documents = new ArrayList<>();
494+
List<LuceneDocument> documents = new ArrayList<>();
494495
for (int i = 0; i < numDocs; i++) {
495496
String id = Integer.toString(i);
496497
Query query = queryFunctions.get(i % queryFunctions.size()).apply(id);
@@ -520,7 +521,7 @@ public void testDuelIdBased() throws Exception {
520521
}
521522

522523
public void testDuelSpecificQueries() throws Exception {
523-
List<ParseContext.Document> documents = new ArrayList<>();
524+
List<LuceneDocument> documents = new ArrayList<>();
524525

525526
CommonTermsQuery commonTermsQuery = new CommonTermsQuery(Occur.SHOULD, Occur.SHOULD, 128);
526527
commonTermsQuery.add(new Term("field", "quick"));
@@ -573,7 +574,7 @@ public void testDuelSpecificQueries() throws Exception {
573574
}
574575

575576
public void testRangeQueries() throws Exception {
576-
List<ParseContext.Document> docs = new ArrayList<>();
577+
List<LuceneDocument> docs = new ArrayList<>();
577578
addQuery(IntPoint.newRangeQuery("int_field", 0, 5), docs);
578579
addQuery(LongPoint.newRangeQuery("long_field", 5L, 10L), docs);
579580
addQuery(HalfFloatPoint.newRangeQuery("half_float_field", 10, 15), docs);
@@ -640,7 +641,7 @@ public void testRangeQueries() throws Exception {
640641
}
641642

642643
public void testDuelRangeQueries() throws Exception {
643-
List<ParseContext.Document> documents = new ArrayList<>();
644+
List<LuceneDocument> documents = new ArrayList<>();
644645

645646
int lowerInt = randomIntBetween(0, 256);
646647
int upperInt = lowerInt + randomIntBetween(0, 32);
@@ -743,7 +744,7 @@ public void testDuelRangeQueries() throws Exception {
743744
}
744745

745746
public void testPercolateMatchAll() throws Exception {
746-
List<ParseContext.Document> docs = new ArrayList<>();
747+
List<LuceneDocument> docs = new ArrayList<>();
747748
addQuery(new MatchAllDocsQuery(), docs);
748749
BooleanQuery.Builder builder = new BooleanQuery.Builder();
749750
builder.add(new TermQuery(new Term("field", "value1")), Occur.MUST);
@@ -789,7 +790,7 @@ public void testPercolateMatchAll() throws Exception {
789790
}
790791

791792
public void testFunctionScoreQuery() throws Exception {
792-
List<ParseContext.Document> docs = new ArrayList<>();
793+
List<LuceneDocument> docs = new ArrayList<>();
793794
addQuery(new FunctionScoreQuery(new TermQuery(new Term("field", "value")), null, 1f), docs);
794795
addQuery(new FunctionScoreQuery(new TermQuery(new Term("field", "value")), 10f, 1f), docs);
795796
addQuery(new FunctionScoreQuery(new MatchAllDocsQuery(), null, 1f), docs);
@@ -814,7 +815,7 @@ public void testFunctionScoreQuery() throws Exception {
814815
}
815816

816817
public void testPercolateSmallAndLargeDocument() throws Exception {
817-
List<ParseContext.Document> docs = new ArrayList<>();
818+
List<LuceneDocument> docs = new ArrayList<>();
818819
BooleanQuery.Builder builder = new BooleanQuery.Builder();
819820
builder.add(new TermQuery(new Term("field", "value1")), Occur.MUST);
820821
builder.add(new TermQuery(new Term("field", "value2")), Occur.MUST);
@@ -907,7 +908,7 @@ public void testPercolateSmallAndLargeDocument() throws Exception {
907908
}
908909

909910
public void testDuplicatedClauses() throws Exception {
910-
List<ParseContext.Document> docs = new ArrayList<>();
911+
List<LuceneDocument> docs = new ArrayList<>();
911912

912913
BooleanQuery.Builder builder = new BooleanQuery.Builder();
913914
BooleanQuery.Builder builder1 = new BooleanQuery.Builder();
@@ -956,7 +957,7 @@ public void testDuplicatedClauses() throws Exception {
956957
}
957958

958959
public void testDuplicatedClauses2() throws Exception {
959-
List<ParseContext.Document> docs = new ArrayList<>();
960+
List<LuceneDocument> docs = new ArrayList<>();
960961

961962
BooleanQuery.Builder builder = new BooleanQuery.Builder();
962963
builder.setMinimumNumberShouldMatch(3);
@@ -1007,7 +1008,7 @@ public void testDuplicatedClauses2() throws Exception {
10071008
public void testMsmAndRanges_disjunction() throws Exception {
10081009
// Recreates a similar scenario that made testDuel() fail randomly:
10091010
// https://github.com/elastic/elasticsearch/issues/29393
1010-
List<ParseContext.Document> docs = new ArrayList<>();
1011+
List<LuceneDocument> docs = new ArrayList<>();
10111012
BooleanQuery.Builder builder = new BooleanQuery.Builder();
10121013
builder.setMinimumNumberShouldMatch(2);
10131014

@@ -1107,11 +1108,11 @@ private void duelRun(PercolateQuery.QueryStore queryStore, MemoryIndex memoryInd
11071108
}
11081109
}
11091110

1110-
private void addQuery(Query query, List<ParseContext.Document> docs) {
1111+
private void addQuery(Query query, List<LuceneDocument> docs) {
11111112
ParseContext.InternalParseContext parseContext = new ParseContext.InternalParseContext(
11121113
documentMapper.mappers(), indexService.getIndexSettings(), indexService.getIndexAnalyzers(), null, null, null);
11131114
fieldMapper.processQuery(query, parseContext);
1114-
ParseContext.Document queryDocument = parseContext.doc();
1115+
LuceneDocument queryDocument = parseContext.doc();
11151116
// Add to string representation of the query to make debugging easier:
11161117
queryDocument.add(new StoredField("query_to_string", query.toString()));
11171118
docs.add(queryDocument);

modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -48,6 +48,7 @@
4848
import org.elasticsearch.common.xcontent.XContentType;
4949
import org.elasticsearch.index.IndexService;
5050
import org.elasticsearch.index.mapper.DocumentMapper;
51+
import org.elasticsearch.index.mapper.LuceneDocument;
5152
import org.elasticsearch.index.mapper.MapperParsingException;
5253
import org.elasticsearch.index.mapper.MapperService;
5354
import org.elasticsearch.index.mapper.ParseContext;
@@ -173,7 +174,7 @@ public void testExtractTerms() throws Exception {
173174
ParseContext.InternalParseContext parseContext = new ParseContext.InternalParseContext(documentMapper.mappers(),
174175
mapperService.getIndexSettings(), null, null, null, null);
175176
fieldMapper.processQuery(bq.build(), parseContext);
176-
ParseContext.Document document = parseContext.doc();
177+
LuceneDocument document = parseContext.doc();
177178

178179
PercolatorFieldMapper.PercolatorFieldType fieldType = (PercolatorFieldMapper.PercolatorFieldType) fieldMapper.fieldType();
179180
assertThat(document.getField(fieldType.extractionResultField.name()).stringValue(), equalTo(EXTRACTION_COMPLETE));
@@ -225,7 +226,7 @@ public void testExtractRanges() throws Exception {
225226
ParseContext.InternalParseContext parseContext = new ParseContext.InternalParseContext(documentMapper.mappers(),
226227
mapperService.getIndexSettings(), null, null, null, null);
227228
fieldMapper.processQuery(bq.build(), parseContext);
228-
ParseContext.Document document = parseContext.doc();
229+
LuceneDocument document = parseContext.doc();
229230

230231
PercolatorFieldMapper.PercolatorFieldType fieldType = (PercolatorFieldMapper.PercolatorFieldType) fieldMapper.fieldType();
231232
assertThat(document.getField(fieldType.extractionResultField.name()).stringValue(), equalTo(EXTRACTION_PARTIAL));
@@ -275,7 +276,7 @@ public void testExtractTermsAndRanges_failed() throws Exception {
275276
ParseContext.InternalParseContext parseContext = new ParseContext.InternalParseContext(documentMapper.mappers(),
276277
mapperService.getIndexSettings(), null, null, null, null);
277278
fieldMapper.processQuery(query, parseContext);
278-
ParseContext.Document document = parseContext.doc();
279+
LuceneDocument document = parseContext.doc();
279280

280281
PercolatorFieldMapper.PercolatorFieldType fieldType = (PercolatorFieldMapper.PercolatorFieldType) fieldMapper.fieldType();
281282
assertThat(document.getFields().size(), equalTo(1));
@@ -290,7 +291,7 @@ public void testExtractTermsAndRanges_partial() throws Exception {
290291
ParseContext.InternalParseContext parseContext = new ParseContext.InternalParseContext(documentMapper.mappers(),
291292
mapperService.getIndexSettings(), null, null, null, null);
292293
fieldMapper.processQuery(phraseQuery, parseContext);
293-
ParseContext.Document document = parseContext.doc();
294+
LuceneDocument document = parseContext.doc();
294295

295296
PercolatorFieldMapper.PercolatorFieldType fieldType = (PercolatorFieldMapper.PercolatorFieldType) fieldMapper.fieldType();
296297
assertThat(document.getFields().size(), equalTo(3));

modules/percolator/src/test/java/org/elasticsearch/percolator/QueryBuilderStoreTests.java

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -27,6 +27,7 @@
2727
import org.elasticsearch.index.mapper.BinaryFieldMapper;
2828
import org.elasticsearch.index.mapper.ContentPath;
2929
import org.elasticsearch.index.mapper.KeywordFieldMapper;
30+
import org.elasticsearch.index.mapper.LuceneDocument;
3031
import org.elasticsearch.index.mapper.ParseContext;
3132
import org.elasticsearch.index.query.SearchExecutionContext;
3233
import org.elasticsearch.index.query.TermQueryBuilder;
@@ -68,7 +69,7 @@ public void testStoringQueryBuilders() throws IOException {
6869
for (int i = 0; i < queryBuilders.length; i++) {
6970
queryBuilders[i] = new TermQueryBuilder(randomAlphaOfLength(4), randomAlphaOfLength(8));
7071
ParseContext parseContext = mock(ParseContext.class);
71-
ParseContext.Document document = new ParseContext.Document();
72+
LuceneDocument document = new LuceneDocument();
7273
when(parseContext.doc()).thenReturn(document);
7374
PercolatorFieldMapper.createQueryBuilderField(version,
7475
fieldMapper, queryBuilders[i], parseContext);

server/src/main/java/org/elasticsearch/index/engine/Engine.java

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -54,7 +54,7 @@
5454
import org.elasticsearch.index.mapper.DocumentParser;
5555
import org.elasticsearch.index.mapper.Mapping;
5656
import org.elasticsearch.index.mapper.MappingLookup;
57-
import org.elasticsearch.index.mapper.ParseContext.Document;
57+
import org.elasticsearch.index.mapper.LuceneDocument;
5858
import org.elasticsearch.index.mapper.ParsedDocument;
5959
import org.elasticsearch.index.merge.MergeStats;
6060
import org.elasticsearch.index.seqno.SeqNoStats;
@@ -1391,7 +1391,7 @@ public String routing() {
13911391
return this.doc.routing();
13921392
}
13931393

1394-
public List<Document> docs() {
1394+
public List<LuceneDocument> docs() {
13951395
return this.doc.docs();
13961396
}
13971397

server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java

Lines changed: 7 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -65,9 +65,9 @@
6565
import org.elasticsearch.index.VersionType;
6666
import org.elasticsearch.index.mapper.DocumentParser;
6767
import org.elasticsearch.index.mapper.IdFieldMapper;
68+
import org.elasticsearch.index.mapper.LuceneDocument;
6869
import org.elasticsearch.index.mapper.MapperService;
6970
import org.elasticsearch.index.mapper.MappingLookup;
70-
import org.elasticsearch.index.mapper.ParseContext;
7171
import org.elasticsearch.index.mapper.ParsedDocument;
7272
import org.elasticsearch.index.mapper.SeqNoFieldMapper;
7373
import org.elasticsearch.index.mapper.SourceFieldMapper;
@@ -1183,7 +1183,7 @@ private boolean mayHaveBeenIndexedBefore(Index index) {
11831183
return mayHaveBeenIndexBefore;
11841184
}
11851185

1186-
private void addDocs(final List<ParseContext.Document> docs, final IndexWriter indexWriter) throws IOException {
1186+
private void addDocs(final List<LuceneDocument> docs, final IndexWriter indexWriter) throws IOException {
11871187
if (docs.size() > 1) {
11881188
indexWriter.addDocuments(docs);
11891189
} else {
@@ -1192,9 +1192,9 @@ private void addDocs(final List<ParseContext.Document> docs, final IndexWriter i
11921192
numDocAppends.inc(docs.size());
11931193
}
11941194

1195-
private void addStaleDocs(final List<ParseContext.Document> docs, final IndexWriter indexWriter) throws IOException {
1195+
private void addStaleDocs(final List<LuceneDocument> docs, final IndexWriter indexWriter) throws IOException {
11961196
assert softDeleteEnabled : "Add history documents but soft-deletes is disabled";
1197-
for (ParseContext.Document doc : docs) {
1197+
for (LuceneDocument doc : docs) {
11981198
doc.add(softDeletesField); // soft-deleted every document before adding to Lucene
11991199
}
12001200
if (docs.size() > 1) {
@@ -1286,7 +1286,7 @@ private boolean assertDocDoesNotExist(final Index index, final boolean allowDele
12861286
return true;
12871287
}
12881288

1289-
private void updateDocs(final Term uid, final List<ParseContext.Document> docs, final IndexWriter indexWriter) throws IOException {
1289+
private void updateDocs(final Term uid, final List<LuceneDocument> docs, final IndexWriter indexWriter) throws IOException {
12901290
if (softDeleteEnabled) {
12911291
if (docs.size() > 1) {
12921292
indexWriter.softUpdateDocuments(uid, docs, softDeletesField);
@@ -1474,7 +1474,7 @@ private DeleteResult deleteInLucene(Delete delete, DeletionStrategy plan) throws
14741474
assert tombstone.docs().size() == 1 : "Tombstone doc should have single doc [" + tombstone + "]";
14751475
tombstone.updateSeqID(delete.seqNo(), delete.primaryTerm());
14761476
tombstone.version().setLongValue(plan.versionOfDeletion);
1477-
final ParseContext.Document doc = tombstone.docs().get(0);
1477+
final LuceneDocument doc = tombstone.docs().get(0);
14781478
assert doc.getField(SeqNoFieldMapper.TOMBSTONE_NAME) != null :
14791479
"Delete tombstone document but _tombstone field is not set [" + doc + " ]";
14801480
doc.add(softDeletesField);
@@ -1614,7 +1614,7 @@ private NoOpResult innerNoOp(final NoOp noOp) throws IOException {
16141614
// version field.
16151615
tombstone.version().setLongValue(1L);
16161616
assert tombstone.docs().size() == 1 : "Tombstone should have a single doc [" + tombstone + "]";
1617-
final ParseContext.Document doc = tombstone.docs().get(0);
1617+
final LuceneDocument doc = tombstone.docs().get(0);
16181618
assert doc.getField(SeqNoFieldMapper.TOMBSTONE_NAME) != null
16191619
: "Noop tombstone document but _tombstone field is not set [" + doc + " ]";
16201620
doc.add(softDeletesField);

server/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -328,7 +328,7 @@ int getMaxInputLength() {
328328
* "OBJECT" - { "input": STRING|ARRAY, "weight": STRING|INT, "contexts": ARRAY|OBJECT }
329329
*
330330
* Indexing:
331-
* if context mappings are defined, delegates to {@link ContextMappings#addField(ParseContext.Document, String, String, int, Map)}
331+
* if context mappings are defined, delegates to {@link ContextMappings#addField(LuceneDocument, String, String, int, Map)}
332332
* else adds inputs as a {@link org.apache.lucene.search.suggest.document.SuggestField}
333333
*/
334334
@Override

0 commit comments

Comments (0)