Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -274,7 +274,10 @@ public static List<MappedNameValue> decodeAsMap(BytesRef value) throws IOExcepti
List<NameValue> nameValues = decode(value);
List<MappedNameValue> mappedValues = new ArrayList<>(nameValues.size());
for (var nameValue : nameValues) {
mappedValues.add(nameValueToMapped(nameValue));
MappedNameValue mappedNameValue = nameValueToMapped(nameValue);
if (mappedNameValue != null) {
mappedValues.add(mappedNameValue);
}
}
return mappedValues;
}
Expand Down Expand Up @@ -351,6 +354,9 @@ public void writeIgnoredFields(Collection<NameValue> ignoredFieldValues) {
public BytesRef filterValue(BytesRef value, Function<Map<String, Object>, Map<String, Object>> filter) throws IOException {
// for _ignored_source, parse, filter out the field and its contents, and serialize back downstream
IgnoredSourceFieldMapper.MappedNameValue mappedNameValue = LegacyIgnoredSourceEncoding.decodeAsMap(value);
if (mappedNameValue == null) {
return null;
}
Map<String, Object> transformedField = filter.apply(mappedNameValue.map());
if (transformedField.isEmpty()) {
// All values were filtered
Expand Down Expand Up @@ -554,6 +560,9 @@ public MappedNameValue withMap(Map<String, Object> map) {
}

private static MappedNameValue nameValueToMapped(NameValue nameValue) throws IOException {
if (nameValue.hasValue() == false) {
return null;
}
XContentBuilder xContentBuilder = XContentBuilder.builder(XContentDataHelper.getXContentType(nameValue.value()).xContent());
xContentBuilder.startObject().field(nameValue.name());
XContentDataHelper.decodeAndWrite(xContentBuilder, nameValue.value());
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -377,6 +377,31 @@ public void testEncodeArrayToMapAndDecode() throws IOException {
assertEquals(bytes, IgnoredSourceFieldMapper.CoalescedIgnoredSourceEncoding.encodeFromMap(List.of(mappedNameValue)));
}

/**
 * Decoding a coalesced {@code _ignored_source} blob whose only entry is VOID (an entry
 * carrying no stored value, e.g. a copy_to destination) must silently drop that entry:
 * the decoded list is empty rather than containing a null element.
 */
public void testCoalescedDecodeAsMapReturnsNullForVoidEntry() throws IOException {
    IgnoredSourceFieldMapper.NameValue voidEntry = new IgnoredSourceFieldMapper.NameValue(
        "target_field",
        0,
        XContentDataHelper.voidValue(),
        null
    );
    // Round-trip a single VOID entry through the coalesced encoding.
    BytesRef encoded = IgnoredSourceFieldMapper.CoalescedIgnoredSourceEncoding.encode(List.of(voidEntry));
    List<IgnoredSourceFieldMapper.MappedNameValue> decoded = IgnoredSourceFieldMapper.CoalescedIgnoredSourceEncoding
        .decodeAsMap(encoded);
    assertEquals(0, decoded.size());
}

/**
 * The legacy single-entry {@code _ignored_source} decoder signals a VOID entry (one
 * without a stored value) by returning {@code null}, telling callers to skip it.
 */
public void testLegacyDecodeAsMapReturnsNullForVoidEntry() throws IOException {
    IgnoredSourceFieldMapper.NameValue voidEntry = new IgnoredSourceFieldMapper.NameValue(
        "target_field",
        0,
        XContentDataHelper.voidValue(),
        null
    );
    // Round-trip the VOID entry through the legacy encoding and expect null back.
    BytesRef encoded = IgnoredSourceFieldMapper.LegacyIgnoredSourceEncoding.encode(voidEntry);
    assertNull(IgnoredSourceFieldMapper.LegacyIgnoredSourceEncoding.decodeAsMap(encoded));
}

public void testMultipleIgnoredFieldsRootObject() throws IOException {
boolean booleanValue = randomBoolean();
int intValue = randomInt();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1677,6 +1677,81 @@ public void testProducesStoredFieldsReader() throws Exception {
IOUtils.close(ir, iw, dir);
}

/**
 * Synthetic source reconstruction under field-level security with the coalesced
 * {@code _ignored_source} format: {@code user} and {@code domain} copy into
 * {@code catch_all}, which FLS hides. Reconstruction must still yield the two
 * visible fields instead of failing on the filtered-out copy_to entry.
 */
public void testSyntheticSourceWithCopyToAndFLSCoalesced() throws Exception {
    final DocumentMapper mapper = createMapperService(
        Settings.builder().put("index.mapping.source.mode", "synthetic").build(),
        mapping(b -> {
            b.startObject("user").field("type", "keyword").field("copy_to", "catch_all").endObject();
            b.startObject("domain").field("type", "keyword").field("copy_to", "catch_all").endObject();
            b.startObject("catch_all").field("type", "text").endObject();
        })
    ).documentMapper();

    // Writer joins the try-with-resources so it is closed even when an assertion fails
    // (previously it leaked on failure, and IOUtils.close also double-closed the directory
    // already managed by the outer try). Close order: reader, then writer, then directory.
    try (Directory directory = newDirectory(); IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig())) {
        final ParsedDocument doc = mapper.parse(source(b -> {
            b.field("user", "darth.vader");
            b.field("domain", "empire.gov");
        }));
        writer.addDocuments(doc.docs());
        writer.commit();

        // Grant user/domain (plus _ignored_source) but not catch_all, mimicking FLS.
        final Automaton automaton = Automatons.patterns(Arrays.asList("user", "domain", IgnoredSourceFieldMapper.NAME));
        try (
            DirectoryReader reader = FieldSubsetReader.wrap(
                DirectoryReader.open(writer),
                new CharacterRunAutomaton(automaton),
                IgnoredSourceFieldMapper.IgnoredSourceFormat.COALESCED_SINGLE_IGNORED_SOURCE,
                (fieldName) -> false
            )
        ) {
            assertEquals(
                "{\"domain\":\"empire.gov\",\"user\":\"darth.vader\"}",
                syntheticSource(mapper, reader, doc.docs().size() - 1)
            );
        }
    }
}

/**
 * Same scenario as the coalesced variant, but on an index version that still writes the
 * legacy single-entry {@code _ignored_source} format: copy_to into an FLS-hidden
 * {@code catch_all} must not break synthetic source reconstruction of visible fields.
 */
public void testSyntheticSourceWithCopyToAndFLSLegacy() throws Exception {
    final DocumentMapper mapper = createMapperService(
        IndexVersions.MATCH_ONLY_TEXT_STORED_AS_BYTES, // before IGNORED_SOURCE_COALESCED_ENTRIES_WITH_FF
        Settings.builder().put("index.mapping.source.mode", "synthetic").build(),
        mapping(b -> {
            b.startObject("user").field("type", "keyword").field("copy_to", "catch_all").endObject();
            b.startObject("domain").field("type", "keyword").field("copy_to", "catch_all").endObject();
            b.startObject("catch_all").field("type", "text").endObject();
        })
    ).documentMapper();

    // Writer joins the try-with-resources so it is closed even when an assertion fails
    // (previously it leaked on failure, and IOUtils.close also double-closed the directory
    // already managed by the outer try). Close order: reader, then writer, then directory.
    try (Directory directory = newDirectory(); IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig())) {
        final ParsedDocument doc = mapper.parse(source(b -> {
            b.field("user", "darth.vader");
            b.field("domain", "empire.gov");
        }));
        writer.addDocuments(doc.docs());
        writer.commit();

        // Grant user/domain (plus _ignored_source) but not catch_all, mimicking FLS.
        final Automaton automaton = Automatons.patterns(Arrays.asList("user", "domain", IgnoredSourceFieldMapper.NAME));
        try (
            DirectoryReader reader = FieldSubsetReader.wrap(
                DirectoryReader.open(writer),
                new CharacterRunAutomaton(automaton),
                IgnoredSourceFieldMapper.IgnoredSourceFormat.LEGACY_SINGLE_IGNORED_SOURCE,
                (fieldName) -> false
            )
        ) {
            assertEquals(
                "{\"domain\":\"empire.gov\",\"user\":\"darth.vader\"}",
                syntheticSource(mapper, reader, doc.docs().size() - 1)
            );
        }
    }
}

private static final String DOC_TEST_ITEM = """
{
"field_text" : "text",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -624,3 +624,135 @@ Field with ignored_malformed:
- is_false: "hits.hits.0._source.secret"
- match: { hits.hits.1._source.name: B }
- is_false: "hits.hits.1._source.secret"

---
Fields with copy_to, field level security and synthetic source:
  # copy_to into an FLS-hidden field must not break synthetic source for visible fields.
  - do:
      indices.create:
        index: test-fls-copy-to-synthetic
        body:
          settings:
            index:
              mapping.source.mode: synthetic
          mappings:
            properties:
              user:
                type: keyword
                copy_to: catch_all
              domain:
                type: keyword
                copy_to: catch_all
              catch_all:
                type: keyword

  - do:
      bulk:
        index: test-fls-copy-to-synthetic
        refresh: true
        body:
          - '{"create": { }}'
          - '{"user": "darth.vader", "domain": "empire.gov"}'
  - match: { errors: false }

  # API key granting everything except the copy_to destination.
  - do:
      security.create_api_key:
        body:
          name: "test-fls"
          expiration: "1d"
          role_descriptors:
            index_access:
              indices:
                - names: [ "test-fls-copy-to-synthetic" ]
                  privileges: [ "read" ]
                  field_security:
                    grant: [ "*" ]
                    except: [ "catch_all" ]
  - match: { name: "test-fls" }
  - is_true: id
  - set:
      id: api_key_id
      encoded: credentials

  # With superuser
  - do:
      search:
        index: test-fls-copy-to-synthetic
  - match: { hits.total.value: 1 }
  - match: { hits.hits.0._source.user: "darth.vader" }
  - match: { hits.hits.0._source.domain: "empire.gov" }
  - is_false: "hits.hits.0._source.catch_all"

  # With FLS API Key
  - do:
      headers:
        Authorization: "ApiKey ${credentials}"
      search:
        index: test-fls-copy-to-synthetic
  - match: { hits.total.value: 1 }
  - match: { hits.hits.0._source.user: "darth.vader" }
  - match: { hits.hits.0._source.domain: "empire.gov" }
  - is_false: "hits.hits.0._source.catch_all"

---
Fields with copy_to and skip_ignored_source_read workaround:
  # Setting skip_ignored_source_read skips _ignored_source during synthetic source reconstruction
  - do:
      indices.create:
        index: test-fls-copy-to-skip-ignored
        body:
          settings:
            index:
              mapping.source.mode: synthetic
              mapping.synthetic_source.skip_ignored_source_read: true
          mappings:
            properties:
              user:
                type: keyword
                copy_to: catch_all
              domain:
                type: keyword
                copy_to: catch_all
              catch_all:
                type: keyword

  - do:
      bulk:
        index: test-fls-copy-to-skip-ignored
        refresh: true
        body:
          - '{"create": { }}'
          - '{"user": "luke.skywalker", "domain": "tatooine.org"}'
  - match: { errors: false }

  # API key granting everything except the copy_to destination.
  - do:
      security.create_api_key:
        body:
          name: "test-fls-skip-ignored"
          expiration: "1d"
          role_descriptors:
            index_access:
              indices:
                - names: [ "test-fls-copy-to-skip-ignored" ]
                  privileges: [ "read" ]
                  field_security:
                    grant: [ "*" ]
                    except: [ "catch_all" ]
  - match: { name: "test-fls-skip-ignored" }
  - is_true: id
  - set:
      id: api_key_id
      encoded: credentials

  # With superuser: search succeeds but source may be incomplete since _ignored_source is skipped
  - do:
      search:
        index: test-fls-copy-to-skip-ignored
  - match: { hits.total.value: 1 }

  # With FLS API Key: search succeeds without crash since _ignored_source VOID entries are not loaded
  - do:
      headers:
        Authorization: "ApiKey ${credentials}"
      search:
        index: test-fls-copy-to-skip-ignored
  - match: { hits.total.value: 1 }
Loading