
Commit db639a8

Merge remote-tracking branch 'elastic/master' into more-snapshot-resiliency-testing-2
2 parents: af608cc + e24fd1b

11 files changed: +424, -32 lines

distribution/archives/build.gradle

Lines changed: 1 addition & 1 deletion
@@ -84,7 +84,7 @@ CopySpec archiveFiles(CopySpec modulesFiles, String distributionType, String pla
     rename { 'LICENSE.txt' }
   }

-  with noticeFile
+  with noticeFile(oss, jdk)
   into('modules') {
     with modulesFiles
   }

distribution/build.gradle

Lines changed: 23 additions & 7 deletions
@@ -52,11 +52,17 @@ task buildServerNotice(type: NoticeTask) {
 // other distributions include notices from modules as well, which are added below later
 task buildDefaultNotice(type: NoticeTask) {
   licensesDir new File(project(':server').projectDir, 'licenses')
+  licensesDir new File(project(':distribution').projectDir, 'licenses')
 }
-
-// other distributions include notices from modules as well, which are added below later
 task buildOssNotice(type: NoticeTask) {
   licensesDir new File(project(':server').projectDir, 'licenses')
+  licensesDir new File(project(':distribution').projectDir, 'licenses')
+}
+task buildDefaultNoJdkNotice(type: NoticeTask) {
+  licensesDir new File(project(':server').projectDir, 'licenses')
+}
+task buildOssNoJdkNotice(type: NoticeTask) {
+  licensesDir new File(project(':server').projectDir, 'licenses')
 }

 /*****************************************************************************

@@ -377,11 +383,21 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) {
     }
   }

-  noticeFile = copySpec {
-    if (project.name == 'integ-test-zip') {
-      from buildServerNotice
-    } else {
-      from buildDefaultNotice
+  noticeFile = { oss, jdk ->
+    copySpec {
+      if (project.name == 'integ-test-zip') {
+        from buildServerNotice
+      } else {
+        if (oss && jdk) {
+          from buildOssNotice
+        } else if (oss) {
+          from buildOssNoJdkNotice
+        } else if (jdk) {
+          from buildDefaultNotice
+        } else {
+          from buildDefaultNoJdkNotice
+        }
+      }
     }
   }

distribution/licenses/openjdk-LICENSE.txt

Lines changed: 347 additions & 0 deletions
Large diffs are not rendered by default.

distribution/licenses/openjdk-NOTICE.txt

Lines changed: 5 additions & 0 deletions
@@ -0,0 +1,5 @@
+Copyright (c) 1995, 2013, Oracle and/or its affiliates.
+
+OpenJDK is licensed under the GPLv2+CE. A copy of that license is included in
+this distribution immediately below this notice. You can find a copy of the
+OpenJDK source through the downloads page at https://elastic.co.

distribution/packages/build.gradle

Lines changed: 3 additions & 1 deletion
@@ -260,6 +260,9 @@ Closure commonPackageConfig(String type, boolean oss, boolean jdk) {

     // the oss package conflicts with the default distribution and vice versa
     conflicts('elasticsearch' + (oss ? '' : '-oss'))
+
+    into '/usr/share/elasticsearch'
+    with noticeFile(oss, jdk)
   }
 }

@@ -294,7 +297,6 @@ ospackage {
   permissionGroup 'root'

   into '/usr/share/elasticsearch'
-  with noticeFile
 }

 Closure commonDebConfig(boolean oss, boolean jdk) {

docs/python/index.asciidoc

Lines changed: 2 additions & 2 deletions
@@ -58,8 +58,8 @@ The recommended way to set your requirements in your `setup.py` or

 [source,txt]
 ------------------------------------
-# Elasticsearch 5.x
-elasticsearch>=5.0.0,<6.0.0
+# Elasticsearch 6.x
+elasticsearch>=6.0.0,<7.0.0

 # Elasticsearch 2.x
 elasticsearch2

docs/reference/docs/reindex.asciidoc

Lines changed: 5 additions & 2 deletions
@@ -118,8 +118,11 @@ POST _reindex
 // CONSOLE
 // TEST[setup:twitter]

-By default, version conflicts abort the `_reindex` process, but you can just
-count them by setting `"conflicts": "proceed"` in the request body:
+By default, version conflicts abort the `_reindex` process. The `"conflicts"` request body
+parameter can be used to instruct `_reindex` to proceed with the next document on version conflicts.
+It is important to note that the handling of other error types is unaffected by the `"conflicts"` parameter.
+When `"conflicts": "proceed"` is set in the request body, the `_reindex` process will continue on version conflicts
+and return a count of version conflicts encountered:

 [source,js]
 --------------------------------------------------
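
For reference, a minimal sketch of the request body that the updated paragraph describes, reusing the `twitter`/`new_twitter` indices from the surrounding examples (the `new_twitter` destination is illustrative; the concrete snippet in reindex.asciidoc continues past the truncated hunk above):

[source,js]
--------------------------------------------------
POST _reindex
{
  "conflicts": "proceed",
  "source": {
    "index": "twitter"
  },
  "dest": {
    "index": "new_twitter",
    "op_type": "create"
  }
}
--------------------------------------------------
// NOTCONSOLE

With `"conflicts": "proceed"`, a version conflict on an individual document no longer aborts the request; the response instead reports the number of version conflicts encountered (the `version_conflicts` counter), while other error types are handled as before.
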

docs/reference/ml/transforms.asciidoc

Lines changed: 8 additions & 8 deletions
@@ -134,7 +134,7 @@ PUT _ml/datafeeds/datafeed-test1
     "total_error_count": { <2>
       "script": {
         "lang": "expression",
-        "inline": "doc['error_count'].value + doc['aborted_count'].value"
+        "source": "doc['error_count'].value + doc['aborted_count'].value"
       }
     }
   }

@@ -239,7 +239,7 @@ PUT _ml/datafeeds/datafeed-test2
     "my_script_field": {
       "script": {
         "lang": "painless",
-        "inline": "doc['some_field'].value + '_' + doc['another_field'].value" <2>
+        "source": "doc['some_field'].value + '_' + doc['another_field'].value" <2>
       }
     }
   }

@@ -276,7 +276,7 @@ POST _ml/datafeeds/datafeed-test2/_update
     "my_script_field": {
       "script": {
         "lang": "painless",
-        "inline": "doc['another_field'].value.trim()" <1>
+        "source": "doc['another_field'].value.trim()" <1>
       }
     }
   }

@@ -312,7 +312,7 @@ POST _ml/datafeeds/datafeed-test2/_update
     "my_script_field": {
       "script": {
         "lang": "painless",
-        "inline": "doc['some_field'].value.toLowerCase()" <1>
+        "source": "doc['some_field'].value.toLowerCase()" <1>
       }
     }
   }

@@ -349,7 +349,7 @@ POST _ml/datafeeds/datafeed-test2/_update
     "my_script_field": {
      "script": {
         "lang": "painless",
-        "inline": "doc['some_field'].value.substring(0, 1).toUpperCase() + doc['some_field'].value.substring(1).toLowerCase()" <1>
+        "source": "doc['some_field'].value.substring(0, 1).toUpperCase() + doc['some_field'].value.substring(1).toLowerCase()" <1>
       }
     }
   }

@@ -386,7 +386,7 @@ POST _ml/datafeeds/datafeed-test2/_update
     "my_script_field": {
       "script": {
         "lang": "painless",
-        "inline": "/\\s/.matcher(doc['tokenstring2'].value).replaceAll('_')" <1>
+        "source": "/\\s/.matcher(doc['tokenstring2'].value).replaceAll('_')" <1>
       }
     }
   }

@@ -422,7 +422,7 @@ POST _ml/datafeeds/datafeed-test2/_update
     "my_script_field": {
       "script": {
         "lang": "painless",
-        "inline": "def m = /(.*)-bar-([0-9][0-9])/.matcher(doc['tokenstring3'].value); return m.find() ? m.group(1) + '_' + m.group(2) : '';" <1>
+        "source": "def m = /(.*)-bar-([0-9][0-9])/.matcher(doc['tokenstring3'].value); return m.find() ? m.group(1) + '_' + m.group(2) : '';" <1>
       }
     }
   }

@@ -554,7 +554,7 @@ PUT _ml/datafeeds/datafeed-test4
   "script_fields": {
     "my_coordinates": {
       "script": {
-        "inline": "doc['coords.lat'].value + ',' + doc['coords.lon'].value",
+        "source": "doc['coords.lat'].value + ',' + doc['coords.lon'].value",
         "lang": "painless"
       }
     }

docs/reference/query-dsl/range-query.asciidoc

Lines changed: 4 additions & 1 deletion
@@ -138,7 +138,10 @@ GET _search
 --------------------------------------------------
 // CONSOLE
 <1> This date will be converted to `2014-12-31T23:00:00 UTC`.
-<2> `now` is not affected by the `time_zone` parameter (dates must be stored as UTC).
+<2> `now` is not affected by the `time_zone` parameter, it's always the current system time (in UTC).
+However, when using <<date-math,date math rounding>> (e.g. down to the nearest day using `now/d`),
+the provided `time_zone` will be considered.
+

 [[querying-range-fields]]
 ==== Querying range fields
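
To make the `now`/`time_zone` interaction concrete, here is a minimal sketch of a range query that combines date math rounding with a time zone offset (the `timestamp` field name and the `+01:00` offset are illustrative, not taken from this commit):

[source,js]
--------------------------------------------------
GET _search
{
  "query": {
    "range": {
      "timestamp": {
        "gte": "now-1d/d", <1>
        "lt": "now/d",
        "time_zone": "+01:00"
      }
    }
  }
}
--------------------------------------------------
// NOTCONSOLE
<1> `now` itself is always the current system time in UTC, but the `/d` rounding is applied in the `+01:00` zone supplied via `time_zone`.
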

docs/reference/query-dsl/script-score-query.asciidoc

Lines changed: 23 additions & 10 deletions
@@ -36,6 +36,10 @@ GET /_search
 // CONSOLE
 // TEST[setup:twitter]

+NOTE: The values returned from `script_score` cannot be negative. In general,
+Lucene requires the scores produced by queries to be non-negative in order to
+support certain search optimizations.
+
 ==== Accessing the score of a document within a script

 Within a script, you can
@@ -92,17 +96,18 @@ cosine similarity between a given query vector and document vectors.
         "match_all": {}
       },
       "script": {
-        "source": "cosineSimilarity(params.queryVector, doc['my_dense_vector'])",
+        "source": "cosineSimilarity(params.query_vector, doc['my_dense_vector']) + 1.0", <1>
         "params": {
-          "queryVector": [4, 3.4, -0.2] <1>
+          "query_vector": [4, 3.4, -0.2] <2>
         }
       }
     }
   }
 }
 --------------------------------------------------
 // NOTCONSOLE
-<1> To take advantage of the script optimizations, provide a query vector as a script parameter.
+<1> The script adds 1.0 to the cosine similarity to prevent the score from being negative.
+<2> To take advantage of the script optimizations, provide a query vector as a script parameter.

 Similarly, for sparse_vector fields, `cosineSimilaritySparse` calculates cosine similarity
 between a given query vector and document vectors.
@@ -116,9 +121,9 @@ between a given query vector and document vectors.
         "match_all": {}
       },
       "script": {
-        "source": "cosineSimilaritySparse(params.queryVector, doc['my_sparse_vector'])",
+        "source": "cosineSimilaritySparse(params.query_vector, doc['my_sparse_vector']) + 1.0",
         "params": {
-          "queryVector": {"2": 0.5, "10" : 111.3, "50": -1.3, "113": 14.8, "4545": 156.0}
+          "query_vector": {"2": 0.5, "10" : 111.3, "50": -1.3, "113": 14.8, "4545": 156.0}
         }
       }
     }
@@ -139,9 +144,12 @@ dot product between a given query vector and document vectors.
         "match_all": {}
       },
       "script": {
-        "source": "dotProduct(params.queryVector, doc['my_dense_vector'])",
+        "source": """
+          double value = dotProduct(params.query_vector, doc['my_vector']);
+          return sigmoid(1, Math.E, -value); <1>
+        """,
         "params": {
-          "queryVector": [4, 3.4, -0.2]
+          "query_vector": [4, 3.4, -0.2]
         }
       }
     }
@@ -150,6 +158,8 @@ dot product between a given query vector and document vectors.
 --------------------------------------------------
 // NOTCONSOLE

+<1> Using the standard sigmoid function prevents scores from being negative.
+
 Similarly, for sparse_vector fields, `dotProductSparse` calculates dot product
 between a given query vector and document vectors.

@@ -162,9 +172,12 @@ between a given query vector and document vectors.
         "match_all": {}
       },
       "script": {
-        "source": "dotProductSparse(params.queryVector, doc['my_sparse_vector'])",
-        "params": {
-          "queryVector": {"2": 0.5, "10" : 111.3, "50": -1.3, "113": 14.8, "4545": 156.0}
+        "source": """
+          double value = dotProductSparse(params.query_vector, doc['my_sparse_vector']);
+          return sigmoid(1, Math.E, -value);
+        """,
+        "params": {
+          "query_vector": {"2": 0.5, "10" : 111.3, "50": -1.3, "113": 14.8, "4545": 156.0}
         }
       }
     }
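
As a footnote to the NOTE added above (script_score values cannot be negative): these hunks use two patterns to satisfy the constraint, adding `1.0` to a cosine similarity and passing a dot product through `sigmoid`. Another way to keep a hand-written script non-negative is to clamp it explicitly. A minimal sketch, where the `likes` field and the `offset` parameter are purely illustrative and not part of this commit:

[source,js]
--------------------------------------------------
GET /_search
{
  "query": {
    "script_score": {
      "query": { "match_all": {} },
      "script": {
        "source": "Math.max(0, doc['likes'].value - params.offset)",
        "params": { "offset": 10 }
      }
    }
  }
}
--------------------------------------------------
// NOTCONSOLE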
