diff --git a/docs/common-options/date-math/date-math-expressions.asciidoc b/docs/common-options/date-math/date-math-expressions.asciidoc index 0315dd2eac5..075531c08b7 100644 --- a/docs/common-options/date-math/date-math-expressions.asciidoc +++ b/docs/common-options/date-math/date-math-expressions.asciidoc @@ -83,14 +83,28 @@ anchor will be an actual `DateTime`, even after a serialization/deserialization [source,csharp] ---- var date = new DateTime(2015, 05, 05); -Expect("2015-05-05T00:00:00") - .WhenSerializing(date) - .AssertSubject(dateMath => ((IDateMath)dateMath) - .Anchor.Match( - d => d.Should().Be(date), - s => s.Should().BeNull() - ) - ); +---- + +will serialize to + +[source,javascript] +---- +"2015-05-05T00:00:00" +---- + +When the `DateTime` is local or UTC, the time zone information is included. +For example, for a UTC `DateTime` + +[source,csharp] +---- +var utcDate = new DateTime(2015, 05, 05, 0, 0, 0, DateTimeKind.Utc); +---- + +will serialize to + +[source,javascript] +---- +"2015-05-05T00:00:00Z" ---- ==== Complex expressions diff --git a/docs/query-dsl.asciidoc b/docs/query-dsl.asciidoc index f007d3898e2..cd7bbc48004 100644 --- a/docs/query-dsl.asciidoc +++ b/docs/query-dsl.asciidoc @@ -275,6 +275,8 @@ Specialized types of queries that do not fit into other groups * <> +* <> + See the Elasticsearch documentation on {ref_current}/specialized-queries.html[Specialized queries] for more details. :includes-from-dirs: query-dsl/specialized @@ -287,6 +289,8 @@ include::query-dsl/specialized/percolate/percolate-query-usage.asciidoc[] include::query-dsl/specialized/script/script-query-usage.asciidoc[] +include::query-dsl/specialized/script-score/script-score-query-usage.asciidoc[] + [[span-queries]] == Span queries diff --git a/docs/query-dsl/compound/function-score/function-score-query-usage.asciidoc b/docs/query-dsl/compound/function-score/function-score-query-usage.asciidoc index 0a35c4916b3..7cd30fba882 100644 --- a/docs/query-dsl/compound/function-score/function-score-query-usage.asciidoc +++ b/docs/query-dsl/compound/function-score/function-score-query-usage.asciidoc @@ -29,15 +29,36 @@ q .MaxBoost(20.0) .MinScore(1.0) .Functions(f => f - .Exponential(b => b.Field(p => p.NumberOfCommits).Decay(0.5).Origin(1.0).Scale(0.1).Weight(2.1)) + .Exponential(b => b + .Field(p => p.NumberOfCommits) + .Decay(0.5) + .Origin(1.0) + .Scale(0.1) + .Weight(2.1) + .Filter(fi => fi + .Range(r => r + .Field(p => p.NumberOfContributors) + .GreaterThan(10) + ) + ) + ) .GaussDate(b => b.Field(p => p.LastActivity).Origin(DateMath.Now).Decay(0.5).Scale("1d")) - .LinearGeoLocation(b => - b.Field(p => p.LocationPoint).Origin(new GeoLocation(70, -70)).Scale(Distance.Miles(1)).MultiValueMode(MultiValueMode.Average)) + .LinearGeoLocation(b => b + .Field(p => p.LocationPoint) + .Origin(new GeoLocation(70, -70)) + .Scale(Distance.Miles(1)) + .MultiValueMode(MultiValueMode.Average) + ) .FieldValueFactor(b => b.Field(p => p.NumberOfContributors).Factor(1.1).Missing(0.1).Modifier(FieldValueFactorModifier.Square)) .RandomScore(r => r.Seed(1337).Field("_seq_no")) .RandomScore(r => r.Seed("randomstring").Field("_seq_no")) .Weight(1.0) - .ScriptScore(s => s.Script(ss => ss.Source("Math.log(2 + doc['numberOfCommits'].value)"))) + .ScriptScore(s => s + .Script(ss => ss + .Source("Math.log(2 + doc['numberOfCommits'].value)") + ) + .Weight(2) + ) ) ) ---- @@ -57,7 +78,19 @@ new FunctionScoreQuery() MinScore = 1.0, Functions = new List { - new ExponentialDecayFunction { Origin = 1.0, Decay = 0.5, Field = Field(p => 
p.NumberOfCommits), Scale = 0.1, Weight = 2.1 }, + new ExponentialDecayFunction + { + Origin = 1.0, + Decay = 0.5, + Field = Field(p => p.NumberOfCommits), + Scale = 0.1, + Weight = 2.1, + Filter = new NumericRangeQuery + { + Field = Field(f => f.NumberOfContributors), + GreaterThan = 10 + } + }, new GaussDateDecayFunction { Origin = DateMath.Now, Field = Field(p => p.LastActivity), Decay = 0.5, Scale = TimeSpan.FromDays(1) }, new LinearGeoDecayFunction @@ -72,7 +105,7 @@ new FunctionScoreQuery() new RandomScoreFunction { Seed = 1337, Field = "_seq_no" }, new RandomScoreFunction { Seed = "randomstring", Field = "_seq_no" }, new WeightFunction { Weight = 1.0 }, - new ScriptScoreFunction { Script = new InlineScript("Math.log(2 + doc['numberOfCommits'].value)") } + new ScriptScoreFunction { Script = new InlineScript("Math.log(2 + doc['numberOfCommits'].value)"), Weight = 2.0 } } } ---- @@ -94,7 +127,14 @@ new FunctionScoreQuery() "decay": 0.5 } }, - "weight": 2.1 + "weight": 2.1, + "filter": { + "range": { + "numberOfContributors": { + "gt": 10.0 + } + } + } }, { "gauss": { @@ -145,7 +185,8 @@ new FunctionScoreQuery() "script": { "source": "Math.log(2 + doc['numberOfCommits'].value)" } - } + }, + "weight": 2.0 } ], "max_boost": 20.0, diff --git a/docs/query-dsl/specialized/script-score/script-score-query-usage.asciidoc b/docs/query-dsl/specialized/script-score/script-score-query-usage.asciidoc new file mode 100644 index 00000000000..c99e29fb06a --- /dev/null +++ b/docs/query-dsl/specialized/script-score/script-score-query-usage.asciidoc @@ -0,0 +1,102 @@ +:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/7.0 + +:github: https://github.com/elastic/elasticsearch-net + +:nuget: https://www.nuget.org/packages + +//// +IMPORTANT NOTE +============== +This file has been generated from https://github.com/elastic/elasticsearch-net/tree/master/src/Tests/Tests/QueryDsl/Specialized/ScriptScore/ScriptScoreQueryUsageTests.cs. +If you wish to submit a PR for any spelling mistakes, typos or grammatical errors for this file, +please modify the original csharp file found at the link and submit the PR with that change. Thanks! +//// + +[[script-score-query-usage]] +=== Script Score Query Usage + +A query allowing you to modify the score of documents that are retrieved by a query. +This can be useful if, for example, a score function is computationally expensive and +it is sufficient to compute the score on a filtered set of documents. + +See the Elasticsearch documentation on {ref_current}/query-dsl-script-score-query.html[script_score query] for more details. 
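+
+As a quick orientation before the generated examples below, a `script_score` query is normally sent as part of a full search request. The following is a minimal, illustrative sketch only — it assumes an `IElasticClient` instance named `client` and the `Project` document type used throughout these docs, and the script shown is a placeholder rather than the one used in the tests:
+
+[source,csharp]
+----
+var searchResponse = client.Search<Project>(s => s
+    .Query(q => q
+        .ScriptScore(ss => ss
+            .Query(qq => qq
+                .Range(r => r
+                    .Field(f => f.NumberOfCommits)
+                    .GreaterThan(50)
+                )
+            )
+            .Script(sc => sc
+                .Source("doc['numberOfCommits'].value / 10.0")
+            )
+        )
+    )
+);
+----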
+ +==== Fluent DSL example + +[source,csharp] +---- +q +.ScriptScore(sn => sn + .Name("named_query") + .Boost(1.1) + .Query(qq => qq + .Range(r => r + .Field(f => f.NumberOfCommits) + .GreaterThan(50) + ) + ) + .Script(s => s + .Source(_scriptScoreSource) + .Params(p => p + .Add("origin", 100) + .Add("scale", 10) + .Add("decay", 0.5) + .Add("offset", 0) + ) + ) +) +---- + +==== Object Initializer syntax example + +[source,csharp] +---- +new ScriptScoreQuery +{ + Name = "named_query", + Boost = 1.1, + Query = new NumericRangeQuery + { + Field = Infer.Field(f => f.NumberOfCommits), + GreaterThan = 50 + }, + Script = new InlineScript(_scriptScoreSource) + { + Params = new Dictionary + { + { "origin", 100 }, + { "scale", 10 }, + { "decay", 0.5 }, + { "offset", 0 } + } + }, +} +---- + +[source,javascript] +.Example json output +---- +{ + "script_score": { + "_name": "named_query", + "boost": 1.1, + "query": { + "range": { + "numberOfCommits": { + "gt": 50.0 + } + } + }, + "script": { + "source": "decayNumericLinear(params.origin, params.scale, params.offset, params.decay, doc['numberOfCommits'].value)", + "params": { + "origin": 100, + "scale": 10, + "decay": 0.5, + "offset": 0 + } + } + } +} +---- + diff --git a/src/CodeGeneration/ApiGenerator/Configuration/Overrides/GlobalOverrides.cs b/src/CodeGeneration/ApiGenerator/Configuration/Overrides/GlobalOverrides.cs index 1b83c798db5..66bff45327b 100644 --- a/src/CodeGeneration/ApiGenerator/Configuration/Overrides/GlobalOverrides.cs +++ b/src/CodeGeneration/ApiGenerator/Configuration/Overrides/GlobalOverrides.cs @@ -40,7 +40,8 @@ public class GlobalOverrides : EndpointOverridesBase "copy_settings", //this still needs a PR? "source", // allows the body to be specified as a request param, we do not want to advertise this with a strongly typed method "timestamp", - "_source_include", "_source_exclude" // can be removed once https://github.com/elastic/elasticsearch/pull/41439 is in + "_source_include", "_source_exclude", // can be removed once https://github.com/elastic/elasticsearch/pull/41439 is in + "track_total_hits" }; } } diff --git a/src/CodeGeneration/DocGenerator/StringExtensions.cs b/src/CodeGeneration/DocGenerator/StringExtensions.cs index 0f61e90ff8a..25fe8da0987 100644 --- a/src/CodeGeneration/DocGenerator/StringExtensions.cs +++ b/src/CodeGeneration/DocGenerator/StringExtensions.cs @@ -114,7 +114,8 @@ public static class StringExtensions new []{ new [] {8.2, 18.2}, new [] {8.2, -18.8}, new [] {-8.8, -10.8}, new [] {8.8, 18.2}} }" }, - { "ProjectFilterExpectedJson", "new {term = new {type = new {value = \"project\"}}}" } + { "ProjectFilterExpectedJson", "new {term = new {type = new {value = \"project\"}}}" }, + { "_scriptScoreSource", "\"decayNumericLinear(params.origin, params.scale, params.offset, params.decay, doc['numberOfCommits'].value)\""} }; private static readonly Regex LeadingSpacesAndAsterisk = new Regex(@"^(?[ \t]*\*\s?).*", RegexOptions.Compiled); @@ -213,7 +214,7 @@ public static string RemoveNumberOfLeadingTabsOrSpacesAfterNewline(this string i public static string[] SplitOnNewLines(this string input, StringSplitOptions options) => input.Split(new[] { "\r\n", "\n" }, options); - public static bool TryGetJsonForAnonymousType(this string anonymousTypeString, out string json) + public static bool TryGetJsonForExpressionSyntax(this string anonymousTypeString, out string json) { json = null; diff --git a/src/CodeGeneration/DocGenerator/SyntaxNodeExtensions.cs b/src/CodeGeneration/DocGenerator/SyntaxNodeExtensions.cs index 
e57c2bf3ea6..28316f49a48 100644 --- a/src/CodeGeneration/DocGenerator/SyntaxNodeExtensions.cs +++ b/src/CodeGeneration/DocGenerator/SyntaxNodeExtensions.cs @@ -72,11 +72,10 @@ public static bool TryGetJsonForSyntaxNode(this SyntaxNode node, out string json json = null; // find the first anonymous object or new object expression - var creationExpressionSyntax = node.DescendantNodes() - .FirstOrDefault(n => n is AnonymousObjectCreationExpressionSyntax || n is ObjectCreationExpressionSyntax); + var syntax = node.DescendantNodes() + .FirstOrDefault(n => n is AnonymousObjectCreationExpressionSyntax || n is ObjectCreationExpressionSyntax || n is LiteralExpressionSyntax); - return creationExpressionSyntax != null && - creationExpressionSyntax.ToFullString().TryGetJsonForAnonymousType(out json); + return syntax != null && syntax.ToFullString().TryGetJsonForExpressionSyntax(out json); } /// diff --git a/src/Elasticsearch.Net/Api/RequestParameters/RequestParameters.NoNamespace.cs b/src/Elasticsearch.Net/Api/RequestParameters/RequestParameters.NoNamespace.cs index d8423db8444..3342d8e688c 100644 --- a/src/Elasticsearch.Net/Api/RequestParameters/RequestParameters.NoNamespace.cs +++ b/src/Elasticsearch.Net/Api/RequestParameters/RequestParameters.NoNamespace.cs @@ -1712,13 +1712,6 @@ public bool? TotalHitsAsInteger set => Q("rest_total_hits_as_int", value); } - ///Indicate if the number of documents that match the query should be tracked - public bool? TrackTotalHits - { - get => Q("track_total_hits"); - set => Q("track_total_hits", value); - } - ///Specify whether aggregation and suggester names should be prefixed by their respective types in the response public bool? TypedKeys { diff --git a/src/Elasticsearch.Net/Connection/Content/RequestDataContent.cs b/src/Elasticsearch.Net/Connection/Content/RequestDataContent.cs index d624e90f4cc..a85afcce514 100644 --- a/src/Elasticsearch.Net/Connection/Content/RequestDataContent.cs +++ b/src/Elasticsearch.Net/Connection/Content/RequestDataContent.cs @@ -25,8 +25,7 @@ namespace Elasticsearch.Net internal class RequestDataContent : HttpContent { private readonly RequestData _requestData; - private readonly Func _onStreamAvailable; - + private readonly Func _onStreamAvailable; public RequestDataContent(RequestData requestData) { @@ -35,12 +34,17 @@ public RequestDataContent(RequestData requestData) if (requestData.HttpCompression) Headers.ContentEncoding.Add("gzip"); - Task OnStreamAvailable(PostData data, Stream stream, HttpContent content, TransportContext context) + Task OnStreamAvailable(RequestData data, Stream stream, HttpContent content, TransportContext context) { + if (data.HttpCompression) + stream = new GZipStream(stream, CompressionMode.Compress, false); + using(stream) - data.Write(stream, requestData.ConnectionSettings); + data.PostData.Write(stream, data.ConnectionSettings); + return Task.CompletedTask; } + _onStreamAvailable = OnStreamAvailable; } public RequestDataContent(RequestData requestData, CancellationToken token) @@ -50,11 +54,15 @@ public RequestDataContent(RequestData requestData, CancellationToken token) if (requestData.HttpCompression) Headers.ContentEncoding.Add("gzip"); - async Task OnStreamAvailable(PostData data, Stream stream, HttpContent content, TransportContext context) + async Task OnStreamAvailable(RequestData data, Stream stream, HttpContent content, TransportContext context) { + if (data.HttpCompression) + stream = new GZipStream(stream, CompressionMode.Compress, false); + using (stream) - await data.WriteAsync(stream, 
requestData.ConnectionSettings, token).ConfigureAwait(false); + await data.PostData.WriteAsync(stream, data.ConnectionSettings, token).ConfigureAwait(false); } + _onStreamAvailable = OnStreamAvailable; } @@ -69,16 +77,9 @@ async Task OnStreamAvailable(PostData data, Stream stream, HttpContent content, [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Exception is passed as task result.")] protected override async Task SerializeToStreamAsync(Stream stream, TransportContext context) { - - var data = _requestData.PostData; - if (data == null) return; - var serializeToStreamTask = new TaskCompletionSource(); - - if (_requestData.HttpCompression) - stream = new GZipStream(stream, CompressionMode.Compress, false); var wrappedStream = new CompleteTaskOnCloseStream(stream, serializeToStreamTask); - await _onStreamAvailable(data, wrappedStream, this, context).ConfigureAwait(false); + await _onStreamAvailable(_requestData, wrappedStream, this, context).ConfigureAwait(false); await serializeToStreamTask.Task.ConfigureAwait(false); } @@ -111,7 +112,6 @@ protected override void Dispose(bool disposing) base.Dispose(); } - public override void Close() => _serializeToStreamTask.TrySetResult(true); } @@ -193,6 +193,8 @@ public override IAsyncResult BeginWrite(byte[] buffer, int offset, int count, As public override void EndWrite(IAsyncResult asyncResult) => _innerStream.EndWrite(asyncResult); public override void WriteByte(byte value) => _innerStream.WriteByte(value); + + public override void Close() => _innerStream.Close(); } } } diff --git a/src/Elasticsearch.Net/Connection/HttpConnection.cs b/src/Elasticsearch.Net/Connection/HttpConnection.cs index 734e7e6d3b0..c9b36d209b0 100644 --- a/src/Elasticsearch.Net/Connection/HttpConnection.cs +++ b/src/Elasticsearch.Net/Connection/HttpConnection.cs @@ -57,7 +57,10 @@ public virtual TResponse Request(RequestData requestData) try { var requestMessage = CreateHttpRequestMessage(requestData); - SetContent(requestMessage, requestData); + + if (requestData.PostData != null) + SetContent(requestMessage, requestData); + using(requestMessage?.Content ?? (IDisposable)Stream.Null) using (var d = DiagnosticSource.Diagnose(DiagnosticSources.HttpConnection.SendAndReceiveHeaders, requestData)) { @@ -107,8 +110,11 @@ public virtual async Task RequestAsync(RequestData request try { var requestMessage = CreateHttpRequestMessage(requestData); - SetAsyncContent(requestMessage, requestData, cancellationToken); - using(requestMessage?.Content ?? (IDisposable)Stream.Null) + + if (requestData.PostData != null) + SetAsyncContent(requestMessage, requestData, cancellationToken); + + using(requestMessage?.Content ?? (IDisposable)Stream.Null) using (var d = DiagnosticSource.Diagnose(DiagnosticSources.HttpConnection.SendAndReceiveHeaders, requestData)) { responseMessage = await client.SendAsync(requestMessage, HttpCompletionOption.ResponseHeadersRead, cancellationToken).ConfigureAwait(false); diff --git a/src/Elasticsearch.Net/Extensions/StringBuilderCache.cs b/src/Elasticsearch.Net/Extensions/StringBuilderCache.cs new file mode 100644 index 00000000000..873dbd9f8d0 --- /dev/null +++ b/src/Elasticsearch.Net/Extensions/StringBuilderCache.cs @@ -0,0 +1,64 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. +// See the LICENSE file in the project root for more information. 
+ +using System; +using System.Text; + +namespace Elasticsearch.Net.Extensions +{ + /// Provide a cached reusable instance of stringbuilder per thread. + internal static class StringBuilderCache + { + private const int DefaultCapacity = 16; // == StringBuilder.DefaultCapacity + + // The value 360 was chosen in discussion with performance experts as a compromise between using + // as little memory per thread as possible and still covering a large part of short-lived + // StringBuilder creations on the startup path of VS designers. + private const int MaxBuilderSize = 360; + + // WARNING: We allow diagnostic tools to directly inspect this member (t_cachedInstance). + // See https://github.com/dotnet/corert/blob/master/Documentation/design-docs/diagnostics/diagnostics-tools-contract.md for more details. + // Please do not change the type, the name, or the semantic usage of this member without understanding the implication for tools. + // Get in touch with the diagnostics team if you have questions. + [ThreadStatic] + private static StringBuilder _cachedInstance; + + /// Get a StringBuilder for the specified capacity. + /// If a StringBuilder of an appropriate size is cached, it will be returned and the cache emptied. + public static StringBuilder Acquire(int capacity = DefaultCapacity) + { + if (capacity <= MaxBuilderSize) + { + var sb = _cachedInstance; + if (sb != null) + { + // Avoid stringbuilder block fragmentation by getting a new StringBuilder + // when the requested size is larger than the current capacity + if (capacity <= sb.Capacity) + { + _cachedInstance = null; + sb.Clear(); + return sb; + } + } + } + + return new StringBuilder(capacity); + } + + /// Place the specified builder in the cache if it is not too big. + public static void Release(StringBuilder sb) + { + if (sb.Capacity <= MaxBuilderSize) _cachedInstance = sb; + } + + /// ToString() the stringbuilder, Release it to the cache, and return the resulting string. + public static string GetStringAndRelease(StringBuilder sb) + { + var result = sb.ToString(); + Release(sb); + return result; + } + } +} diff --git a/src/Nest/Analysis/TokenFilters/WordDelimiterGraph/WordDelimiterGraphTokenFilter.cs b/src/Nest/Analysis/TokenFilters/WordDelimiterGraph/WordDelimiterGraphTokenFilter.cs index 75912495753..67eefd30821 100644 --- a/src/Nest/Analysis/TokenFilters/WordDelimiterGraph/WordDelimiterGraphTokenFilter.cs +++ b/src/Nest/Analysis/TokenFilters/WordDelimiterGraph/WordDelimiterGraphTokenFilter.cs @@ -10,6 +10,13 @@ namespace Nest /// public interface IWordDelimiterGraphTokenFilter : ITokenFilter { + /// + /// By default, the filter tries to output subtokens with adjusted offsets to reflect their actual position in the token stream. However, when used in combination with other filters that alter the length or starting position of tokens without changing their offsets (e.g. ) this can cause tokens with illegal offsets to be emitted. Setting to false will stop from adjusting these internal offsets. + /// + [DataMember(Name ="adjust_offsets")] + [JsonFormatter(typeof(NullableStringBooleanFormatter))] + bool? AdjustOffsets { get; set; } + /// /// If true causes all subword parts to be catenated: "wi-fi-4000" ⇒ "wifi4000". Defaults to false. /// @@ -104,6 +111,9 @@ public class WordDelimiterGraphTokenFilter : TokenFilterBase, IWordDelimiterGrap { public WordDelimiterGraphTokenFilter() : base("word_delimiter_graph") { } + /// + public bool? AdjustOffsets { get; set; } + /// public bool? 
CatenateAll { get; set; } @@ -149,6 +159,7 @@ public class WordDelimiterGraphTokenFilterDescriptor : TokenFilterDescriptorBase, IWordDelimiterGraphTokenFilter { protected override string Type => "word_delimiter_graph"; + bool? IWordDelimiterGraphTokenFilter.AdjustOffsets { get; set; } bool? IWordDelimiterGraphTokenFilter.CatenateAll { get; set; } bool? IWordDelimiterGraphTokenFilter.CatenateNumbers { get; set; } bool? IWordDelimiterGraphTokenFilter.CatenateWords { get; set; } @@ -179,6 +190,9 @@ public WordDelimiterGraphTokenFilterDescriptor GenerateNumberParts(bool? generat public WordDelimiterGraphTokenFilterDescriptor CatenateNumbers(bool? catenateNumbers = true) => Assign(catenateNumbers, (a, v) => a.CatenateNumbers = v); + /// + public WordDelimiterGraphTokenFilterDescriptor AdjustOffsets(bool? adjustOffsets = true) => Assign(adjustOffsets, (a, v) => a.AdjustOffsets = v); + /// public WordDelimiterGraphTokenFilterDescriptor CatenateAll(bool? catenateAll = true) => Assign(catenateAll, (a, v) => a.CatenateAll = v); diff --git a/src/Nest/CommonOptions/DateMath/DateMath.cs b/src/Nest/CommonOptions/DateMath/DateMath.cs index ee32d7d0b8e..c08436564d5 100644 --- a/src/Nest/CommonOptions/DateMath/DateMath.cs +++ b/src/Nest/CommonOptions/DateMath/DateMath.cs @@ -1,5 +1,6 @@ using System; using System.Collections.Generic; +using System.Globalization; using System.Text; using System.Text.RegularExpressions; using Elasticsearch.Net.Extensions; @@ -109,21 +110,50 @@ public override string ToString() } /// - /// Formats a to have a minimum of 3 decimal places if there - /// are sub second values + /// Formats a to have a minimum of 3 decimal places if there are sub second values /// - /// Fixes bug in Elasticsearch: https://github.com/elastic/elasticsearch/pull/41871 private static string ToMinThreeDecimalPlaces(DateTime dateTime) { - var format = dateTime.ToString("yyyy-MM-ddTHH:mm:ss.FFFFFFF"); + var builder = StringBuilderCache.Acquire(33); + var format = dateTime.ToString("yyyy'-'MM'-'dd'T'HH':'mm':'ss.FFFFFFF", CultureInfo.InvariantCulture); + builder.Append(format); + // Fixes bug in Elasticsearch: https://github.com/elastic/elasticsearch/pull/41871 if (format.Length > 20 && format.Length < 23) { var diff = 23 - format.Length; - return $"{format}{new string('0', diff)}"; + for (int i = 0; i < diff; i++) + builder.Append('0'); } - return format; + switch (dateTime.Kind) + { + case DateTimeKind.Local: + var offset = TimeZoneInfo.Local.GetUtcOffset(dateTime); + if (offset >= TimeSpan.Zero) + builder.Append('+'); + else + { + builder.Append('-'); + offset = offset.Negate(); + } + + AppendTwoDigitNumber(builder, offset.Hours); + builder.Append(':'); + AppendTwoDigitNumber(builder, offset.Minutes); + break; + case DateTimeKind.Utc: + builder.Append('Z'); + break; + } + + return StringBuilderCache.GetStringAndRelease(builder); + } + + private static void AppendTwoDigitNumber(StringBuilder result, int val) + { + result.Append((char)('0' + (val / 10))); + result.Append((char)('0' + (val % 10))); } } diff --git a/src/Nest/Descriptors.NoNamespace.cs b/src/Nest/Descriptors.NoNamespace.cs index e746e4bf907..fa302f6708f 100644 --- a/src/Nest/Descriptors.NoNamespace.cs +++ b/src/Nest/Descriptors.NoNamespace.cs @@ -1300,8 +1300,6 @@ public SearchDescriptor Index() public SearchDescriptor SuggestText(string suggesttext) => Qs("suggest_text", suggesttext); ///Indicates whether hits.total should be rendered as an integer or an object in the rest search response public SearchDescriptor TotalHitsAsInteger(bool? 
totalhitsasinteger = true) => Qs("rest_total_hits_as_int", totalhitsasinteger); - ///Indicate if the number of documents that match the query should be tracked - public SearchDescriptor TrackTotalHits(bool? tracktotalhits = true) => Qs("track_total_hits", tracktotalhits); ///Specify whether aggregation and suggester names should be prefixed by their respective types in the response public SearchDescriptor TypedKeys(bool? typedkeys = true) => Qs("typed_keys", typedkeys); } diff --git a/src/Nest/Modules/SnapshotAndRestore/Snapshot/SnapshotStatus/SnapshotStatusResponse.cs b/src/Nest/Modules/SnapshotAndRestore/Snapshot/SnapshotStatus/SnapshotStatusResponse.cs index 45ad34018f1..2c168d40c60 100644 --- a/src/Nest/Modules/SnapshotAndRestore/Snapshot/SnapshotStatus/SnapshotStatusResponse.cs +++ b/src/Nest/Modules/SnapshotAndRestore/Snapshot/SnapshotStatus/SnapshotStatusResponse.cs @@ -85,10 +85,25 @@ public class SnapshotShardsStats public class SnapshotStats { + [DataMember(Name ="incremental")] + public FileCountSnapshotStats Incremental { get; internal set; } + + [DataMember(Name ="total")] + public FileCountSnapshotStats Total { get; internal set; } + [DataMember(Name ="start_time_in_millis")] public long StartTimeInMilliseconds { get; internal set; } [DataMember(Name ="time_in_millis")] public long TimeInMilliseconds { get; internal set; } } + + public class FileCountSnapshotStats + { + [DataMember(Name ="file_count")] + public int FileCount { get; internal set; } + + [DataMember(Name ="size_in_bytes")] + public long SizeInBytes { get; internal set; } + } } diff --git a/src/Nest/QueryDsl/Abstractions/Container/IQueryContainer.cs b/src/Nest/QueryDsl/Abstractions/Container/IQueryContainer.cs index 0a28e8d6181..81b7aa43690 100644 --- a/src/Nest/QueryDsl/Abstractions/Container/IQueryContainer.cs +++ b/src/Nest/QueryDsl/Abstractions/Container/IQueryContainer.cs @@ -115,6 +115,10 @@ public interface IQueryContainer [DataMember(Name ="script")] IScriptQuery Script { get; set; } + /// + [DataMember(Name ="script_score")] + IScriptScoreQuery ScriptScore { get; set; } + [DataMember(Name ="simple_query_string")] ISimpleQueryStringQuery SimpleQueryString { get; set; } diff --git a/src/Nest/QueryDsl/Abstractions/Container/QueryContainer-Assignments.cs b/src/Nest/QueryDsl/Abstractions/Container/QueryContainer-Assignments.cs index 15024c1fa55..0481cd3efeb 100644 --- a/src/Nest/QueryDsl/Abstractions/Container/QueryContainer-Assignments.cs +++ b/src/Nest/QueryDsl/Abstractions/Container/QueryContainer-Assignments.cs @@ -38,6 +38,7 @@ public partial class QueryContainer : IQueryContainer, IDescriptor private IRawQuery _raw; private IRegexpQuery _regexp; private IScriptQuery _script; + private IScriptScoreQuery _scriptScore; private ISimpleQueryStringQuery _simpleQueryString; private ISpanContainingQuery _spanContaining; private ISpanFieldMaskingQuery _spanFieldMasking; @@ -251,6 +252,12 @@ IScriptQuery IQueryContainer.Script set => _script = Set(value); } + IScriptScoreQuery IQueryContainer.ScriptScore + { + get => _scriptScore; + set => _scriptScore = Set(value); + } + ISimpleQueryStringQuery IQueryContainer.SimpleQueryString { get => _simpleQueryString; diff --git a/src/Nest/QueryDsl/Abstractions/Container/QueryContainerDescriptor.cs b/src/Nest/QueryDsl/Abstractions/Container/QueryContainerDescriptor.cs index 2cd57ad0b19..ef57d8f59ae 100644 --- a/src/Nest/QueryDsl/Abstractions/Container/QueryContainerDescriptor.cs +++ b/src/Nest/QueryDsl/Abstractions/Container/QueryContainerDescriptor.cs @@ -437,6 +437,9 @@ 
public QueryContainer FunctionScore(Func, IFunct public QueryContainer Script(Func, IScriptQuery> selector) => WrapInContainer(selector, (query, container) => container.Script = query); + public QueryContainer ScriptScore(Func, IScriptScoreQuery> selector) => + WrapInContainer(selector, (query, container) => container.ScriptScore = query); + public QueryContainer Exists(Func, IExistsQuery> selector) => WrapInContainer(selector, (query, container) => container.Exists = query); diff --git a/src/Nest/QueryDsl/Compound/FunctionScore/Functions/ScoreFunctionJsonFormatter.cs b/src/Nest/QueryDsl/Compound/FunctionScore/Functions/ScoreFunctionJsonFormatter.cs index 71ca0ec758f..85167ca50f8 100644 --- a/src/Nest/QueryDsl/Compound/FunctionScore/Functions/ScoreFunctionJsonFormatter.cs +++ b/src/Nest/QueryDsl/Compound/FunctionScore/Functions/ScoreFunctionJsonFormatter.cs @@ -158,8 +158,11 @@ public void Serialize(ref JsonWriter writer, IScoreFunction value, IJsonFormatte private static void WriteScriptScore(ref JsonWriter writer, IScriptScoreFunction value, IJsonFormatterResolver formatterResolver) { writer.WritePropertyName("script_score"); - var scriptFormatter = formatterResolver.GetFormatter(); - scriptFormatter.Serialize(ref writer, value, formatterResolver); + writer.WriteBeginObject(); + writer.WritePropertyName("script"); + var scriptFormatter = formatterResolver.GetFormatter(); + scriptFormatter.Serialize(ref writer, value?.Script, formatterResolver); + writer.WriteEndObject(); } private static void WriteRandomScore(ref JsonWriter writer, IRandomScoreFunction value, IJsonFormatterResolver formatterResolver) diff --git a/src/Nest/QueryDsl/Geo/WKT/GeoWKTReader.cs b/src/Nest/QueryDsl/Geo/WKT/GeoWKTReader.cs index 924304b2294..b5f551b26cc 100644 --- a/src/Nest/QueryDsl/Geo/WKT/GeoWKTReader.cs +++ b/src/Nest/QueryDsl/Geo/WKT/GeoWKTReader.cs @@ -259,14 +259,14 @@ private static TokenType NextCloserOrComma(WellKnownTextTokenizer tokenizer) private static double NextNumber(WellKnownTextTokenizer tokenizer) { - if (tokenizer.NextToken() == TokenType.Number) + if (tokenizer.NextToken() == TokenType.Word) { if (string.Equals(tokenizer.TokenValue, WellKnownTextTokenizer.NaN, StringComparison.OrdinalIgnoreCase)) return double.NaN; if (double.TryParse( tokenizer.TokenValue, - NumberStyles.AllowDecimalPoint | NumberStyles.AllowLeadingSign, + NumberStyles.AllowDecimalPoint | NumberStyles.AllowLeadingSign | NumberStyles.AllowExponent, CultureInfo.InvariantCulture, out var d)) return d; } @@ -278,7 +278,7 @@ private static double NextNumber(WellKnownTextTokenizer tokenizer) private static bool IsNumberNext(WellKnownTextTokenizer tokenizer) { var token = tokenizer.PeekToken(); - return token == TokenType.Number; + return token == TokenType.Word; } } @@ -288,7 +288,6 @@ private static bool IsNumberNext(WellKnownTextTokenizer tokenizer) internal enum CharacterType : byte { Whitespace, - Digit, Alpha, Comment } @@ -300,7 +299,6 @@ internal enum TokenType : byte { None, Word, - Number, LParen, RParen, Comma @@ -339,15 +337,14 @@ static WellKnownTextTokenizer() // build a map of ASCII chars and their types // Any unmapped ASCII will be considered whitespace // and anything > 0 outside of ASCII will be considered alpha. - // Treat + - and . as digit characters to make parsing numbers easier. 
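+			// Digits, '+', '-' and '.' are mapped as alpha characters so that numbers
+			// (including exponent forms such as 1.2E2) are tokenized as single words
+			// and parsed into doubles later by NextNumber.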
Chars('a', 'z', CharacterType.Alpha); Chars('A', 'Z', CharacterType.Alpha); Chars(128 + 32, 255, CharacterType.Alpha); - Chars('0', '9', CharacterType.Digit); + Chars('0', '9', CharacterType.Alpha); Chars(LParen, RParen, CharacterType.Alpha); - Chars(Plus, Plus, CharacterType.Digit); + Chars(Plus, Plus, CharacterType.Alpha); Chars(Comma, Comma, CharacterType.Alpha); - Chars(Minus, Dot, CharacterType.Digit); + Chars(Minus, Dot, CharacterType.Alpha); Chars(Comment, Comment, CharacterType.Comment); } @@ -399,7 +396,6 @@ public string TokenString() switch (TokenType) { case TokenType.Word: - case TokenType.Number: return TokenValue; case TokenType.None: return "END-OF-STREAM"; @@ -514,33 +510,6 @@ public TokenType NextToken() { var i = 0; - do - { - _buffer.Insert(i++, (char)c); - c = Read(); - - if (c < 0) - characterType = CharacterType.Whitespace; - else if (c < CharacterTypesLength) - characterType = CharacterTypes[c]; - else - characterType = CharacterType.Alpha; - } while (characterType == CharacterType.Alpha); - - _peekChar = c; - TokenValue = new string(_buffer.ToArray(), 0, i); - - // special case for NaN - if (string.Equals(TokenValue, NaN, StringComparison.OrdinalIgnoreCase)) - return TokenType = TokenType.Number; - - return TokenType = TokenType.Word; - } - - if (characterType == CharacterType.Digit) - { - var i = 0; - var dots = 0; do { _buffer.Insert(i++, (char)c); @@ -550,20 +519,19 @@ public TokenType NextToken() characterType = CharacterType.Whitespace; else if (c < CharacterTypesLength) { + if (c == LParen || c == RParen || c == Comma) + break; + characterType = CharacterTypes[c]; - if (c == Dot) - dots++; } else characterType = CharacterType.Alpha; - } while (characterType == CharacterType.Digit); + } while (characterType == CharacterType.Alpha); _peekChar = c; TokenValue = new string(_buffer.ToArray(), 0, i); - return dots > 1 - ? TokenType = TokenType.Word - : TokenType = TokenType.Number; + return TokenType = TokenType.Word; } if (characterType == CharacterType.Comment) diff --git a/src/Nest/QueryDsl/Query.cs b/src/Nest/QueryDsl/Query.cs index 048f1f41716..9917545ad76 100644 --- a/src/Nest/QueryDsl/Query.cs +++ b/src/Nest/QueryDsl/Query.cs @@ -117,6 +117,10 @@ public static QueryContainer Regexp(Func, IRegexpQuery> public static QueryContainer Script(Func, IScriptQuery> selector) => new QueryContainerDescriptor().Script(selector); + /// + public static QueryContainer ScriptScore(Func, IScriptScoreQuery> selector) => + new QueryContainerDescriptor().ScriptScore(selector); + public static QueryContainer SimpleQueryString(Func, ISimpleQueryStringQuery> selector) => new QueryContainerDescriptor().SimpleQueryString(selector); diff --git a/src/Nest/QueryDsl/Specialized/ScriptScore/ScriptScoreQuery.cs b/src/Nest/QueryDsl/Specialized/ScriptScore/ScriptScoreQuery.cs new file mode 100644 index 00000000000..b0e8d73bb1d --- /dev/null +++ b/src/Nest/QueryDsl/Specialized/ScriptScore/ScriptScoreQuery.cs @@ -0,0 +1,77 @@ +using System; +using System.Runtime.Serialization; +using Elasticsearch.Net.Utf8Json; + +namespace Nest +{ + /// + /// A query allowing you to modify the score of documents that are retrieved by a query. + /// This can be useful if, for example, a score function is computationally expensive and it is sufficient to + /// compute the score on a filtered set of documents. 
+ /// + [ReadAs(typeof(ScriptScoreQuery))] + [InterfaceDataContract] + public interface IScriptScoreQuery : IQuery + { + /// + /// The query to execute + /// + [DataMember(Name = "query")] + QueryContainer Query { get; set; } + + /// + /// The script to execute + /// + [DataMember(Name = "script")] + IScript Script { get; set; } + } + + /// + public class ScriptScoreQuery : QueryBase, IScriptScoreQuery + { + /// + public QueryContainer Query { get; set; } + + /// + public IScript Script { get; set; } + + protected override bool Conditionless => IsConditionless(this); + + internal override void InternalWrapInContainer(IQueryContainer c) => c.ScriptScore = this; + + internal static bool IsConditionless(IScriptScoreQuery q) + { + if (q.Script == null || q.Query.IsConditionless()) + return true; + + switch (q.Script) + { + case IInlineScript inlineScript: + return inlineScript.Source.IsNullOrEmpty(); + case IIndexedScript indexedScript: + return indexedScript.Id.IsNullOrEmpty(); + } + + return false; + } + } + + /// + public class ScriptScoreQueryDescriptor + : QueryDescriptorBase, IScriptScoreQuery> + , IScriptScoreQuery where T : class + { + protected override bool Conditionless => ScriptScoreQuery.IsConditionless(this); + QueryContainer IScriptScoreQuery.Query { get; set; } + + IScript IScriptScoreQuery.Script { get; set; } + + /// + public ScriptScoreQueryDescriptor Query(Func, QueryContainer> selector) => + Assign(selector, (a, v) => a.Query = v?.Invoke(new QueryContainerDescriptor())); + + /// + public ScriptScoreQueryDescriptor Script(Func selector) => + Assign(selector, (a, v) => a.Script = v?.Invoke(new ScriptDescriptor())); + } +} diff --git a/src/Nest/QueryDsl/Visitor/DslPrettyPrintVisitor.cs b/src/Nest/QueryDsl/Visitor/DslPrettyPrintVisitor.cs index b06a76780e7..68f0c599872 100644 --- a/src/Nest/QueryDsl/Visitor/DslPrettyPrintVisitor.cs +++ b/src/Nest/QueryDsl/Visitor/DslPrettyPrintVisitor.cs @@ -187,6 +187,8 @@ public virtual void Visit(IGeoShapeQuery query) public virtual void Visit(IScriptQuery query) => Write("script"); + public virtual void Visit(IScriptScoreQuery query) => Write("script_score"); + public virtual void Visit(IRawQuery query) => Write("raw"); public virtual void Visit(IPercolateQuery query) => Write("percolate"); diff --git a/src/Nest/QueryDsl/Visitor/QueryVisitor.cs b/src/Nest/QueryDsl/Visitor/QueryVisitor.cs index dd5f757cb9e..492f2bc7bae 100644 --- a/src/Nest/QueryDsl/Visitor/QueryVisitor.cs +++ b/src/Nest/QueryDsl/Visitor/QueryVisitor.cs @@ -86,6 +86,8 @@ public interface IQueryVisitor void Visit(IScriptQuery query); + void Visit(IScriptScoreQuery query); + void Visit(IGeoPolygonQuery query); void Visit(IGeoDistanceQuery query); @@ -237,6 +239,8 @@ public virtual void Visit(ITermsQuery query) { } public virtual void Visit(IScriptQuery query) { } + public virtual void Visit(IScriptScoreQuery query) { } + public virtual void Visit(IGeoPolygonQuery query) { } public virtual void Visit(IGeoDistanceQuery query) { } diff --git a/src/Nest/QueryDsl/Visitor/QueryWalker.cs b/src/Nest/QueryDsl/Visitor/QueryWalker.cs index dc7e8ab7834..0af0664ffec 100644 --- a/src/Nest/QueryDsl/Visitor/QueryWalker.cs +++ b/src/Nest/QueryDsl/Visitor/QueryWalker.cs @@ -44,6 +44,7 @@ public void Walk(IQueryContainer qd, IQueryVisitor visitor) VisitQuery(qd.MatchPhrase, visitor, (v, d) => v.Visit(d)); VisitQuery(qd.MatchPhrasePrefix, visitor, (v, d) => v.Visit(d)); VisitQuery(qd.Script, visitor, (v, d) => v.Visit(d)); + VisitQuery(qd.ScriptScore, visitor, (v, d) => v.Visit(d)); 
VisitQuery(qd.Exists, visitor, (v, d) => v.Visit(d)); VisitQuery(qd.GeoPolygon, visitor, (v, d) => v.Visit(d)); VisitQuery(qd.GeoDistance, visitor, (v, d) => v.Visit(d)); diff --git a/src/Nest/Requests.NoNamespace.cs b/src/Nest/Requests.NoNamespace.cs index 10dfd363dc7..124b6cc4ee6 100644 --- a/src/Nest/Requests.NoNamespace.cs +++ b/src/Nest/Requests.NoNamespace.cs @@ -2809,13 +2809,6 @@ public bool? TotalHitsAsInteger set => Q("rest_total_hits_as_int", value); } - ///Indicate if the number of documents that match the query should be tracked - public bool? TrackTotalHits - { - get => Q("track_total_hits"); - set => Q("track_total_hits", value); - } - ///Specify whether aggregation and suggester names should be prefixed by their respective types in the response public bool? TypedKeys { diff --git a/src/Nest/Search/MultiSearch/MultiSearchJsonConverter.cs b/src/Nest/Search/MultiSearch/MultiSearchFormatter.cs similarity index 95% rename from src/Nest/Search/MultiSearch/MultiSearchJsonConverter.cs rename to src/Nest/Search/MultiSearch/MultiSearchFormatter.cs index 35c95621e30..de46ef816d0 100644 --- a/src/Nest/Search/MultiSearch/MultiSearchJsonConverter.cs +++ b/src/Nest/Search/MultiSearch/MultiSearchFormatter.cs @@ -4,7 +4,7 @@ namespace Nest { - internal class MultiSearchJsonConverter : IJsonFormatter + internal class MultiSearchFormatter : IJsonFormatter { private const byte Newline = (byte)'\n'; diff --git a/src/Nest/Search/MultiSearch/MultiSearchRequest.cs b/src/Nest/Search/MultiSearch/MultiSearchRequest.cs index 01c09f773d4..67a52df1f32 100644 --- a/src/Nest/Search/MultiSearch/MultiSearchRequest.cs +++ b/src/Nest/Search/MultiSearch/MultiSearchRequest.cs @@ -6,7 +6,7 @@ namespace Nest { [MapsApi("msearch.json")] - [JsonFormatter(typeof(MultiSearchJsonConverter))] + [JsonFormatter(typeof(MultiSearchFormatter))] public partial interface IMultiSearchRequest { IDictionary Operations { get; set; } diff --git a/src/Nest/Search/Search/SearchRequest.cs b/src/Nest/Search/Search/SearchRequest.cs index 210ce7d9cc1..b8e98d4f260 100644 --- a/src/Nest/Search/Search/SearchRequest.cs +++ b/src/Nest/Search/Search/SearchRequest.cs @@ -9,70 +9,162 @@ namespace Nest [ReadAs(typeof(SearchRequest))] public partial interface ISearchRequest : ITypedSearchRequest { + /// + /// Specifies the aggregations to perform + /// [DataMember(Name = "aggs")] AggregationDictionary Aggregations { get; set; } + /// + /// Allows to collapse search results based on field values. + /// The collapsing is done by selecting only the top sorted document per collapse key. + /// For instance the query below retrieves the best tweet for each user and sorts them by number of likes. + /// + /// NOTE: The collapsing is applied to the top hits only and does not affect aggregations. + /// You can only collapse to a depth of 2. + /// + /// [DataMember(Name = "collapse")] IFieldCollapse Collapse { get; set; } + /// + /// Enables explanation for each hit on how its score was computed + /// [DataMember(Name = "explain")] bool? Explain { get; set; } + /// + /// The starting from index of the hits to return. Defaults to 0. + /// [DataMember(Name = "from")] int? From { get; set; } + /// + /// Allow to highlight search results on one or more fields. The implementation uses the either lucene + /// fast-vector-highlighter or highlighter. + /// [DataMember(Name = "highlight")] IHighlight Highlight { get; set; } + /// + /// Allows to configure different boost level per index when searching across + /// more than one indices. 
This is very handy when hits coming from one index + /// matter more than hits coming from another index (think social graph where each user has an index). + /// [DataMember(Name = "indices_boost")] [JsonFormatter(typeof(IndicesBoostFormatter))] IDictionary IndicesBoost { get; set; } + /// + /// Allows to filter out documents based on a minimum score + /// [DataMember(Name = "min_score")] double? MinScore { get; set; } + /// + /// Specify a query to apply to the search hits at the very end of a search request, + /// after aggregations have already been calculated. Useful when both search hits and aggregations + /// will be returned in the response, and a filter should only be applied to the search hits. + /// [DataMember(Name = "post_filter")] QueryContainer PostFilter { get; set; } + /// + /// The Profile API provides detailed timing information about the execution of individual components in a query. + /// It gives the user insight into how queries are executed at a low level so that the user can understand + /// why certain queries are slow, and take steps to improve their slow queries. + /// [DataMember(Name = "profile")] bool? Profile { get; set; } + /// + /// Specify the search query to perform + /// [DataMember(Name = "query")] QueryContainer Query { get; set; } + /// + /// Specify one or more queries to use for rescoring + /// [DataMember(Name = "rescore")] IList Rescore { get; set; } + /// + /// Allows to return a script evaluation (based on different fields) for each hit + /// [DataMember(Name = "script_fields")] IScriptFields ScriptFields { get; set; } + /// + /// Sort values that can be used to start returning results "after" any document in the result list. + /// [DataMember(Name = "search_after")] IList SearchAfter { get; set; } + /// The number of hits to return. Defaults to 10. [DataMember(Name = "size")] int? Size { get; set; } + /// + /// For scroll queries that return a lot of documents it is possible to split the scroll in multiple slices which can be + /// consumed independently + /// [DataMember(Name = "slice")] ISlicedScroll Slice { get; set; } + /// + /// Specifies how to sort the search hits + /// [DataMember(Name = "sort")] IList Sort { get; set; } + /// + /// Specify how the _source field is returned for each search hit. + /// When true, _source retrieval is enabled (default) + /// When false, _source retrieval is disabled, and no _source will be returned for each hit + /// When is specified, fields to include/exclude can be controlled + /// [DataMember(Name = "_source")] Union Source { get; set; } + /// + /// The suggest feature suggests similar looking terms based on a provided text by using a suggester + /// [DataMember(Name = "suggest")] ISuggestContainer Suggest { get; set; } + /// + /// The maximum number of documents to collect for each shard, upon reaching which the query execution will terminate + /// early. + /// If set, the response will have a boolean field terminated_early to indicate whether the query execution has actually + /// terminated_early. + /// [DataMember(Name = "terminate_after")] long? TerminateAfter { get; set; } + /// + /// A search timeout, bounding the search request to be executed within the + /// specified time value and bail with the hits accumulated up + /// to that point, when expired. Defaults to no timeout. + /// [DataMember(Name = "timeout")] string Timeout { get; set; } + /// + /// Make sure we keep calculating score even if we are sorting on a field. + /// [DataMember(Name = "track_scores")] bool? 
TrackScores { get; set; } + /// + /// Indicate if the number of documents that match the query should be tracked. + /// + [DataMember(Name = "track_total_hits")] + bool? TrackTotalHits { get; set; } + + /// + /// Return a version for each search hit + /// [DataMember(Name = "version")] bool? Version { get; set; } } @@ -85,33 +177,56 @@ public partial interface ISearchRequest : ISearchRequest { } [DataContract] public partial class SearchRequest { - public Fields StoredFields { get; set; } - public Fields DocValueFields { get; set; } + /// public AggregationDictionary Aggregations { get; set; } + /// public IFieldCollapse Collapse { get; set; } + /// + public Fields DocValueFields { get; set; } + /// public bool? Explain { get; set; } + /// public int? From { get; set; } - + /// public IHighlight Highlight { get; set; } - + /// [JsonFormatter(typeof(IndicesBoostFormatter))] public IDictionary IndicesBoost { get; set; } - + /// public double? MinScore { get; set; } + /// public QueryContainer PostFilter { get; set; } + /// public bool? Profile { get; set; } + /// public QueryContainer Query { get; set; } + /// public IList Rescore { get; set; } + /// public IScriptFields ScriptFields { get; set; } + /// public IList SearchAfter { get; set; } + /// public int? Size { get; set; } + /// public ISlicedScroll Slice { get; set; } + /// public IList Sort { get; set; } + /// public Union Source { get; set; } + /// + public Fields StoredFields { get; set; } + /// public ISuggestContainer Suggest { get; set; } + /// public long? TerminateAfter { get; set; } + /// public string Timeout { get; set; } + /// public bool? TrackScores { get; set; } + /// + public bool? TrackTotalHits { get; set; } + /// public bool? Version { get; set; } protected override HttpMethod HttpMethod => @@ -148,7 +263,6 @@ public partial class SearchDescriptor where TInferDocument : cla bool? ISearchRequest.Explain { get; set; } int? ISearchRequest.From { get; set; } IHighlight ISearchRequest.Highlight { get; set; } - IDictionary ISearchRequest.IndicesBoost { get; set; } double? ISearchRequest.MinScore { get; set; } QueryContainer ISearchRequest.PostFilter { get; set; } @@ -164,83 +278,61 @@ public partial class SearchDescriptor where TInferDocument : cla Fields ISearchRequest.StoredFields { get; set; } ISuggestContainer ISearchRequest.Suggest { get; set; } long? ISearchRequest.TerminateAfter { get; set; } - string ISearchRequest.Timeout { get; set; } bool? ISearchRequest.TrackScores { get; set; } + bool? ISearchRequest.TrackTotalHits { get; set; } bool? ISearchRequest.Version { get; set; } protected sealed override void RequestDefaults(SearchRequestParameters parameters) => TypedKeys(); - public SearchDescriptor Aggregations(Func, IAggregationContainer> aggregationsSelector) => + /// + public SearchDescriptor Aggregations( + Func, IAggregationContainer> aggregationsSelector + ) => Assign(aggregationsSelector(new AggregationContainerDescriptor())?.Aggregations, (a, v) => a.Aggregations = v); + /// public SearchDescriptor Aggregations(AggregationDictionary aggregations) => Assign(aggregations, (a, v) => a.Aggregations = v); + /// public SearchDescriptor Source(bool enabled = true) => Assign(enabled, (a, v) => a.Source = v); + /// public SearchDescriptor Source(Func, ISourceFilter> selector) => Assign(selector, (a, v) => a.Source = new Union(v?.Invoke(new SourceFilterDescriptor()))); - /// The number of hits to return. Defaults to 10. + /// public SearchDescriptor Size(int? 
size) => Assign(size, (a, v) => a.Size = v); - /// - /// The number of hits to return. Alias for . Defaults to 10. - /// + /// public SearchDescriptor Take(int? take) => Size(take); - /// - /// The starting from index of the hits to return. Defaults to 0. - /// + /// public SearchDescriptor From(int? from) => Assign(from, (a, v) => a.From = v); - /// - /// The starting from index of the hits to return. Alias for . Defaults to 0. - /// + /// public SearchDescriptor Skip(int? skip) => From(skip); - /// - /// A search timeout, bounding the search request to be executed within the - /// specified time value and bail with the hits accumulated up - /// to that point when expired. Defaults to no timeout. - /// + /// public SearchDescriptor Timeout(string timeout) => Assign(timeout, (a, v) => a.Timeout = v); - /// - /// Enables explanation for each hit on how its score was computed. - /// (Use .DocumentsWithMetadata on the return results) - /// + /// public SearchDescriptor Explain(bool? explain = true) => Assign(explain, (a, v) => a.Explain = v); - /// - /// Returns a version for each search hit. (Use .DocumentsWithMetadata on the return results) - /// + /// public SearchDescriptor Version(bool? version = true) => Assign(version, (a, v) => a.Version = v); - /// - /// Make sure we keep calculating score even if we are sorting on a field. - /// + /// public SearchDescriptor TrackScores(bool? trackscores = true) => Assign(trackscores, (a, v) => a.TrackScores = v); - /// - /// The Profile API provides detailed timing information about the execution of individual components in a query. - /// It gives the user insight into how queries are executed at a low level so that the user can understand - /// why certain queries are slow, and take steps to improve their slow queries. - /// + /// public SearchDescriptor Profile(bool? profile = true) => Assign(profile, (a, v) => a.Profile = v); - /// - /// Allows to filter out documents based on a minimum score: - /// + /// public SearchDescriptor MinScore(double? minScore) => Assign(minScore, (a, v) => a.MinScore = v); - /// - /// The maximum number of documents to collect for each shard, upon reaching which the query execution will terminate - /// early. - /// If set, the response will have a boolean field terminated_early to indicate whether the query execution has actually - /// terminated_early. - /// + /// public SearchDescriptor TerminateAfter(long? terminateAfter) => Assign(terminateAfter, (a, v) => a.TerminateAfter = v); /// @@ -274,102 +366,76 @@ public SearchDescriptor Source(Func /// - public SearchDescriptor ExecuteOnPreferredNode(string node) => Preference(node.IsNullOrEmpty() ? null : $"_prefer_node:{node}"); + public SearchDescriptor ExecuteOnPreferredNode(string node) => + Preference(node.IsNullOrEmpty() ? null : $"_prefer_node:{node}"); - /// - /// Allows to configure different boost level per index when searching across - /// more than one indices. This is very handy when hits coming from one index - /// matter more than hits coming from another index (think social graph where each user has an index). - /// + /// public SearchDescriptor IndicesBoost(Func, FluentDictionary> boost) => Assign(boost, (a, v) => a.IndicesBoost = v?.Invoke(new FluentDictionary())); - /// - /// Allows to selectively load specific fields for each document - /// represented by a search hit. Defaults to load the internal _source field. 
- /// + /// public SearchDescriptor StoredFields(Func, IPromise> fields) => Assign(fields, (a, v) => a.StoredFields = v?.Invoke(new FieldsDescriptor())?.Value); + /// public SearchDescriptor StoredFields(Fields fields) => Assign(fields, (a, v) => a.StoredFields = v); + /// public SearchDescriptor ScriptFields(Func> selector) => Assign(selector, (a, v) => a.ScriptFields = v?.Invoke(new ScriptFieldsDescriptor())?.Value); + /// public SearchDescriptor DocValueFields(Func, IPromise> fields) => Assign(fields, (a, v) => a.DocValueFields = v?.Invoke(new FieldsDescriptor())?.Value); + /// public SearchDescriptor DocValueFields(Fields fields) => Assign(fields, (a, v) => a.DocValueFields = v); - /// - /// A comma-separated list of fields to return as the field data representation of a field for each hit - /// + /// public SearchDescriptor Sort(Func, IPromise>> selector) => Assign(selector, (a, v) => a.Sort = v?.Invoke(new SortDescriptor())?.Value); - /// - /// Sort values that can be used to start returning results "after" any document in the result list. - /// + /// public SearchDescriptor SearchAfter(IList searchAfter) => Assign(searchAfter, (a, v) => a.SearchAfter = v); - /// - /// Sort values that can be used to start returning results "after" any document in the result list. - /// + /// public SearchDescriptor SearchAfter(params object[] searchAfter) => Assign(searchAfter, (a, v) => a.SearchAfter = v); - /// - /// The suggest feature suggests similar looking terms based on a provided text by using a suggester - /// + /// public SearchDescriptor Suggest(Func, IPromise> selector) => Assign(selector, (a, v) => a.Suggest = v?.Invoke(new SuggestContainerDescriptor())?.Value); - /// - /// Describe the query to perform using a query descriptor lambda - /// + /// public SearchDescriptor Query(Func, QueryContainer> query) => Assign(query, (a, v) => a.Query = v?.Invoke(new QueryContainerDescriptor())); - /// - /// For scroll queries that return a lot of documents it is possible to split the scroll in multiple slices which can be - /// consumed independently - /// + /// public SearchDescriptor Slice(Func, ISlicedScroll> selector) => Assign(selector, (a, v) => a.Slice = v?.Invoke(new SlicedScrollDescriptor())); /// /// Shortcut to default to a match all query /// - public SearchDescriptor MatchAll(Func selector = null) => Query(q => q.MatchAll(selector)); + public SearchDescriptor MatchAll(Func selector = null) => + Query(q => q.MatchAll(selector)); - /// - /// Filter search using a filter descriptor lambda - /// + /// public SearchDescriptor PostFilter(Func, QueryContainer> filter) => Assign(filter, (a, v) => a.PostFilter = v?.Invoke(new QueryContainerDescriptor())); - /// - /// Allow to highlight search results on one or more fields. The implementation uses the either lucene - /// fast-vector-highlighter or highlighter. - /// + /// public SearchDescriptor Highlight(Func, IHighlight> highlightSelector) => Assign(highlightSelector, (a, v) => a.Highlight = v?.Invoke(new HighlightDescriptor())); - /// - /// Allows to collapse search results based on field values. - /// The collapsing is done by selecting only the top sorted document per collapse key. - /// For instance the query below retrieves the best tweet for each user and sorts them by number of likes. - /// - /// NOTE: The collapsing is applied to the top hits only and does not affect aggregations. - /// You can only collapse to a depth of 2. 
- /// - /// + /// public SearchDescriptor Collapse(Func, IFieldCollapse> collapseSelector) => Assign(collapseSelector, (a, v) => a.Collapse = v?.Invoke(new FieldCollapseDescriptor())); - /// - /// Allows you to specify one or more queries to use for rescoring - /// + /// public SearchDescriptor Rescore(Func, IPromise>> rescoreSelector) => Assign(rescoreSelector, (a, v) => a.Rescore = v?.Invoke(new RescoringDescriptor()).Value); + + /// + public SearchDescriptor TrackTotalHits(bool? trackTotalHits = true) => Assign(trackTotalHits, (a, v) => a.TrackTotalHits = v); } } diff --git a/src/Tests/Tests.Reproduce/GithubIssue3907.cs b/src/Tests/Tests.Reproduce/GithubIssue3907.cs new file mode 100644 index 00000000000..e958e5f7f34 --- /dev/null +++ b/src/Tests/Tests.Reproduce/GithubIssue3907.cs @@ -0,0 +1,32 @@ +using System; +using System.Net; +using Elastic.Xunit.XunitPlumbing; +using FluentAssertions; +using Nest; +using Tests.Core.ManagedElasticsearch.Clusters; +using Tests.Domain; + +namespace Tests.Reproduce +{ + public class GithubIssue3907 : IClusterFixture + { + private readonly IntrusiveOperationCluster _cluster; + + // use intrusive operation cluster because we're changing the underlying http handler + // and this cluster runs with a max concurrency of 1, so changing http handler + // will not affect other integration tests + public GithubIssue3907(IntrusiveOperationCluster cluster) => _cluster = cluster; + + [I] + public void NotUsingSocketsHttpHandlerDoesNotCauseException() + { + AppContext.SetSwitch("System.Net.Http.UseSocketsHttpHandler", false); + + var response = _cluster.Client.Indices.Exists("non_existent_index"); + response.ApiCall.HttpStatusCode.Should().Be(404); + response.OriginalException.Should().BeNull(); + + AppContext.SetSwitch("System.Net.Http.UseSocketsHttpHandler", true); + } + } +} diff --git a/src/Tests/Tests/Analysis/TokenFilters/TokenFilterTests.cs b/src/Tests/Tests/Analysis/TokenFilters/TokenFilterTests.cs index fca55ce516f..a79ba57bc4c 100644 --- a/src/Tests/Tests/Analysis/TokenFilters/TokenFilterTests.cs +++ b/src/Tests/Tests/Analysis/TokenFilters/TokenFilterTests.cs @@ -833,6 +833,7 @@ public class WordDelimiterGraphTests : TokenFilterAssertionBase (n, tf) => tf .WordDelimiterGraph(n, t => t + .AdjustOffsets() .CatenateAll() .CatenateNumbers() .CatenateWords() @@ -848,6 +849,7 @@ public class WordDelimiterGraphTests : TokenFilterAssertionBase new WordDelimiterGraphTokenFilter { + AdjustOffsets = true, CatenateAll = true, CatenateNumbers = true, CatenateWords = true, @@ -863,6 +865,7 @@ public class WordDelimiterGraphTests : TokenFilterAssertionBase new { type = "word_delimiter_graph", + adjust_offsets = true, generate_word_parts = true, generate_number_parts = true, catenate_words = true, diff --git a/src/Tests/Tests/CommonOptions/DateMath/DateMathExpressions.doc.cs b/src/Tests/Tests/CommonOptions/DateMath/DateMathExpressions.doc.cs index 144e06d54b7..3d039b4fe36 100644 --- a/src/Tests/Tests/CommonOptions/DateMath/DateMathExpressions.doc.cs +++ b/src/Tests/Tests/CommonOptions/DateMath/DateMathExpressions.doc.cs @@ -61,7 +61,15 @@ [U] public void SimpleExpressions() * anchor will be an actual `DateTime`, even after a serialization/deserialization round trip */ var date = new DateTime(2015, 05, 05); - Expect("2015-05-05T00:00:00") + + /** + * will serialize to + */ + //json + var expected = "2015-05-05T00:00:00"; + + // hide + Expect(expected) .WhenSerializing(date) .AssertSubject(dateMath => ((IDateMath)dateMath) .Anchor.Match( @@ -69,6 +77,28 @@ [U] public void 
diff --git a/src/Tests/Tests.Reproduce/GithubIssue3907.cs b/src/Tests/Tests.Reproduce/GithubIssue3907.cs
new file mode 100644
index 00000000000..e958e5f7f34
--- /dev/null
+++ b/src/Tests/Tests.Reproduce/GithubIssue3907.cs
@@ -0,0 +1,32 @@
+using System;
+using System.Net;
+using Elastic.Xunit.XunitPlumbing;
+using FluentAssertions;
+using Nest;
+using Tests.Core.ManagedElasticsearch.Clusters;
+using Tests.Domain;
+
+namespace Tests.Reproduce
+{
+	public class GithubIssue3907 : IClusterFixture<IntrusiveOperationCluster>
+	{
+		private readonly IntrusiveOperationCluster _cluster;
+
+		// use intrusive operation cluster because we're changing the underlying http handler
+		// and this cluster runs with a max concurrency of 1, so changing http handler
+		// will not affect other integration tests
+		public GithubIssue3907(IntrusiveOperationCluster cluster) => _cluster = cluster;
+
+		[I]
+		public void NotUsingSocketsHttpHandlerDoesNotCauseException()
+		{
+			AppContext.SetSwitch("System.Net.Http.UseSocketsHttpHandler", false);
+
+			var response = _cluster.Client.Indices.Exists("non_existent_index");
+			response.ApiCall.HttpStatusCode.Should().Be(404);
+			response.OriginalException.Should().BeNull();
+
+			AppContext.SetSwitch("System.Net.Http.UseSocketsHttpHandler", true);
+		}
+	}
+}
diff --git a/src/Tests/Tests/Analysis/TokenFilters/TokenFilterTests.cs b/src/Tests/Tests/Analysis/TokenFilters/TokenFilterTests.cs
index fca55ce516f..a79ba57bc4c 100644
--- a/src/Tests/Tests/Analysis/TokenFilters/TokenFilterTests.cs
+++ b/src/Tests/Tests/Analysis/TokenFilters/TokenFilterTests.cs
@@ -833,6 +833,7 @@ public class WordDelimiterGraphTests : TokenFilterAssertionBase
 			(n, tf) => tf
 				.WordDelimiterGraph(n, t => t
+					.AdjustOffsets()
 					.CatenateAll()
 					.CatenateNumbers()
 					.CatenateWords()
@@ -848,6 +849,7 @@ public class WordDelimiterGraphTests : TokenFilterAssertionBase
 			new WordDelimiterGraphTokenFilter
 			{
+				AdjustOffsets = true,
 				CatenateAll = true,
 				CatenateNumbers = true,
 				CatenateWords = true,
@@ -863,6 +865,7 @@ public class WordDelimiterGraphTests : TokenFilterAssertionBase
 			new
 			{
 				type = "word_delimiter_graph",
+				adjust_offsets = true,
 				generate_word_parts = true,
 				generate_number_parts = true,
 				catenate_words = true,
diff --git a/src/Tests/Tests/CommonOptions/DateMath/DateMathExpressions.doc.cs b/src/Tests/Tests/CommonOptions/DateMath/DateMathExpressions.doc.cs
index 144e06d54b7..3d039b4fe36 100644
--- a/src/Tests/Tests/CommonOptions/DateMath/DateMathExpressions.doc.cs
+++ b/src/Tests/Tests/CommonOptions/DateMath/DateMathExpressions.doc.cs
@@ -61,7 +61,15 @@ [U] public void SimpleExpressions()
 			* anchor will be an actual `DateTime`, even after a serialization/deserialization round trip
 			*/
 			var date = new DateTime(2015, 05, 05);
-			Expect("2015-05-05T00:00:00")
+
+			/**
+			* will serialize to
+			*/
+			//json
+			var expected = "2015-05-05T00:00:00";
+
+			// hide
+			Expect(expected)
 				.WhenSerializing(date)
 				.AssertSubject(dateMath => ((IDateMath)dateMath)
 					.Anchor.Match(
@@ -69,6 +77,28 @@ [U] public void SimpleExpressions()
 						s => s.Should().BeNull()
 					)
 				);
+
+			/**
+			* When the `DateTime` is local or UTC, the time zone information is included.
+			* For example, for a UTC `DateTime`
+			*/
+			var utcDate = new DateTime(2015, 05, 05, 0, 0, 0, DateTimeKind.Utc);
+
+			/**
+			* will serialize to
+			*/
+			//json
+			expected = "2015-05-05T00:00:00Z";
+
+			// hide
+			Expect(expected)
+				.WhenSerializing(utcDate)
+				.AssertSubject(dateMath => ((IDateMath)dateMath)
+					.Anchor.Match(
+						d => d.Should().Be(utcDate),
+						s => s.Should().BeNull()
+					)
+				);
 		}
 
 		[U] public void ComplexExpressions()
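For a `DateTime` of `DateTimeKind.Local`, the serialized string carries the machine's UTC offset rather than the `Z` suffix, so the exact value depends on the local time zone. A sketch only; the offset shown in the comment is purely an example:

[source,csharp]
----
var localDate = new DateTime(2015, 05, 05, 0, 0, 0, DateTimeKind.Local);
// serializes with the machine's offset, e.g. "2015-05-05T00:00:00+02:00" on a UTC+2 machine
----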
diff --git a/src/Tests/Tests/QueryDsl/Compound/FunctionScore/FunctionScoreQueryUsageTests.cs b/src/Tests/Tests/QueryDsl/Compound/FunctionScore/FunctionScoreQueryUsageTests.cs
index 52a46f11f11..c447de19448 100644
--- a/src/Tests/Tests/QueryDsl/Compound/FunctionScore/FunctionScoreQueryUsageTests.cs
+++ b/src/Tests/Tests/QueryDsl/Compound/FunctionScore/FunctionScoreQueryUsageTests.cs
@@ -37,7 +37,19 @@ public FunctionScoreQueryUsageTests(ReadOnlyCluster i, EndpointUsage usage) : ba
 			MinScore = 1.0,
 			Functions = new List<IScoreFunction>
 			{
-				new ExponentialDecayFunction { Origin = 1.0, Decay = 0.5, Field = Field<Project>(p => p.NumberOfCommits), Scale = 0.1, Weight = 2.1 },
+				new ExponentialDecayFunction
+				{
+					Origin = 1.0,
+					Decay = 0.5,
+					Field = Field<Project>(p => p.NumberOfCommits),
+					Scale = 0.1,
+					Weight = 2.1,
+					Filter = new NumericRangeQuery
+					{
+						Field = Field<Project>(f => f.NumberOfContributors),
+						GreaterThan = 10
+					}
+				},
 				new GaussDateDecayFunction { Origin = DateMath.Now, Field = Field<Project>(p => p.LastActivity), Decay = 0.5, Scale = TimeSpan.FromDays(1) },
 				new LinearGeoDecayFunction
@@ -52,7 +64,7 @@ public FunctionScoreQueryUsageTests(ReadOnlyCluster i, EndpointUsage usage) : ba
 				new RandomScoreFunction { Seed = 1337, Field = "_seq_no" },
 				new RandomScoreFunction { Seed = "randomstring", Field = "_seq_no" },
 				new WeightFunction { Weight = 1.0 },
-				new ScriptScoreFunction { Script = new InlineScript("Math.log(2 + doc['numberOfCommits'].value)") }
+				new ScriptScoreFunction { Script = new InlineScript("Math.log(2 + doc['numberOfCommits'].value)"), Weight = 2.0 }
 			}
 		};
@@ -76,7 +88,17 @@ public FunctionScoreQueryUsageTests(ReadOnlyCluster i, EndpointUsage usage) : ba
 						decay = 0.5
 					}
 				},
-				weight = 2.1
+				weight = 2.1,
+				filter = new
+				{
+					range = new
+					{
+						numberOfContributors = new
+						{
+							gt = 10.0
+						}
+					}
+				}
 			},
 			new
 			{
@@ -127,7 +149,8 @@ public FunctionScoreQueryUsageTests(ReadOnlyCluster i, EndpointUsage usage) : ba
 					{
 						source = "Math.log(2 + doc['numberOfCommits'].value)"
 					}
-				}
+				},
+				weight = 2.0
 			}
 		},
 		max_boost = 20.0,
@@ -150,15 +173,36 @@ protected override QueryContainer QueryFluent(QueryContainerDescriptor
 			.MaxBoost(20.0)
 			.MinScore(1.0)
 			.Functions(f => f
-				.Exponential(b => b.Field(p => p.NumberOfCommits).Decay(0.5).Origin(1.0).Scale(0.1).Weight(2.1))
+				.Exponential(b => b
+					.Field(p => p.NumberOfCommits)
+					.Decay(0.5)
+					.Origin(1.0)
+					.Scale(0.1)
+					.Weight(2.1)
+					.Filter(fi => fi
+						.Range(r => r
+							.Field(p => p.NumberOfContributors)
+							.GreaterThan(10)
+						)
+					)
+				)
 				.GaussDate(b => b.Field(p => p.LastActivity).Origin(DateMath.Now).Decay(0.5).Scale("1d"))
-				.LinearGeoLocation(b =>
-					b.Field(p => p.LocationPoint).Origin(new GeoLocation(70, -70)).Scale(Distance.Miles(1)).MultiValueMode(MultiValueMode.Average))
+				.LinearGeoLocation(b => b
+					.Field(p => p.LocationPoint)
+					.Origin(new GeoLocation(70, -70))
+					.Scale(Distance.Miles(1))
+					.MultiValueMode(MultiValueMode.Average)
+				)
 				.FieldValueFactor(b => b.Field(p => p.NumberOfContributors).Factor(1.1).Missing(0.1).Modifier(FieldValueFactorModifier.Square))
 				.RandomScore(r => r.Seed(1337).Field("_seq_no"))
 				.RandomScore(r => r.Seed("randomstring").Field("_seq_no"))
 				.Weight(1.0)
-				.ScriptScore(s => s.Script(ss => ss.Source("Math.log(2 + doc['numberOfCommits'].value)")))
+				.ScriptScore(s => s
+					.Script(ss => ss
+						.Source("Math.log(2 + doc['numberOfCommits'].value)")
+					)
+					.Weight(2)
+				)
 			)
 		);
 }
diff --git a/src/Tests/Tests/QueryDsl/Geo/Shape/GeoWKTTests.cs b/src/Tests/Tests/QueryDsl/Geo/Shape/GeoWKTTests.cs
index b941cf94ae7..dbbbecce7f8 100644
--- a/src/Tests/Tests/QueryDsl/Geo/Shape/GeoWKTTests.cs
+++ b/src/Tests/Tests/QueryDsl/Geo/Shape/GeoWKTTests.cs
@@ -23,6 +23,22 @@ public void ReadAndWritePoint()
 			GeoWKTWriter.Write(point).Should().Be(wkt);
 		}
 
+		[U]
+		public void ReadAndWritePointWithExponent()
+		{
+			var wkt = "POINT (1.2E2 -2.5E-05)";
+			var shape = GeoWKTReader.Read(wkt);
+
+			shape.Should().BeOfType<PointGeoShape>();
+			var point = (PointGeoShape)shape;
+
+			point.Coordinates.Latitude.Should().Be(-0.000025);
+			point.Coordinates.Longitude.Should().Be(120);
+
+			// 1.2E2 will be expanded
+			GeoWKTWriter.Write(point).Should().Be("POINT (120 -2.5E-05)");
+		}
+
 		[U]
 		public void ReadAndWriteMultiPoint()
 		{
diff --git a/src/Tests/Tests/QueryDsl/Specialized/ScriptScore/ScriptScoreQueryUsageTests.cs b/src/Tests/Tests/QueryDsl/Specialized/ScriptScore/ScriptScoreQueryUsageTests.cs
new file mode 100644
index 00000000000..66ba1f04a48
--- /dev/null
+++ b/src/Tests/Tests/QueryDsl/Specialized/ScriptScore/ScriptScoreQueryUsageTests.cs
@@ -0,0 +1,122 @@
+using System.Collections.Generic;
+using Nest;
+using Tests.Core.ManagedElasticsearch.Clusters;
+using Tests.Domain;
+using Tests.Framework.EndpointTests.TestState;
+
+namespace Tests.QueryDsl.Specialized.ScriptScore
+{
+	/**
+	* A query allowing you to modify the score of documents that are retrieved by a query.
+	* This can be useful if, for example, a score function is computationally expensive and
+	* it is sufficient to compute the score on a filtered set of documents.
+	*
+	* See the Elasticsearch documentation on {ref_current}/query-dsl-script-score-query.html[script_score query] for more details.
+	*/
+	public class ScriptScoreQueryUsageTests : QueryDslUsageTestsBase
+	{
+		private static readonly string _scriptScoreSource = "decayNumericLinear(params.origin, params.scale, params.offset, params.decay, doc['numberOfCommits'].value)";
+
+		public ScriptScoreQueryUsageTests(ReadOnlyCluster i, EndpointUsage usage) : base(i, usage) { }
+
+		protected override ConditionlessWhen ConditionlessWhen => new ConditionlessWhen<IScriptScoreQuery>(a => a.ScriptScore)
+		{
+			q =>
+			{
+				q.Query = null;
+			},
+			q =>
+			{
+				q.Script = null;
+			},
+			q =>
+			{
+				q.Script = new InlineScript(null);
+			},
+			q =>
+			{
+				q.Script = new InlineScript("");
+			},
+			q =>
+			{
+				q.Script = new IndexedScript(null);
+			},
+			q =>
+			{
+				q.Script = new IndexedScript("");
+			}
+		};
+
+		protected override QueryContainer QueryInitializer => new ScriptScoreQuery
+		{
+			Name = "named_query",
+			Boost = 1.1,
+			Query = new NumericRangeQuery
+			{
+				Field = Infer.Field<Project>(f => f.NumberOfCommits),
+				GreaterThan = 50
+			},
+			Script = new InlineScript(_scriptScoreSource)
+			{
+				Params = new Dictionary<string, object>
+				{
+					{ "origin", 100 },
+					{ "scale", 10 },
+					{ "decay", 0.5 },
+					{ "offset", 0 }
+				}
+			},
+		};
+
+		protected override object QueryJson => new
+		{
+			script_score = new
+			{
+				_name = "named_query",
+				boost = 1.1,
+				query = new
+				{
+					range = new
+					{
+						numberOfCommits = new
+						{
+							gt = 50.0
+						}
+					}
+				},
+				script = new
+				{
+					source = _scriptScoreSource,
+					@params = new
+					{
+						origin = 100,
+						scale = 10,
+						decay = 0.5,
+						offset = 0
+					}
+				}
+			}
+		};
+
+		protected override QueryContainer QueryFluent(QueryContainerDescriptor<Project> q) => q
+			.ScriptScore(sn => sn
+				.Name("named_query")
+				.Boost(1.1)
+				.Query(qq => qq
+					.Range(r => r
+						.Field(f => f.NumberOfCommits)
+						.GreaterThan(50)
+					)
+				)
+				.Script(s => s
+					.Source(_scriptScoreSource)
+					.Params(p => p
+						.Add("origin", 100)
+						.Add("scale", 10)
+						.Add("decay", 0.5)
+						.Add("offset", 0)
+					)
+				)
+			);
+	}
+}
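Outside of the usage tests, the same `script_score` query would typically be sent as part of a full search request. A minimal sketch, assuming an `IElasticClient` instance named `client`; the script source here is illustrative only and is not taken from the test above:

[source,csharp]
----
var response = client.Search<Project>(s => s
	.Query(q => q
		.ScriptScore(ss => ss
			.Query(qq => qq
				.Range(r => r
					.Field(f => f.NumberOfCommits)
					.GreaterThan(50)
				)
			)
			.Script(sc => sc
				.Source("doc['numberOfCommits'].value / 10.0") // hypothetical scoring script
			)
		)
	)
);
----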
diff --git a/src/Tests/Tests/Search/MultiSearch/MultiSearchApiTests.cs b/src/Tests/Tests/Search/MultiSearch/MultiSearchApiTests.cs
index 1ccd6f1beb5..3192ac7fc7e 100644
--- a/src/Tests/Tests/Search/MultiSearch/MultiSearchApiTests.cs
+++ b/src/Tests/Tests/Search/MultiSearch/MultiSearchApiTests.cs
@@ -25,7 +25,7 @@ public MultiSearchApiTests(ReadOnlyCluster cluster, EndpointUsage usage) : base(
 		protected override string UrlPath => "/project/_msearch";
 		protected override int ExpectStatusCode => 200;
 		protected override HttpMethod HttpMethod => HttpMethod.POST;
-
+
 		protected override LazyResponses ClientUsage() => Calls(
 			(c, f) => c.MultiSearch(Index<Project>(), f),
 			(c, f) => c.MultiSearchAsync(Index<Project>(), f),
@@ -37,7 +37,7 @@ protected override LazyResponses ClientUsage() => Calls(
 		protected override object ExpectJson => new object[]
 		{
 			new { },
-			new { from = 0, size = 10, query = new { match_all = new { } } },
+			new { from = 0, size = 10, query = new { match_all = new { } }, track_total_hits = true },
 			new { search_type = "dfs_query_then_fetch" },
 			new { },
 			new { index = "devs" },
@@ -60,7 +60,7 @@ protected override LazyResponses ClientUsage() => Calls(
 		protected override MultiSearchDescriptor NewDescriptor() => new MultiSearchDescriptor(Index<Project>());
 
 		protected override Func<MultiSearchDescriptor, IMultiSearchRequest> Fluent => ms => ms
-			.Search<Project>("10projects", s => s.Query(q => q.MatchAll()).From(0).Size(10))
+			.Search<Project>("10projects", s => s.Query(q => q.MatchAll()).From(0).Size(10).TrackTotalHits())
 			.Search<Project>("dfs_projects", s => s.SearchType(SearchType.DfsQueryThenFetch))
 			.Search<Developer>("5developers", s => s.Query(q => q.MatchAll()).From(0).Size(5))
 			.Search<Developer>("infer_type_name", s => s.Index("devs").From(0).Size(5).MatchAll())
@@ -90,7 +90,7 @@ protected override LazyResponses ClientUsage() => Calls(
 		{
 			Operations = new Dictionary<string, ISearchRequest>
 			{
-				{ "10projects", new SearchRequest<Project> { From = 0, Size = 10, Query = new QueryContainer(new MatchAllQuery()) } },
+				{ "10projects", new SearchRequest<Project> { From = 0, Size = 10, Query = new QueryContainer(new MatchAllQuery()), TrackTotalHits = true } },
 				{ "dfs_projects", new SearchRequest<Project> { SearchType = SearchType.DfsQueryThenFetch } },
 				{ "5developers", new SearchRequest<Developer> { From = 0, Size = 5, Query = new QueryContainer(new MatchAllQuery()) } },
 				{ "infer_type_name", new SearchRequest<Developer>("devs") { From = 0, Size = 5, Query = new QueryContainer(new MatchAllQuery()) } },
@@ -135,6 +135,7 @@ [I] public Task AssertResponse() => AssertOnAllResponses(r =>
 			var projects = r.GetResponse<Project>("10projects");
 			projects.ShouldBeValid();
 			projects.Documents.Should().HaveCount(10);
+			projects.HitsMetadata.Total.Relation.Should().Be(TotalHitsRelation.EqualTo);
 
 			var projectsCount = r.GetResponse<Project>("count_project");
 			projectsCount.Should().BeNull();
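With `track_total_hits` enabled, the relation asserted above can be read from any search response to tell whether the reported total is exact or a lower bound. A sketch, assuming an `IElasticClient` instance named `client`:

[source,csharp]
----
var response = client.Search<Project>(s => s.TrackTotalHits().MatchAll());
var total = response.HitsMetadata.Total.Value; // total hit count reported by Elasticsearch
var isExact = response.HitsMetadata.Total.Relation == TotalHitsRelation.EqualTo; // false would indicate a lower bound
----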