@@ -36,7 +36,7 @@ public static class Request extends AbstractGetResourcesRequest {

        public Request() {
            // Put our own defaults for backwards compatibility
-           super(null, null, true);
+           super(null, PageParams.defaultParams(), true);
        }

        public Request(StreamInput in) throws IOException {
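For context, a rough sketch of what the new default is assumed to mean (the body of PageParams.defaultParams() is not shown in this diff): the old call passed null page params, so a plain get-filters request presumably fell back to a much smaller page, while the new call supplies explicit paging defaults so the request returns up to 100 filters, as exercised by the MlFiltersIT test added below. A hypothetical stand-in for that assumption:

    // Hypothetical stand-in for the paging defaults assumed above; the real class is
    // PageParams in x-pack core, and the exact values here (from = 0, size = 100) are
    // assumptions matching the "up to 100 by default" test added in this PR.
    final class PageParamsSketch {
        static final int DEFAULT_FROM = 0;   // assumed default offset
        static final int DEFAULT_SIZE = 100; // assumed default page size

        final int from;
        final int size;

        private PageParamsSketch(int from, int size) {
            this.from = from;
            this.size = size;
        }

        // analogue of PageParams.defaultParams(): page from 0, return up to 100 results
        static PageParamsSketch defaultParams() {
            return new PageParamsSketch(DEFAULT_FROM, DEFAULT_SIZE);
        }
    }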
@@ -0,0 +1,31 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+package org.elasticsearch.xpack.ml.integration;
+
+import org.elasticsearch.xpack.core.ml.action.GetFiltersAction;
+import org.elasticsearch.xpack.core.ml.job.config.MlFilter;
+
+import static org.hamcrest.Matchers.equalTo;
+
+public class MlFiltersIT extends MlNativeIntegTestCase {
+
+    @Override
+    protected void cleanUpResources() {
+        // nothing to clean
+    }
+
+    public void testGetFilters_ShouldReturnUpTo100ByDefault() {
Member:
#nit
This seems like a very expensive test: it spins up 3 nodes and native processes just to make filter API calls.

I suppose it is thorough. We might be able to move it to our single node internal cluster tests at some point.

Contributor (author):
Good point. I fixed it.

+        int filtersCount = randomIntBetween(11, 100);
+        for (int i = 0; i < filtersCount; i++) {
+            putMlFilter(MlFilter.builder("filter-" + i).setItems("item-" + i).build());
+        }
+
+        GetFiltersAction.Response filters = getMlFilters();
+        assertThat((int) filters.getFilters().count(), equalTo(filtersCount));
+        assertThat(filters.getFilters().results().size(), equalTo(filtersCount));
+    }
+}
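As a rough illustration of the reviewer's suggestion above (not necessarily the author's actual fix), the same assertions could run against a single-node internal cluster instead of three nodes with native processes. A minimal sketch, assuming a single-node base class along the lines of MlSingleNodeTestCase that starts one internal node and exposes client(); the put/get helper calls are inlined here rather than taken from MlNativeIntegTestCase:

    package org.elasticsearch.xpack.ml.integration;

    import org.elasticsearch.xpack.core.ml.action.GetFiltersAction;
    import org.elasticsearch.xpack.core.ml.action.PutFilterAction;
    import org.elasticsearch.xpack.core.ml.job.config.MlFilter;

    import static org.hamcrest.Matchers.equalTo;

    // Sketch only: MlSingleNodeTestCase is assumed to start a single internal
    // cluster node and provide client() and the usual randomIntBetween helper.
    public class MlFiltersSingleNodeIT extends MlSingleNodeTestCase {

        public void testGetFilters_ShouldReturnUpTo100ByDefault() {
            int filtersCount = randomIntBetween(11, 100);
            for (int i = 0; i < filtersCount; i++) {
                MlFilter filter = MlFilter.builder("filter-" + i).setItems("item-" + i).build();
                // same transport calls as the helpers added to the base class in this PR
                client().execute(PutFilterAction.INSTANCE, new PutFilterAction.Request(filter)).actionGet();
            }

            GetFiltersAction.Response filters =
                client().execute(GetFiltersAction.INSTANCE, new GetFiltersAction.Request()).actionGet();
            assertThat((int) filters.getFilters().count(), equalTo(filtersCount));
            assertThat(filters.getFilters().results().size(), equalTo(filtersCount));
        }
    }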
@@ -39,7 +39,6 @@
import org.elasticsearch.xpack.core.ml.action.PostDataAction;
import org.elasticsearch.xpack.core.ml.action.PutCalendarAction;
import org.elasticsearch.xpack.core.ml.action.PutDatafeedAction;
-import org.elasticsearch.xpack.core.ml.action.PutFilterAction;
import org.elasticsearch.xpack.core.ml.action.PutJobAction;
import org.elasticsearch.xpack.core.ml.action.RevertModelSnapshotAction;
import org.elasticsearch.xpack.core.ml.action.StartDatafeedAction;
@@ -53,7 +52,6 @@
import org.elasticsearch.xpack.core.ml.job.config.Job;
import org.elasticsearch.xpack.core.ml.job.config.JobState;
import org.elasticsearch.xpack.core.ml.job.config.JobUpdate;
-import org.elasticsearch.xpack.core.ml.job.config.MlFilter;
import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex;
import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.DataCounts;
import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapshot;
@@ -353,10 +351,6 @@ protected List<Forecast> getForecasts(String jobId, ForecastRequestStats forecas
        return forecasts;
    }

-    protected PutFilterAction.Response putMlFilter(MlFilter filter) {
-        return client().execute(PutFilterAction.INSTANCE, new PutFilterAction.Request(filter)).actionGet();
-    }
-
    protected PutCalendarAction.Response putCalendar(String calendarId, List<String> jobIds, String description) {
        PutCalendarAction.Request request = new PutCalendarAction.Request(new Calendar(calendarId, jobIds, description));
        return client().execute(PutCalendarAction.INSTANCE, request).actionGet();
@@ -32,12 +32,15 @@
import org.elasticsearch.xpack.core.ml.MlMetadata;
import org.elasticsearch.xpack.core.ml.MlTasks;
import org.elasticsearch.xpack.core.ml.action.DeleteExpiredDataAction;
+import org.elasticsearch.xpack.core.ml.action.GetFiltersAction;
import org.elasticsearch.xpack.core.ml.action.OpenJobAction;
+import org.elasticsearch.xpack.core.ml.action.PutFilterAction;
import org.elasticsearch.xpack.core.ml.action.StartDataFrameAnalyticsAction;
import org.elasticsearch.xpack.core.ml.action.StartDatafeedAction;
import org.elasticsearch.xpack.core.ml.datafeed.DatafeedState;
import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsTaskState;
import org.elasticsearch.xpack.core.ml.job.config.JobTaskState;
+import org.elasticsearch.xpack.core.ml.job.config.MlFilter;
import org.elasticsearch.xpack.core.security.SecurityField;
import org.elasticsearch.xpack.core.security.authc.TokenMetaData;
import org.elasticsearch.xpack.ilm.IndexLifecycle;
@@ -161,6 +164,14 @@ protected DeleteExpiredDataAction.Response deleteExpiredData() throws Exception
        return response;
    }

+    protected PutFilterAction.Response putMlFilter(MlFilter filter) {
+        return client().execute(PutFilterAction.INSTANCE, new PutFilterAction.Request(filter)).actionGet();
+    }
+
+    protected GetFiltersAction.Response getMlFilters() {
+        return client().execute(GetFiltersAction.INSTANCE, new GetFiltersAction.Request()).actionGet();
+    }
+
    @Override
    protected void ensureClusterStateConsistency() throws IOException {
        if (cluster() != null && cluster().size() > 0) {