From ddf1f211ce44b5c92df11174bf2efc724082fb00 Mon Sep 17 00:00:00 2001 From: Vivek Ratnavel Subramanian Date: Thu, 3 Sep 2020 12:46:40 -0700 Subject: [PATCH 1/4] HDDS-4196. Add an endpoint in Recon to query Prometheus --- .../hadoop/hdds/recon/ReconConfigKeys.java | 4 + .../recon/MetricsServiceProviderFactory.java | 75 +++++++ .../hadoop/ozone/recon/ReconConstants.java | 3 + .../ozone/recon/ReconControllerModule.java | 1 + .../ozone/recon/ReconServerConfigKeys.java | 10 + .../apache/hadoop/ozone/recon/ReconUtils.java | 15 +- .../ozone/recon/api/MetricsProxyEndpoint.java | 117 ++++++++++ .../ozone/recon/api/PipelineEndpoint.java | 57 ++++- .../hadoop/ozone/recon/metrics/Metric.java | 58 +++++ .../recon/spi/MetricsServiceProvider.java | 60 +++++ .../impl/OzoneManagerServiceProviderImpl.java | 2 +- .../impl/PrometheusServiceProviderImpl.java | 211 ++++++++++++++++++ .../hadoop/ozone/recon/ReconTestInjector.java | 3 + .../hadoop/ozone/recon/TestReconUtils.java | 6 +- .../hadoop/ozone/recon/api/TestEndpoints.java | 71 +++++- .../TestOzoneManagerServiceProviderImpl.java | 9 +- .../resources/prometheus-test-response.txt | 21 ++ 17 files changed, 704 insertions(+), 19 deletions(-) create mode 100644 hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/MetricsServiceProviderFactory.java create mode 100644 hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/api/MetricsProxyEndpoint.java create mode 100644 hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/metrics/Metric.java create mode 100644 hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/spi/MetricsServiceProvider.java create mode 100644 hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/spi/impl/PrometheusServiceProviderImpl.java create mode 100644 hadoop-ozone/recon/src/test/resources/prometheus-test-response.txt diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/recon/ReconConfigKeys.java 
b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/recon/ReconConfigKeys.java index c0b1d5d9386a..cbfa64c08539 100644 --- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/recon/ReconConfigKeys.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/recon/ReconConfigKeys.java @@ -41,4 +41,8 @@ private ReconConfigKeys() { public static final String OZONE_RECON_DATANODE_BIND_HOST_DEFAULT = "0.0.0.0"; public static final int OZONE_RECON_DATANODE_PORT_DEFAULT = 9891; + // Prometheus HTTP endpoint including port + // ex: http://localhost:9090 + public static final String OZONE_RECON_PROMETHEUS_HTTP_ENDPOINT = + "ozone.recon.prometheus.http.endpoint"; } diff --git a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/MetricsServiceProviderFactory.java b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/MetricsServiceProviderFactory.java new file mode 100644 index 000000000000..41533f8bec12 --- /dev/null +++ b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/MetricsServiceProviderFactory.java @@ -0,0 +1,75 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + *

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.ozone.recon; + +import org.apache.hadoop.hdds.conf.OzoneConfiguration; +import org.apache.hadoop.hdds.recon.ReconConfigKeys; +import org.apache.hadoop.ozone.recon.spi.MetricsServiceProvider; +import org.apache.hadoop.ozone.recon.spi.impl.PrometheusServiceProviderImpl; + +import javax.inject.Inject; +import javax.inject.Singleton; + +/** + * Factory class that is used to get the instance of configured Metrics Service + * Provider. + */ +@Singleton +public class MetricsServiceProviderFactory { + + private OzoneConfiguration configuration; + private ReconUtils reconUtils; + + @Inject + public MetricsServiceProviderFactory(OzoneConfiguration configuration, + ReconUtils reconUtils) { + this.configuration = configuration; + this.reconUtils = reconUtils; + } + + /** + * Returns the configured MetricsServiceProvider implementation (defaults + * to prometheus). + * If no metrics service providers are configured, returns null. + * + * @return MetricsServiceProvider instance that is configured. + */ + public MetricsServiceProvider getMetricsServiceProvider() { + String prometheusEndpoint = getPrometheusEndpoint(); + if (prometheusEndpoint != null && !prometheusEndpoint.isEmpty()) { + return new PrometheusServiceProviderImpl(configuration, reconUtils); + } + return null; + } + + /** + * Returns the Prometheus endpoint if configured. Otherwise returns null. + * + * @return Prometheus endpoint if configured, null otherwise. + */ + private String getPrometheusEndpoint() { + String endpoint = configuration.getTrimmed( + ReconConfigKeys.OZONE_RECON_PROMETHEUS_HTTP_ENDPOINT); + // Remove the trailing slash from endpoint url. 
+ if (endpoint != null && endpoint.endsWith("/")) { + endpoint = endpoint.substring(0, endpoint.length() - 1); + } + return endpoint; + } +} diff --git a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ReconConstants.java b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ReconConstants.java index 0cbc61fa2459..385883b782d5 100644 --- a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ReconConstants.java +++ b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ReconConstants.java @@ -52,6 +52,9 @@ private ReconConstants() { public static final String RECON_QUERY_BUCKET = "bucket"; public static final String RECON_QUERY_FILE_SIZE = "fileSize"; + public static final String PROMETHEUS_INSTANT_QUERY_API = "query"; + public static final String PROMETHEUS_RANGED_QUERY_API = "query_range"; + public static final String RECON_SCM_CONTAINER_DB = "recon-" + CONTAINER_DB_SUFFIX; public static final String RECON_SCM_PIPELINE_DB = "recon-" diff --git a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ReconControllerModule.java b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ReconControllerModule.java index 89917675a174..cb667f43855b 100644 --- a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ReconControllerModule.java +++ b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ReconControllerModule.java @@ -108,6 +108,7 @@ protected void configure() { .to(StorageContainerServiceProviderImpl.class).in(Singleton.class); bind(OzoneStorageContainerManager.class) .to(ReconStorageContainerManagerFacade.class).in(Singleton.class); + bind(MetricsServiceProviderFactory.class).in(Singleton.class); } static class ReconOmTaskBindingModule extends AbstractModule { diff --git a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ReconServerConfigKeys.java b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ReconServerConfigKeys.java index 
704c18e18357..3eeb59667ebf 100644 --- a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ReconServerConfigKeys.java +++ b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ReconServerConfigKeys.java @@ -96,7 +96,17 @@ public final class ReconServerConfigKeys { public static final String OZONE_RECON_HTTP_AUTH_TYPE = OZONE_RECON_HTTP_AUTH_CONFIG_PREFIX + "type"; + public static final String RECON_PROMETHEUS_CONNECTION_TIMEOUT = + "ozone.recon.prometheus.connection.timeout"; + public static final String RECON_PROMETHEUS_CONNECTION_TIMEOUT_DEFAULT = + "10s"; + + public static final String RECON_PROMETHEUS_CONNECTION_REQUEST_TIMEOUT = + "ozone.recon.prometheus.connection.request.timeout"; + + public static final String + RECON_PROMETHEUS_CONNECTION_REQUEST_TIMEOUT_DEFAULT = "10s"; /** * Private constructor for utility class. diff --git a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ReconUtils.java b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ReconUtils.java index b6e4d7c0f179..752cbce10bb3 100644 --- a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ReconUtils.java +++ b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ReconUtils.java @@ -24,14 +24,14 @@ import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; -import java.io.InputStream; +import java.net.HttpURLConnection; import java.net.URL; -import java.net.URLConnection; import java.nio.file.Path; import java.nio.file.Paths; import java.sql.Timestamp; import java.util.zip.GZIPOutputStream; +import com.google.inject.Singleton; import org.apache.hadoop.hdds.HddsConfigKeys; import org.apache.hadoop.hdds.HddsUtils; import org.apache.hadoop.hdds.conf.ConfigurationSource; @@ -59,6 +59,7 @@ /** * Recon Utility class. 
*/ +@Singleton public class ReconUtils { private final static int WRITE_BUFFER = 1048576; //1MB @@ -222,20 +223,20 @@ public void untarCheckpointFile(File tarFile, Path destPath) } /** - * Make HTTP GET call on the URL and return inputstream to the response. + * Make HTTP GET call on the URL and return HttpURLConnection instance. * @param connectionFactory URLConnectionFactory to use. * @param url url to call * @param isSpnego is SPNEGO enabled - * @return Inputstream to the response of the HTTP call. + * @return HttpURLConnection instance of the HTTP call. * @throws IOException, AuthenticationException While reading the response. */ - public InputStream makeHttpCall(URLConnectionFactory connectionFactory, + public HttpURLConnection makeHttpCall(URLConnectionFactory connectionFactory, String url, boolean isSpnego) throws IOException, AuthenticationException { - URLConnection urlConnection = + HttpURLConnection urlConnection = (HttpURLConnection) connectionFactory.openConnection(new URL(url), isSpnego); urlConnection.connect(); - return urlConnection.getInputStream(); + return urlConnection; } /** diff --git a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/api/MetricsProxyEndpoint.java b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/api/MetricsProxyEndpoint.java new file mode 100644 index 000000000000..c52054e86ef0 --- /dev/null +++ b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/api/MetricsProxyEndpoint.java @@ -0,0 +1,117 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + *

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.ozone.recon.api; + +import org.apache.hadoop.ozone.recon.MetricsServiceProviderFactory; +import org.apache.hadoop.ozone.recon.spi.MetricsServiceProvider; +import org.apache.hadoop.ozone.recon.spi.impl.PrometheusServiceProviderImpl; + +import javax.inject.Inject; +import javax.servlet.http.HttpServletResponse; +import javax.ws.rs.DefaultValue; +import javax.ws.rs.GET; +import javax.ws.rs.Path; +import javax.ws.rs.PathParam; +import javax.ws.rs.Produces; +import javax.ws.rs.core.Context; +import javax.ws.rs.core.MediaType; +import javax.ws.rs.core.Response; +import javax.ws.rs.core.UriInfo; + +import java.io.InputStream; +import java.io.OutputStream; +import java.net.HttpURLConnection; +import java.nio.ByteBuffer; +import java.nio.channels.Channels; +import java.nio.channels.ReadableByteChannel; +import java.nio.channels.WritableByteChannel; + +import static org.apache.hadoop.ozone.recon.ReconConstants.PROMETHEUS_INSTANT_QUERY_API; + +/** + * Endpoint to fetch metrics data from Prometheus HTTP endpoint. + */ +@Path("/metrics") +public class MetricsProxyEndpoint { + + private MetricsServiceProvider metricsServiceProvider; + + @Inject + public MetricsProxyEndpoint( + MetricsServiceProviderFactory metricsServiceProviderFactory) { + this.metricsServiceProvider = + metricsServiceProviderFactory.getMetricsServiceProvider(); + } + + /** + * Return a response from the configured metrics endpoint. 
+ */ + @GET + @Produces(MediaType.APPLICATION_JSON) + @Path("/{api}") + public void getMetricsResponse( + @DefaultValue (PROMETHEUS_INSTANT_QUERY_API) @PathParam("api") String api, + @Context UriInfo uriInfo, + @Context HttpServletResponse httpServletResponse + ) throws Exception { + if (metricsServiceProvider != null) { + HttpURLConnection connection = metricsServiceProvider.getMetricsResponse( + api, uriInfo.getRequestUri().getQuery()); + InputStream inputStream; + if (Response.Status.fromStatusCode(connection.getResponseCode()) + .getFamily() == Response.Status.Family.SUCCESSFUL) { + inputStream = connection.getInputStream(); + } else { + // Throw a bad gateway error if HttpResponseCode is not 2xx + httpServletResponse.setStatus(HttpServletResponse.SC_BAD_GATEWAY); + inputStream = connection.getErrorStream(); + } + try ( + OutputStream outputStream = + httpServletResponse.getOutputStream(); + ReadableByteChannel inputChannel = + Channels.newChannel(inputStream); + WritableByteChannel outputChannel = + Channels.newChannel(outputStream) + ) { + final ByteBuffer buffer = ByteBuffer.allocateDirect(16 * 1024); + + while(inputChannel.read(buffer) != -1) { + buffer.flip(); + outputChannel.write(buffer); + buffer.compact(); + } + + buffer.flip(); + + while(buffer.hasRemaining()) { + outputChannel.write(buffer); + } + } finally { + inputStream.close(); + } + } else { + // Throw a Bad Gateway Error + httpServletResponse.sendError(HttpServletResponse.SC_BAD_GATEWAY, + "Metrics endpoint is not configured. 
Configure " + + PrometheusServiceProviderImpl.getEndpointConfigKey() + " and " + + "try again later."); + } + } +} diff --git a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/api/PipelineEndpoint.java b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/api/PipelineEndpoint.java index d75469719388..42812a5edc42 100644 --- a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/api/PipelineEndpoint.java +++ b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/api/PipelineEndpoint.java @@ -20,9 +20,12 @@ import org.apache.hadoop.hdds.scm.pipeline.Pipeline; import org.apache.hadoop.hdds.scm.server.OzoneStorageContainerManager; +import org.apache.hadoop.ozone.recon.MetricsServiceProviderFactory; import org.apache.hadoop.ozone.recon.api.types.PipelineMetadata; import org.apache.hadoop.ozone.recon.api.types.PipelinesResponse; +import org.apache.hadoop.ozone.recon.metrics.Metric; import org.apache.hadoop.ozone.recon.scm.ReconPipelineManager; +import org.apache.hadoop.ozone.recon.spi.MetricsServiceProvider; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -36,6 +39,8 @@ import java.time.Instant; import java.util.ArrayList; import java.util.List; +import java.util.Optional; +import java.util.TreeMap; import java.util.UUID; /** @@ -49,10 +54,15 @@ public class PipelineEndpoint { LoggerFactory.getLogger(PipelineEndpoint.class); private ReconPipelineManager pipelineManager; + private MetricsServiceProvider metricsServiceProvider; @Inject - PipelineEndpoint(OzoneStorageContainerManager reconSCM) { + PipelineEndpoint(OzoneStorageContainerManager reconSCM, + MetricsServiceProviderFactory + metricsServiceProviderFactory) { this.pipelineManager = (ReconPipelineManager) reconSCM.getPipelineManager(); + this.metricsServiceProvider = + metricsServiceProviderFactory.getMetricsServiceProvider(); } /** @@ -89,19 +99,56 @@ public Response getPipelines() { LOG.warn("Cannot get containers for pipeline {} ", pipelineId, ioEx); } - 
PipelineMetadata pipelineMetadata = builder.setPipelineId(pipelineId) + PipelineMetadata.Builder pipelineBuilder = + builder.setPipelineId(pipelineId) .setDatanodes(datanodes) .setDuration(duration) .setStatus(pipeline.getPipelineState()) .setReplicationFactor(pipeline.getFactor().getNumber()) - .setReplicationType(pipeline.getType().toString()) - .build(); + .setReplicationType(pipeline.getType().toString()); - pipelinesList.add(pipelineMetadata); + // If any metrics service providers like Prometheus + // is configured, then query it for metrics and populate + // leader election count and last leader election time + if (metricsServiceProvider != null) { + // Extract last part of pipelineId to get its group Id. + // ex. group id of 48981bf7-8bea-4fbd-9857-79df51ee872d + // is group-79DF51EE872D + String[] splits = pipelineId.toString().split("-"); + String groupId = "group-" + splits[splits.length-1].toUpperCase(); + Optional leaderElectionCount = getMetricValue( + "ratis_leader_election_electionCount", groupId); + leaderElectionCount.ifPresent(pipelineBuilder::setLeaderElections); + Optional leaderElectionTime = getMetricValue( + "ratis_leader_election_lastLeaderElectionTime", groupId); + leaderElectionTime.ifPresent(pipelineBuilder::setLastLeaderElection); + } + + pipelinesList.add(pipelineBuilder.build()); }); PipelinesResponse pipelinesResponse = new PipelinesResponse(pipelinesList.size(), pipelinesList); return Response.ok(pipelinesResponse).build(); } + + private Optional getMetricValue(String metricName, String groupId) { + String metricsQuery = String.format( + "query=%s{group=\"%s\"}", metricName, groupId); + try { + List metrics = metricsServiceProvider.getMetricsInstant( + metricsQuery); + if (!metrics.isEmpty()) { + TreeMap values = (TreeMap) + metrics.get(0).getValues(); + if (!values.isEmpty()) { + return Optional.of(values.firstEntry().getValue().longValue()); + } + } + } catch (Exception ex) { + LOG.error("Unable to get metrics for " + + 
metricName + ".", ex); + } + return Optional.empty(); + } } diff --git a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/metrics/Metric.java b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/metrics/Metric.java new file mode 100644 index 000000000000..7a1b40f29478 --- /dev/null +++ b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/metrics/Metric.java @@ -0,0 +1,58 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.ozone.recon.metrics; + +import org.apache.hadoop.hdds.annotation.InterfaceAudience; + +import java.util.Map; +import java.util.SortedMap; +import java.util.TreeMap; + +/** + * Class for wrapping a metric response from + * {@link org.apache.hadoop.ozone.recon.spi.MetricsServiceProvider}. 
+ */ +@InterfaceAudience.Private +public final class Metric { + + private Map metadata; + private TreeMap values; + + public Metric(Map metadata, + SortedMap values) { + this.metadata = metadata; + this.values = (TreeMap) values; + } + + public Map getMetadata() { + return metadata; + } + + public void setMetadata(Map metadata) { + this.metadata = metadata; + } + + public SortedMap getValues() { + return values; + } + + public void setValues(SortedMap values) { + this.values = (TreeMap) values; + } +} diff --git a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/spi/MetricsServiceProvider.java b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/spi/MetricsServiceProvider.java new file mode 100644 index 000000000000..588794f4c8dd --- /dev/null +++ b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/spi/MetricsServiceProvider.java @@ -0,0 +1,60 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + *

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.ozone.recon.spi; + +import org.apache.hadoop.ozone.recon.metrics.Metric; + +import java.net.HttpURLConnection; +import java.util.List; + +/** + * Interface to access Ozone metrics. + */ +public interface MetricsServiceProvider { + + /** + * Returns {@link HttpURLConnection} after querying Metrics endpoint for the + * given metric. + * + * @param api api. + * @param queryString query string with metric name and other filters. + * @return HttpURLConnection + * @throws Exception exception + */ + HttpURLConnection getMetricsResponse(String api, String queryString) + throws Exception; + + /** + * Returns a list of {@link Metric} for the given instant query. + * + * @param queryString query string with metric name and other filters. + * @return List of Json map of metrics response. + * @throws Exception exception + */ + List getMetricsInstant(String queryString) throws Exception; + + /** + * Returns a list of {@link Metric} for the given ranged query. + * + * @param queryString query string with metric name, start time, end time, + * step and other filters. + * @return List of Json map of metrics response. 
+ * @throws Exception exception + */ + List getMetricsRanged(String queryString) throws Exception; +} diff --git a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/spi/impl/OzoneManagerServiceProviderImpl.java b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/spi/impl/OzoneManagerServiceProviderImpl.java index 94c952097c03..dc70d6097f75 100644 --- a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/spi/impl/OzoneManagerServiceProviderImpl.java +++ b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/spi/impl/OzoneManagerServiceProviderImpl.java @@ -281,7 +281,7 @@ DBCheckpoint getOzoneManagerDBSnapshot() { SecurityUtil.doAsLoginUser(() -> { try (InputStream inputStream = reconUtils.makeHttpCall( connectionFactory, getOzoneManagerSnapshotUrl(), - isOmSpengoEnabled())) { + isOmSpengoEnabled()).getInputStream()) { FileUtils.copyInputStreamToFile(inputStream, targetFile); } return null; diff --git a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/spi/impl/PrometheusServiceProviderImpl.java b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/spi/impl/PrometheusServiceProviderImpl.java new file mode 100644 index 000000000000..7ccb257a0f35 --- /dev/null +++ b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/spi/impl/PrometheusServiceProviderImpl.java @@ -0,0 +1,211 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + *

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

+ * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.ozone.recon.spi.impl; + +import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.hadoop.hdds.conf.OzoneConfiguration; +import org.apache.hadoop.hdds.recon.ReconConfigKeys; +import org.apache.hadoop.hdfs.web.URLConnectionFactory; +import org.apache.hadoop.ozone.recon.ReconUtils; +import org.apache.hadoop.ozone.recon.metrics.Metric; +import org.apache.hadoop.ozone.recon.spi.MetricsServiceProvider; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import javax.inject.Singleton; +import javax.ws.rs.core.Response; +import java.io.InputStream; +import java.net.HttpURLConnection; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.TreeMap; +import java.util.concurrent.TimeUnit; + +import static org.apache.hadoop.ozone.recon.ReconConstants.PROMETHEUS_INSTANT_QUERY_API; +import static org.apache.hadoop.ozone.recon.ReconConstants.PROMETHEUS_RANGED_QUERY_API; +import static org.apache.hadoop.ozone.recon.ReconServerConfigKeys.RECON_PROMETHEUS_CONNECTION_REQUEST_TIMEOUT; +import static org.apache.hadoop.ozone.recon.ReconServerConfigKeys.RECON_PROMETHEUS_CONNECTION_REQUEST_TIMEOUT_DEFAULT; +import static org.apache.hadoop.ozone.recon.ReconServerConfigKeys.RECON_PROMETHEUS_CONNECTION_TIMEOUT; +import static org.apache.hadoop.ozone.recon.ReconServerConfigKeys.RECON_PROMETHEUS_CONNECTION_TIMEOUT_DEFAULT; + +/** + * Implementation of the Prometheus Metrics Service provider. 
+ */ +@Singleton +public class PrometheusServiceProviderImpl + implements MetricsServiceProvider { + + private static final Logger LOG = + LoggerFactory.getLogger(PrometheusServiceProviderImpl.class); + + private URLConnectionFactory connectionFactory; + private final String prometheusEndpoint; + private ReconUtils reconUtils; + + public PrometheusServiceProviderImpl(OzoneConfiguration configuration, + ReconUtils reconUtils) { + + int connectionTimeout = (int) configuration.getTimeDuration( + RECON_PROMETHEUS_CONNECTION_TIMEOUT, + RECON_PROMETHEUS_CONNECTION_TIMEOUT_DEFAULT, TimeUnit.MILLISECONDS); + int connectionRequestTimeout = (int) configuration.getTimeDuration( + RECON_PROMETHEUS_CONNECTION_REQUEST_TIMEOUT, + RECON_PROMETHEUS_CONNECTION_REQUEST_TIMEOUT_DEFAULT, + TimeUnit.MILLISECONDS); + + connectionFactory = + URLConnectionFactory.newDefaultURLConnectionFactory(connectionTimeout, + connectionRequestTimeout, configuration); + + String endpoint = configuration.getTrimmed(getEndpointConfigKey()); + // Remove the trailing slash from endpoint url. + if (endpoint != null && endpoint.endsWith("/")) { + endpoint = endpoint.substring(0, endpoint.length() - 1); + } + this.prometheusEndpoint = endpoint; + this.reconUtils = reconUtils; + } + + /** + * Returns {@link HttpURLConnection} after querying Metrics endpoint for the + * given metric. + * + * @param api api. + * @param queryString query string with metric name and other filters. + * @return HttpURLConnection + * @throws Exception exception + */ + @Override + public HttpURLConnection getMetricsResponse(String api, String queryString) + throws Exception { + String url = String.format("%s/api/v1/%s?%s", prometheusEndpoint, api, + queryString); + return reconUtils.makeHttpCall(connectionFactory, + url, false); + } + + /** + * Returns the endpoint configuration key for the Metrics service provider. + * + * @return endpoint configuration key. 
+ */ + public static String getEndpointConfigKey() { + return ReconConfigKeys.OZONE_RECON_PROMETHEUS_HTTP_ENDPOINT; + } + + /** + * Returns a list of {@link Metric} for the given instant query. + * + * @param queryString query string with metric name and other filters. + * @return List of Json map of metrics response. + * @throws Exception exception + */ + @Override + public List getMetricsInstant(String queryString) + throws Exception { + return getMetrics(PROMETHEUS_INSTANT_QUERY_API, queryString); + } + + /** + * Returns a list of {@link Metric} for the given ranged query. + * + * @param queryString query string with metric name, start time, end time, + * step and other filters. + * @return List of Json map of metrics response. + * @throws Exception exception + */ + @Override + public List getMetricsRanged(String queryString) throws Exception { + return getMetrics(PROMETHEUS_RANGED_QUERY_API, queryString); + } + + /** + * Returns a list of {@link Metric} for the given api and query string. + * + * @param api api + * @param queryString query string with metric name and other filters. + * @return List of Json map of metrics response. 
+ * @throws Exception + */ + private List getMetrics(String api, String queryString) + throws Exception { + HttpURLConnection urlConnection = + getMetricsResponse(api, queryString); + List metrics = null; + if (Response.Status.fromStatusCode(urlConnection.getResponseCode()) + .getFamily() == Response.Status.Family.SUCCESSFUL) { + InputStream inputStream = urlConnection.getInputStream(); + ObjectMapper mapper = new ObjectMapper(); + Map jsonMap = mapper.readValue(inputStream, Map.class); + inputStream.close(); + String status = (String) jsonMap.get("status"); + if (status != null && status.equals("success")) { + // For range queries, the resultType will be "matrix" + // and for instant queries, the resultType will be "vector" + Map data = (Map) jsonMap.get( + "data"); + if (data != null) { + String resultType = (String) data.get("resultType"); + if (resultType != null) { + String valueKey = resultType.equals("matrix") ? "values" : "value"; + List> result = (List>) + data.get("result"); + if (!result.isEmpty()) { + metrics = new ArrayList<>(); + // value is an array of timestamp and value. + // ex. "value": [1599032677.002,"1"] for "vector" + // values is an array of array of timestamp and value. + // ex. 
"values": [[1599032677.002,"1"]] for "matrix" + for (Map metricJson : result) { + Map metadata = + (Map) metricJson.get("metric"); + TreeMap values = new TreeMap<>(); + List> valuesJson = new ArrayList<>(); + if (resultType.equals("matrix")) { + valuesJson = (List>) metricJson.get(valueKey); + } else if (resultType.equals("vector")) { + valuesJson.add((List) metricJson.get(valueKey)); + } + for (List value : valuesJson) { + if (value.size() == 2) { + values.put((Double) value.get(0), + Double.parseDouble((String) value.get(1))); + } + } + metrics.add(new Metric(metadata, values)); + } + } + } + } + } else { + if (LOG.isErrorEnabled()) { + LOG.error(String.format("Error while getting metrics: %s", + jsonMap.get("error"))); + } + } + } else { + LOG.error("Error while connecting to metrics endpoint. Got a " + + "status code " + urlConnection.getResponseCode() + ": " + + urlConnection.getResponseMessage()); + } + return metrics; + } +} + diff --git a/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/ReconTestInjector.java b/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/ReconTestInjector.java index b8529536059f..ccdc2799ea0b 100644 --- a/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/ReconTestInjector.java +++ b/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/ReconTestInjector.java @@ -19,6 +19,7 @@ package org.apache.hadoop.ozone.recon; import static org.apache.hadoop.hdds.recon.ReconConfigKeys.OZONE_RECON_DATANODE_ADDRESS_KEY; +import static org.apache.hadoop.hdds.recon.ReconConfigKeys.OZONE_RECON_PROMETHEUS_HTTP_ENDPOINT; import static org.apache.hadoop.ozone.recon.ReconServerConfigKeys.OZONE_RECON_DB_DIR; import static org.apache.hadoop.ozone.recon.ReconServerConfigKeys.OZONE_RECON_OM_SNAPSHOT_DB_DIR; import static org.apache.hadoop.ozone.recon.ReconServerConfigKeys.OZONE_RECON_SCM_DB_DIR; @@ -219,6 +220,8 @@ public OzoneConfiguration getTestOzoneConfiguration( configuration.set(OZONE_RECON_SCM_DB_DIR, 
dir.getAbsolutePath()); configuration.set(OZONE_RECON_DATANODE_ADDRESS_KEY, "0.0.0.0:0"); + configuration.set(OZONE_RECON_PROMETHEUS_HTTP_ENDPOINT, + "http://localhost:6666"); return configuration; } diff --git a/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/TestReconUtils.java b/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/TestReconUtils.java index 095129924703..91c8b64a1d8d 100644 --- a/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/TestReconUtils.java +++ b/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/TestReconUtils.java @@ -33,9 +33,9 @@ import java.io.FileWriter; import java.io.IOException; import java.io.InputStream; +import java.net.HttpURLConnection; import java.nio.charset.Charset; import java.nio.file.Paths; -import java.net.URLConnection; import java.net.URL; import org.apache.commons.io.FileUtils; @@ -147,12 +147,12 @@ public void testMakeHttpCall() throws Exception { String contents; URLConnectionFactory connectionFactoryMock = mock(URLConnectionFactory.class); - URLConnection urlConnectionMock = mock(URLConnection.class); + HttpURLConnection urlConnectionMock = mock(HttpURLConnection.class); when(urlConnectionMock.getInputStream()).thenReturn(fileInputStream); when(connectionFactoryMock.openConnection(any(URL.class), anyBoolean())) .thenReturn(urlConnectionMock); try (InputStream inputStream = new ReconUtils() - .makeHttpCall(connectionFactoryMock, url, false)) { + .makeHttpCall(connectionFactoryMock, url, false).getInputStream()) { contents = IOUtils.toString(inputStream, Charset.defaultCharset()); } diff --git a/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/api/TestEndpoints.java b/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/api/TestEndpoints.java index c7392a763164..bdc55d277e3f 100644 --- a/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/api/TestEndpoints.java +++ 
b/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/api/TestEndpoints.java @@ -18,6 +18,7 @@ package org.apache.hadoop.ozone.recon.api; +import org.apache.commons.io.FileUtils; import org.apache.commons.lang3.tuple.Pair; import org.apache.hadoop.hdds.protocol.DatanodeDetails; import org.apache.hadoop.hdds.protocol.proto.HddsProtos; @@ -40,12 +41,15 @@ import org.apache.hadoop.hdds.scm.protocol.StorageContainerLocationProtocol; import org.apache.hadoop.hdds.scm.server.OzoneStorageContainerManager; import org.apache.hadoop.hdds.utils.db.TypedTable; +import org.apache.hadoop.hdfs.web.URLConnectionFactory; import org.apache.hadoop.ozone.om.OMMetadataManager; import org.apache.hadoop.ozone.om.OmMetadataManagerImpl; import org.apache.hadoop.ozone.om.helpers.OmBucketInfo; import org.apache.hadoop.ozone.om.helpers.OmKeyInfo; import org.apache.hadoop.ozone.om.helpers.OmVolumeArgs; +import org.apache.hadoop.ozone.recon.MetricsServiceProviderFactory; import org.apache.hadoop.ozone.recon.ReconTestInjector; +import org.apache.hadoop.ozone.recon.ReconUtils; import org.apache.hadoop.ozone.recon.api.types.ClusterStateResponse; import org.apache.hadoop.ozone.recon.api.types.DatanodeMetadata; import org.apache.hadoop.ozone.recon.api.types.DatanodesResponse; @@ -79,16 +83,28 @@ import static org.apache.hadoop.ozone.recon.OMMetadataManagerTestUtils.getTestReconOmMetadataManager; import static org.apache.hadoop.ozone.recon.OMMetadataManagerTestUtils.initializeNewOmMetadataManager; import static org.apache.hadoop.ozone.recon.OMMetadataManagerTestUtils.writeDataToOm; +import static org.apache.hadoop.ozone.recon.ReconConstants.PROMETHEUS_INSTANT_QUERY_API; import static org.hadoop.ozone.recon.schema.tables.GlobalStatsTable.GLOBAL_STATS; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyBoolean; +import static 
org.mockito.ArgumentMatchers.anyString; import static org.mockito.BDDMockito.given; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; +import javax.servlet.ServletOutputStream; +import javax.servlet.http.HttpServletResponse; import javax.ws.rs.core.Response; +import javax.ws.rs.core.UriInfo; +import java.io.File; import java.io.IOException; +import java.io.InputStream; +import java.net.HttpURLConnection; +import java.net.URI; import java.util.List; import java.util.UUID; import java.util.concurrent.Callable; @@ -101,6 +117,7 @@ public class TestEndpoints extends AbstractReconSqlDBTest { private PipelineEndpoint pipelineEndpoint; private ClusterStateEndpoint clusterStateEndpoint; private UtilizationEndpoint utilizationEndpoint; + private MetricsProxyEndpoint metricsProxyEndpoint; private ReconOMMetadataManager reconOMMetadataManager; private FileSizeCountTask fileSizeCountTask; private TableCountTask tableCountTask; @@ -119,11 +136,14 @@ public class TestEndpoints extends AbstractReconSqlDBTest { private final String host2 = "host2.datanode"; private final String ip1 = "1.1.1.1"; private final String ip2 = "2.2.2.2"; + private final String prometheusTestResponseFile = + "prometheus-test-response.txt"; + private ReconUtils reconUtilsMock; @Rule public TemporaryFolder temporaryFolder = new TemporaryFolder(); - private void initializeInjector() throws IOException { + private void initializeInjector() throws Exception { reconOMMetadataManager = getTestReconOmMetadataManager( initializeNewOmMetadataManager(temporaryFolder.newFolder()), temporaryFolder.newFolder()); @@ -156,6 +176,17 @@ private void initializeInjector() throws IOException { when(mockScmServiceProvider.getContainerWithPipeline(containerId)) .thenReturn(containerWithPipeline); + InputStream inputStream = + Thread.currentThread().getContextClassLoader().getResourceAsStream( + prometheusTestResponseFile); + reconUtilsMock = 
mock(ReconUtils.class); + HttpURLConnection urlConnectionMock = mock(HttpURLConnection.class); + when(urlConnectionMock.getResponseCode()) + .thenReturn(HttpServletResponse.SC_OK); + when(urlConnectionMock.getInputStream()).thenReturn(inputStream); + when(reconUtilsMock.makeHttpCall(any(URLConnectionFactory.class), + anyString(), anyBoolean())).thenReturn(urlConnectionMock); + ReconTestInjector reconTestInjector = new ReconTestInjector.Builder(temporaryFolder) .withReconSqlDb() @@ -167,8 +198,10 @@ private void initializeInjector() throws IOException { ReconStorageContainerManagerFacade.class) .addBinding(ClusterStateEndpoint.class) .addBinding(NodeEndpoint.class) + .addBinding(MetricsServiceProviderFactory.class) .addBinding(ContainerSchemaManager.class) .addBinding(UtilizationEndpoint.class) + .addBinding(ReconUtils.class, reconUtilsMock) .addBinding(StorageContainerLocationProtocol.class, mockScmClient) .build(); @@ -190,6 +223,10 @@ private void initializeInjector() throws IOException { reconTestInjector.getInstance(OzoneStorageContainerManager.class); clusterStateEndpoint = new ClusterStateEndpoint(reconScm, globalStatsDao); + MetricsServiceProviderFactory metricsServiceProviderFactory = + reconTestInjector.getInstance(MetricsServiceProviderFactory.class); + metricsProxyEndpoint = + new MetricsProxyEndpoint(metricsServiceProviderFactory); dslContext = getDslContext(); } @@ -415,6 +452,7 @@ public void testGetPipelines() throws Exception { pipelineMetadata.getLeaderNode()); Assert.assertEquals(pipeline.getId().getId(), pipelineMetadata.getPipelineId()); + Assert.assertEquals(5, pipelineMetadata.getLeaderElections()); waitAndCheckConditionAfterHeartbeat(() -> { Response response1 = pipelineEndpoint.getPipelines(); @@ -426,6 +464,37 @@ public void testGetPipelines() throws Exception { }); } + @Test + public void testGetMetricsResponse() throws Exception { + HttpServletResponse responseMock = mock(HttpServletResponse.class); + ServletOutputStream outputStreamMock 
= mock(ServletOutputStream.class); + when(responseMock.getOutputStream()).thenReturn(outputStreamMock); + UriInfo uriInfoMock = mock(UriInfo.class); + URI uriMock = mock(URI.class); + when(uriMock.getQuery()).thenReturn(""); + when(uriInfoMock.getRequestUri()).thenReturn(uriMock); + + // Mock makeHttpCall to send a json response + // when the prometheus endpoint is queried. + ClassLoader classLoader = Thread.currentThread().getContextClassLoader(); + InputStream inputStream = classLoader + .getResourceAsStream(prometheusTestResponseFile); + HttpURLConnection urlConnectionMock = mock(HttpURLConnection.class); + when(urlConnectionMock.getResponseCode()) + .thenReturn(HttpServletResponse.SC_OK); + when(urlConnectionMock.getInputStream()).thenReturn(inputStream); + when(reconUtilsMock.makeHttpCall(any(URLConnectionFactory.class), + anyString(), anyBoolean())).thenReturn(urlConnectionMock); + + metricsProxyEndpoint.getMetricsResponse(PROMETHEUS_INSTANT_QUERY_API, + uriInfoMock, responseMock); + + byte[] fileBytes = FileUtils.readFileToByteArray( + new File(classLoader.getResource(prometheusTestResponseFile).getFile()) + ); + verify(outputStreamMock).write(fileBytes, 0, fileBytes.length); + } + @Test public void testGetClusterState() throws Exception { Response response = clusterStateEndpoint.getClusterState(); diff --git a/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/spi/impl/TestOzoneManagerServiceProviderImpl.java b/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/spi/impl/TestOzoneManagerServiceProviderImpl.java index e9899148a7d5..92feaeda8a7a 100644 --- a/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/spi/impl/TestOzoneManagerServiceProviderImpl.java +++ b/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/spi/impl/TestOzoneManagerServiceProviderImpl.java @@ -46,6 +46,7 @@ import java.io.FileWriter; import java.io.IOException; import java.io.InputStream; +import java.net.HttpURLConnection; import 
java.nio.file.Paths; import org.apache.hadoop.hdds.conf.OzoneConfiguration; @@ -113,8 +114,10 @@ public void testUpdateReconOmDBWithNewSnapshot() throws Exception { File tarFile = createTarFile(checkpoint.getCheckpointLocation()); InputStream inputStream = new FileInputStream(tarFile); ReconUtils reconUtilsMock = getMockReconUtils(); + HttpURLConnection httpURLConnectionMock = mock(HttpURLConnection.class); + when(httpURLConnectionMock.getInputStream()).thenReturn(inputStream); when(reconUtilsMock.makeHttpCall(any(), anyString(), anyBoolean())) - .thenReturn(inputStream); + .thenReturn(httpURLConnectionMock); ReconTaskController reconTaskController = getMockTaskController(); @@ -164,8 +167,10 @@ public void testGetOzoneManagerDBSnapshot() throws Exception { File tarFile = createTarFile(checkpointDir.toPath()); InputStream fileInputStream = new FileInputStream(tarFile); ReconUtils reconUtilsMock = getMockReconUtils(); + HttpURLConnection httpURLConnectionMock = mock(HttpURLConnection.class); + when(httpURLConnectionMock.getInputStream()).thenReturn(fileInputStream); when(reconUtilsMock.makeHttpCall(any(), anyString(), anyBoolean())) - .thenReturn(fileInputStream); + .thenReturn(httpURLConnectionMock); ReconOMMetadataManager reconOMMetadataManager = mock(ReconOMMetadataManager.class); diff --git a/hadoop-ozone/recon/src/test/resources/prometheus-test-response.txt b/hadoop-ozone/recon/src/test/resources/prometheus-test-response.txt new file mode 100644 index 000000000000..bcbd1569067b --- /dev/null +++ b/hadoop-ozone/recon/src/test/resources/prometheus-test-response.txt @@ -0,0 +1,21 @@ +{ + "status": "success", + "data": { + "resultType": "vector", + "result": [ + { + "metric": { + "__name__": "ratis_leader_election_electionCount", + "exported_instance": "33a5ac1d-8c65-4c74-a0b8-9314dfcccb42", + "group": "group-03CA9397D54B", + "instance": "ozone_datanode_1:9882", + "job": "ozone" + }, + "value": [ + 1599159384.455, + "5" + ] + } + ] + } +} \ No newline at end of 
file From 19e35f6b3a39d67ab4c40c301348a44f9b10670c Mon Sep 17 00:00:00 2001 From: Vivek Ratnavel Subramanian Date: Thu, 3 Sep 2020 18:24:10 -0700 Subject: [PATCH 2/4] Fix rat and integration test failure --- .../ozone/TestOzoneConfigurationFields.java | 4 +++- hadoop-ozone/pom.xml | 3 ++- .../ozone/recon/ReconServerConfigKeys.java | 12 ++++++------ .../spi/impl/PrometheusServiceProviderImpl.java | 16 ++++++++-------- 4 files changed, 19 insertions(+), 16 deletions(-) diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestOzoneConfigurationFields.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestOzoneConfigurationFields.java index cbd5db505f38..dec0c9c5067f 100644 --- a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestOzoneConfigurationFields.java +++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestOzoneConfigurationFields.java @@ -1,4 +1,4 @@ -/** +/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. 
See the NOTICE file * distributed with this work for additional information @@ -68,6 +68,8 @@ private void addPropertiesNotInXml() { OzoneConfigKeys.OZONE_ACL_AUTHORIZER_CLASS_NATIVE, OzoneConfigKeys.OZONE_S3_AUTHINFO_MAX_LIFETIME_KEY, ReconServerConfigKeys.OZONE_RECON_SCM_DB_DIR, + ReconServerConfigKeys.RECON_METRICS_HTTP_CONNECTION_TIMEOUT, + ReconServerConfigKeys.RECON_METRICS_HTTP_CONNECTION_REQUEST_TIMEOUT, OMConfigKeys.OZONE_OM_RATIS_SNAPSHOT_AUTO_TRIGGER_THRESHOLD_KEY // TODO HDDS-2856 )); diff --git a/hadoop-ozone/pom.xml b/hadoop-ozone/pom.xml index 6f4b2b2e82ea..f5dccb6eced5 100644 --- a/hadoop-ozone/pom.xml +++ b/hadoop-ozone/pom.xml @@ -267,7 +267,7 @@ **/output.xml **/log.html **/report.html - .idea/** + **/.idea/** **/.ssh/id_rsa* dev-support/*tests dev-support/checkstyle* @@ -304,6 +304,7 @@ **/yarn.lock **/pnpm-lock.yaml **/ozone-recon-web/build/** + src/test/resources/prometheus-test-response.txt src/main/license/** src/main/resources/proto.lock diff --git a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ReconServerConfigKeys.java b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ReconServerConfigKeys.java index 3eeb59667ebf..78e805151c7d 100644 --- a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ReconServerConfigKeys.java +++ b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ReconServerConfigKeys.java @@ -96,17 +96,17 @@ public final class ReconServerConfigKeys { public static final String OZONE_RECON_HTTP_AUTH_TYPE = OZONE_RECON_HTTP_AUTH_CONFIG_PREFIX + "type"; - public static final String RECON_PROMETHEUS_CONNECTION_TIMEOUT = - "ozone.recon.prometheus.connection.timeout"; + public static final String RECON_METRICS_HTTP_CONNECTION_TIMEOUT = + "ozone.recon.metrics.http.connection.timeout"; - public static final String RECON_PROMETHEUS_CONNECTION_TIMEOUT_DEFAULT = + public static final String RECON_METRICS_HTTP_CONNECTION_TIMEOUT_DEFAULT = "10s"; - public static final String 
RECON_PROMETHEUS_CONNECTION_REQUEST_TIMEOUT = - "ozone.recon.prometheus.connection.timeout"; + public static final String RECON_METRICS_HTTP_CONNECTION_REQUEST_TIMEOUT = + "ozone.recon.metrics.http.connection.request.timeout"; public static final String - RECON_PROMETHEUS_CONNECTION_REQUEST_TIMEOUT_DEFAULT = "10s"; + RECON_METRICS_HTTP_CONNECTION_REQUEST_TIMEOUT_DEFAULT = "10s"; /** * Private constructor for utility class. diff --git a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/spi/impl/PrometheusServiceProviderImpl.java b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/spi/impl/PrometheusServiceProviderImpl.java index 7ccb257a0f35..e33f76b439fb 100644 --- a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/spi/impl/PrometheusServiceProviderImpl.java +++ b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/spi/impl/PrometheusServiceProviderImpl.java @@ -40,10 +40,10 @@ import static org.apache.hadoop.ozone.recon.ReconConstants.PROMETHEUS_INSTANT_QUERY_API; import static org.apache.hadoop.ozone.recon.ReconConstants.PROMETHEUS_RANGED_QUERY_API; -import static org.apache.hadoop.ozone.recon.ReconServerConfigKeys.RECON_PROMETHEUS_CONNECTION_REQUEST_TIMEOUT; -import static org.apache.hadoop.ozone.recon.ReconServerConfigKeys.RECON_PROMETHEUS_CONNECTION_REQUEST_TIMEOUT_DEFAULT; -import static org.apache.hadoop.ozone.recon.ReconServerConfigKeys.RECON_PROMETHEUS_CONNECTION_TIMEOUT; -import static org.apache.hadoop.ozone.recon.ReconServerConfigKeys.RECON_PROMETHEUS_CONNECTION_TIMEOUT_DEFAULT; +import static org.apache.hadoop.ozone.recon.ReconServerConfigKeys.RECON_METRICS_HTTP_CONNECTION_REQUEST_TIMEOUT; +import static org.apache.hadoop.ozone.recon.ReconServerConfigKeys.RECON_METRICS_HTTP_CONNECTION_REQUEST_TIMEOUT_DEFAULT; +import static org.apache.hadoop.ozone.recon.ReconServerConfigKeys.RECON_METRICS_HTTP_CONNECTION_TIMEOUT; +import static 
org.apache.hadoop.ozone.recon.ReconServerConfigKeys.RECON_METRICS_HTTP_CONNECTION_TIMEOUT_DEFAULT; /** * Implementation of the Prometheus Metrics Service provider. @@ -63,11 +63,11 @@ public PrometheusServiceProviderImpl(OzoneConfiguration configuration, ReconUtils reconUtils) { int connectionTimeout = (int) configuration.getTimeDuration( - RECON_PROMETHEUS_CONNECTION_TIMEOUT, - RECON_PROMETHEUS_CONNECTION_TIMEOUT_DEFAULT, TimeUnit.MILLISECONDS); + RECON_METRICS_HTTP_CONNECTION_TIMEOUT, + RECON_METRICS_HTTP_CONNECTION_TIMEOUT_DEFAULT, TimeUnit.MILLISECONDS); int connectionRequestTimeout = (int) configuration.getTimeDuration( - RECON_PROMETHEUS_CONNECTION_REQUEST_TIMEOUT, - RECON_PROMETHEUS_CONNECTION_REQUEST_TIMEOUT_DEFAULT, + RECON_METRICS_HTTP_CONNECTION_REQUEST_TIMEOUT, + RECON_METRICS_HTTP_CONNECTION_REQUEST_TIMEOUT_DEFAULT, TimeUnit.MILLISECONDS); connectionFactory = From cd1a5a978cb2270e1da2e6ec8feb577c713b27e2 Mon Sep 17 00:00:00 2001 From: Vivek Ratnavel Subramanian Date: Fri, 4 Sep 2020 14:29:44 -0700 Subject: [PATCH 3/4] Address review comments --- .../recon/MetricsServiceProviderFactory.java | 13 ++++++++++++- .../apache/hadoop/ozone/recon/ReconConstants.java | 3 --- .../ozone/recon/api/MetricsProxyEndpoint.java | 3 ++- .../apache/hadoop/ozone/recon/metrics/Metric.java | 15 ++++----------- .../spi/impl/PrometheusServiceProviderImpl.java | 5 +++-- .../hadoop/ozone/recon/api/TestEndpoints.java | 2 +- 6 files changed, 22 insertions(+), 19 deletions(-) diff --git a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/MetricsServiceProviderFactory.java b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/MetricsServiceProviderFactory.java index 41533f8bec12..609f1e4e032b 100644 --- a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/MetricsServiceProviderFactory.java +++ b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/MetricsServiceProviderFactory.java @@ -18,10 +18,13 @@ package 
org.apache.hadoop.ozone.recon; +import org.apache.commons.lang3.StringUtils; import org.apache.hadoop.hdds.conf.OzoneConfiguration; import org.apache.hadoop.hdds.recon.ReconConfigKeys; import org.apache.hadoop.ozone.recon.spi.MetricsServiceProvider; import org.apache.hadoop.ozone.recon.spi.impl.PrometheusServiceProviderImpl; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import javax.inject.Inject; import javax.inject.Singleton; @@ -33,6 +36,9 @@ @Singleton public class MetricsServiceProviderFactory { + private static final Logger LOG = + LoggerFactory.getLogger(MetricsServiceProviderFactory.class); + private OzoneConfiguration configuration; private ReconUtils reconUtils; @@ -52,7 +58,12 @@ public MetricsServiceProviderFactory(OzoneConfiguration configuration, */ public MetricsServiceProvider getMetricsServiceProvider() { String prometheusEndpoint = getPrometheusEndpoint(); - if (prometheusEndpoint != null && !prometheusEndpoint.isEmpty()) { + if (StringUtils.isNotEmpty(prometheusEndpoint)) { + if (LOG.isInfoEnabled()) { + LOG.info( + String.format("Choosing Prometheus as Metrics service provider " + + "with configured endpoint: %s", prometheusEndpoint)); + } return new PrometheusServiceProviderImpl(configuration, reconUtils); } return null; diff --git a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ReconConstants.java b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ReconConstants.java index 385883b782d5..0cbc61fa2459 100644 --- a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ReconConstants.java +++ b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ReconConstants.java @@ -52,9 +52,6 @@ private ReconConstants() { public static final String RECON_QUERY_BUCKET = "bucket"; public static final String RECON_QUERY_FILE_SIZE = "fileSize"; - public static final String PROMETHEUS_INSTANT_QUERY_API = "query"; - public static final String PROMETHEUS_RANGED_QUERY_API = "query_range"; - public static 
final String RECON_SCM_CONTAINER_DB = "recon-" + CONTAINER_DB_SUFFIX; public static final String RECON_SCM_PIPELINE_DB = "recon-" diff --git a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/api/MetricsProxyEndpoint.java b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/api/MetricsProxyEndpoint.java index c52054e86ef0..327e9b192663 100644 --- a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/api/MetricsProxyEndpoint.java +++ b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/api/MetricsProxyEndpoint.java @@ -42,7 +42,8 @@ import java.nio.channels.ReadableByteChannel; import java.nio.channels.WritableByteChannel; -import static org.apache.hadoop.ozone.recon.ReconConstants.PROMETHEUS_INSTANT_QUERY_API; +import static org.apache.hadoop.ozone.recon.spi.impl.PrometheusServiceProviderImpl.PROMETHEUS_INSTANT_QUERY_API; + /** * Endpoint to fetch metrics data from Prometheus HTTP endpoint. diff --git a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/metrics/Metric.java b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/metrics/Metric.java index 7a1b40f29478..1b492b250c82 100644 --- a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/metrics/Metric.java +++ b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/metrics/Metric.java @@ -31,28 +31,21 @@ @InterfaceAudience.Private public final class Metric { - private Map metadata; - private TreeMap values; + private final Map metadata; + private final TreeMap values; public Metric(Map metadata, SortedMap values) { this.metadata = metadata; - this.values = (TreeMap) values; + this.values = new TreeMap<>(); + this.values.putAll(values); } public Map getMetadata() { return metadata; } - public void setMetadata(Map metadata) { - this.metadata = metadata; - } - public SortedMap getValues() { return values; } - - public void setValues(SortedMap values) { - this.values = (TreeMap) values; - } } diff --git 
a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/spi/impl/PrometheusServiceProviderImpl.java b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/spi/impl/PrometheusServiceProviderImpl.java index e33f76b439fb..243143d4337d 100644 --- a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/spi/impl/PrometheusServiceProviderImpl.java +++ b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/spi/impl/PrometheusServiceProviderImpl.java @@ -38,8 +38,6 @@ import java.util.TreeMap; import java.util.concurrent.TimeUnit; -import static org.apache.hadoop.ozone.recon.ReconConstants.PROMETHEUS_INSTANT_QUERY_API; -import static org.apache.hadoop.ozone.recon.ReconConstants.PROMETHEUS_RANGED_QUERY_API; import static org.apache.hadoop.ozone.recon.ReconServerConfigKeys.RECON_METRICS_HTTP_CONNECTION_REQUEST_TIMEOUT; import static org.apache.hadoop.ozone.recon.ReconServerConfigKeys.RECON_METRICS_HTTP_CONNECTION_REQUEST_TIMEOUT_DEFAULT; import static org.apache.hadoop.ozone.recon.ReconServerConfigKeys.RECON_METRICS_HTTP_CONNECTION_TIMEOUT; @@ -52,6 +50,9 @@ public class PrometheusServiceProviderImpl implements MetricsServiceProvider { + public static final String PROMETHEUS_INSTANT_QUERY_API = "query"; + public static final String PROMETHEUS_RANGED_QUERY_API = "query_range"; + private static final Logger LOG = LoggerFactory.getLogger(PrometheusServiceProviderImpl.class); diff --git a/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/api/TestEndpoints.java b/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/api/TestEndpoints.java index bdc55d277e3f..303a70acb06c 100644 --- a/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/api/TestEndpoints.java +++ b/hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/api/TestEndpoints.java @@ -83,7 +83,7 @@ import static org.apache.hadoop.ozone.recon.OMMetadataManagerTestUtils.getTestReconOmMetadataManager; import static 
org.apache.hadoop.ozone.recon.OMMetadataManagerTestUtils.initializeNewOmMetadataManager; import static org.apache.hadoop.ozone.recon.OMMetadataManagerTestUtils.writeDataToOm; -import static org.apache.hadoop.ozone.recon.ReconConstants.PROMETHEUS_INSTANT_QUERY_API; +import static org.apache.hadoop.ozone.recon.spi.impl.PrometheusServiceProviderImpl.PROMETHEUS_INSTANT_QUERY_API; import static org.hadoop.ozone.recon.schema.tables.GlobalStatsTable.GLOBAL_STATS; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; From d87ba9cdf02b9be9944b38f21c171e7830640d3d Mon Sep 17 00:00:00 2001 From: Vivek Ratnavel Subramanian Date: Fri, 4 Sep 2020 14:36:48 -0700 Subject: [PATCH 4/4] Fix error message in PipelineEndpoint. --- .../org/apache/hadoop/ozone/recon/api/PipelineEndpoint.java | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/api/PipelineEndpoint.java b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/api/PipelineEndpoint.java index 42812a5edc42..045cdc0b3baa 100644 --- a/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/api/PipelineEndpoint.java +++ b/hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/api/PipelineEndpoint.java @@ -146,8 +146,10 @@ private Optional getMetricValue(String metricName, String groupId) { } } } catch (Exception ex) { - LOG.error("Unable to get metrics for " + - "ratis_leader_election_electionCount.", ex); + if (LOG.isErrorEnabled()) { + LOG.error(String.format("Unable to get metrics value for %s", + metricName), ex); + } } return Optional.empty(); }