@@ -20,6 +20,7 @@
import org.apache.ambari.metrics.core.timeline.PhoenixHBaseAccessor;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.metrics2.sink.timeline.MetricHostAggregate;

import java.io.FileWriter;
import java.io.IOException;
@@ -52,8 +53,8 @@ public AbstractPhoenixMetricsCopier(String inputTableName, String outputTableNam
@Override
public void run(){
LOG.info(String.format("Copying %s metrics from %s to %s", metricNames, inputTable, outputTable));
long startTimer = System.currentTimeMillis();
String query = String.format("SELECT %s %s FROM %s WHERE %s AND SERVER_TIME > %s ORDER BY METRIC_NAME, SERVER_TIME",
final long startTimer = System.currentTimeMillis();
final String query = String.format("SELECT %s %s FROM %s WHERE %s AND SERVER_TIME > %s ORDER BY METRIC_NAME, SERVER_TIME",
getQueryHint(startTime), getColumnsClause(), inputTable, getMetricNamesLikeClause(), startTime);

runPhoenixQueryAndAddToResults(query);
@@ -62,11 +63,12 @@ public void run(){
saveMetrics();
} catch (SQLException e) {
LOG.error(e);
}
long estimatedTime = System.currentTimeMillis() - startTimer;
LOG.debug(String.format("Copying took %s seconds from table %s to table %s for metric names %s", estimatedTime/ 1000.0, inputTable, outputTable, metricNames));
} finally {
final long estimatedTime = System.currentTimeMillis() - startTimer;
LOG.debug(String.format("Copying took %s seconds from table %s to table %s for metric names %s", estimatedTime/ 1000.0, inputTable, outputTable, metricNames));

saveMetricsProgress();
saveMetricsProgress();
}
}
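
For orientation, a minimal standalone sketch of the kind of statement the String.format call above produces. Every placeholder value below (hint text, column list, table name, LIKE clause, start time) is an assumption for illustration only; the real values come from getQueryHint(), getColumnsClause(), getMetricNamesLikeClause() and the configured tables.

public class QueryFormatSketch {
  public static void main(String[] args) {
    final long startTime = 1_600_000_000_000L; // assumed start time in ms
    // Placeholder arguments stand in for getQueryHint(), getColumnsClause(),
    // the input table and getMetricNamesLikeClause().
    final String query = String.format(
        "SELECT %s %s FROM %s WHERE %s AND SERVER_TIME > %s ORDER BY METRIC_NAME, SERVER_TIME",
        "/*+ NATIVE_TIME_RANGE(1599999880000) */",
        "METRIC_NAME, SERVER_TIME, METRIC_SUM, METRIC_COUNT, METRIC_MAX, METRIC_MIN",
        "METRIC_AGGREGATE",
        "(METRIC_NAME LIKE 'cpu%')",
        startTime);
    System.out.println(query);
  }
}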

private String getMetricNamesLikeClause() {
@@ -94,54 +96,39 @@ private String getMetricNamesLikeClause() {

private void runPhoenixQueryAndAddToResults(String query) {
LOG.debug(String.format("Running query: %s", query));
Connection conn = null;
PreparedStatement stmt = null;
try {
conn = hBaseAccessor.getConnection();
stmt = conn.prepareStatement(query);
ResultSet rs = stmt.executeQuery();
while (rs.next()) {
addToResults(rs);
try (Connection conn = hBaseAccessor.getConnection();
PreparedStatement stmt = conn.prepareStatement(query)) {
try (ResultSet rs = stmt.executeQuery()) {
while (rs.next()) {
addToResults(rs);
}
}
} catch (SQLException e) {
LOG.error(String.format("Exception while running Phoenix query %s", query), e);
} finally {
if (stmt != null) {
try {
stmt.close();
} catch (SQLException e) {
// Ignore
}
}
if (conn != null) {
try {
conn.close();
} catch (SQLException e) {
// Ignore
}
}
}
}

/**
* Saves processed metric names into the provided file in the format TABLE_NAME:METRIC_NAME
*/
private void saveMetricsProgress() {
if (processedMetricsFile == null) {
if (this.processedMetricsFile == null) {
LOG.info("Skipping metrics progress save as the file is null");
return;
}
for (String metricName : metricNames) {
try {
processedMetricsFile.append(inputTable + ":" + metricName + System.lineSeparator());
} catch (IOException e) {
LOG.error(e);
synchronized (this.processedMetricsFile) {
for (String metricName : metricNames) {
try {
this.processedMetricsFile.append(inputTable).append(":").append(metricName).append(System.lineSeparator());
} catch (IOException e) {
LOG.error(e);
}
}
}
}
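
Since every copier thread appends to the same progress writer, the synchronized block above is what keeps lines from different copiers from interleaving mid-record. A minimal, self-contained sketch of that pattern; the file name, pool size, table names and metric name are made up for illustration and are not part of this change.

import java.io.FileWriter;
import java.io.IOException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

public class SharedProgressWriterSketch {
  public static void main(String[] args) throws Exception {
    // One writer shared by all worker threads, as with processedMetricsFile.
    final FileWriter progressFile = new FileWriter("copier-progress.txt", true);
    ExecutorService pool = Executors.newFixedThreadPool(4);
    for (int worker = 0; worker < 4; worker++) {
      final String table = "METRIC_RECORD_" + worker; // stand-in for inputTable
      pool.submit(() -> {
        // Same idea as saveMetricsProgress(): lock the shared writer so
        // appends from different threads cannot interleave.
        synchronized (progressFile) {
          try {
            progressFile.append(table).append(":").append("cpu_user").append(System.lineSeparator());
          } catch (IOException e) {
            e.printStackTrace();
          }
        }
      });
    }
    pool.shutdown();
    pool.awaitTermination(1, TimeUnit.MINUTES);
    progressFile.close();
  }
}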

protected String getQueryHint(Long startTime) {
StringBuilder sb = new StringBuilder();
final StringBuilder sb = new StringBuilder();
sb.append("/*+ ");
sb.append("NATIVE_TIME_RANGE(");
sb.append(startTime - DEFAULT_NATIVE_TIME_RANGE_DELAY);
@@ -150,6 +137,15 @@ protected String getQueryHint(Long startTime) {
return sb.toString();
}
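
The tail of the builder sits in the collapsed lines, so here is a standalone rendering of the hint format under two assumptions: that the hidden lines close the hint with ") */", and that DEFAULT_NATIVE_TIME_RANGE_DELAY is a plain millisecond offset (the value used below is made up).

public class QueryHintSketch {
  private static final long DEFAULT_NATIVE_TIME_RANGE_DELAY = 120_000L; // assumed value
  public static void main(String[] args) {
    final long startTime = 1_600_000_000_000L;
    final StringBuilder sb = new StringBuilder();
    sb.append("/*+ ");
    sb.append("NATIVE_TIME_RANGE(");
    sb.append(startTime - DEFAULT_NATIVE_TIME_RANGE_DELAY);
    sb.append(") */"); // assumed closing, hidden in the collapsed region
    System.out.println(sb); // prints: /*+ NATIVE_TIME_RANGE(1599999880000) */
  }
}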

protected MetricHostAggregate extractMetricHostAggregate(ResultSet rs) throws SQLException {
MetricHostAggregate metricHostAggregate = new MetricHostAggregate();
metricHostAggregate.setSum(rs.getDouble("METRIC_SUM"));
metricHostAggregate.setNumberOfSamples(rs.getLong("METRIC_COUNT"));
metricHostAggregate.setMax(rs.getDouble("METRIC_MAX"));
metricHostAggregate.setMin(rs.getDouble("METRIC_MIN"));
return metricHostAggregate;
}
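
As an illustration of how a concrete copier might consume the new helper, a hypothetical addToResults(ResultSet) override could look roughly like the fragment below. The aggregates map, the key columns and the exact hook signature are assumptions, not part of this diff.

// Hypothetical subclass fragment (not in this PR): collect one
// MetricHostAggregate per METRIC_NAME/SERVER_TIME row for a later saveMetrics().
// Requires java.util.Map and java.util.HashMap imports.
private final Map<String, MetricHostAggregate> aggregates = new HashMap<>();

@Override
protected void addToResults(ResultSet rs) throws SQLException {
  String key = rs.getString("METRIC_NAME") + ":" + rs.getLong("SERVER_TIME");
  aggregates.put(key, extractMetricHostAggregate(rs));
}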

/**
* Saves aggregated metrics to HBase
* @throws SQLException