You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by st...@apache.org on 2008/11/25 23:40:52 UTC
svn commit: r720630 - in /hadoop/core/trunk: CHANGES.txt
src/core/org/apache/hadoop/metrics/ganglia/GangliaContext.java
src/core/org/apache/hadoop/metrics/jvm/JvmMetrics.java
Author: stack
Date: Tue Nov 25 14:40:52 2008
New Revision: 720630
URL: http://svn.apache.org/viewvc?rev=720630&view=rev
Log:
HADOOP-3422 Ganglia counter metrics are all reported with the metric name value, so the counter values can not be seen
Modified:
hadoop/core/trunk/CHANGES.txt
hadoop/core/trunk/src/core/org/apache/hadoop/metrics/ganglia/GangliaContext.java
hadoop/core/trunk/src/core/org/apache/hadoop/metrics/jvm/JvmMetrics.java
Modified: hadoop/core/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/CHANGES.txt?rev=720630&r1=720629&r2=720630&view=diff
==============================================================================
--- hadoop/core/trunk/CHANGES.txt (original)
+++ hadoop/core/trunk/CHANGES.txt Tue Nov 25 14:40:52 2008
@@ -144,6 +144,10 @@
HADOOP-2774. Add counters tracking records spilled to disk in MapTask and
ReduceTask. (Ravi Gummadi via cdouglas)
+ HADOOP-3422 Ganglia counter metrics are all reported with the metric
+ name "value", so the counter values can not be seen. (Jason Attributor
+ and Brian Bockelman via stack)
+
OPTIMIZATIONS
HADOOP-3293. Fixes FileInputFormat to provide locations for splits
Modified: hadoop/core/trunk/src/core/org/apache/hadoop/metrics/ganglia/GangliaContext.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/core/org/apache/hadoop/metrics/ganglia/GangliaContext.java?rev=720630&r1=720629&r2=720630&view=diff
==============================================================================
--- hadoop/core/trunk/src/core/org/apache/hadoop/metrics/ganglia/GangliaContext.java (original)
+++ hadoop/core/trunk/src/core/org/apache/hadoop/metrics/ganglia/GangliaContext.java Tue Nov 25 14:40:52 2008
@@ -29,6 +29,9 @@
import java.util.List;
import java.util.Map;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
import org.apache.hadoop.metrics.ContextFactory;
import org.apache.hadoop.metrics.MetricsException;
import org.apache.hadoop.metrics.spi.AbstractMetricsContext;
@@ -54,7 +57,9 @@
private static final int DEFAULT_DMAX = 0;
private static final int DEFAULT_PORT = 8649;
private static final int BUFFER_SIZE = 1500; // as per libgmond.c
-
+
+ private final Log LOG = LogFactory.getLog(this.getClass());
+
private static final Map<Class,String> typeTable = new HashMap<Class,String>(5);
static {
@@ -62,6 +67,7 @@
typeTable.put(Byte.class, "int8");
typeTable.put(Short.class, "int16");
typeTable.put(Integer.class, "int32");
+ typeTable.put(Long.class, "float");
typeTable.put(Float.class, "float");
}
@@ -80,8 +86,7 @@
public GangliaContext() {
}
- public void init(String contextName, ContextFactory factory)
- {
+ public void init(String contextName, ContextFactory factory) {
super.init(contextName, factory);
String periodStr = getAttribute(PERIOD_PROPERTY);
@@ -112,22 +117,35 @@
se.printStackTrace();
}
}
-
- public void emitRecord(String contextName, String recordName, OutputRecord outRec)
- throws IOException
- {
+
+ public void emitRecord(String contextName, String recordName,
+ OutputRecord outRec)
+ throws IOException {
+ // Setup so that the records have the proper leader names so they are
+ // unambiguous at the ganglia level, and this prevents a lot of rework
+ StringBuilder sb = new StringBuilder();
+ sb.append(contextName);
+ sb.append('.');
+ sb.append(recordName);
+ sb.append('.');
+ int sbBaseLen = sb.length();
+
// emit each metric in turn
for (String metricName : outRec.getMetricNames()) {
Object metric = outRec.getMetric(metricName);
String type = typeTable.get(metric.getClass());
- emitMetric(metricName, type, metric.toString());
+ if (type != null) {
+ sb.append(metricName);
+ emitMetric(sb.toString(), type, metric.toString());
+ sb.setLength(sbBaseLen);
+ } else {
+ LOG.warn("Unknown metrics type: " + metric.getClass());
+ }
}
-
}
private void emitMetric(String name, String type, String value)
- throws IOException
- {
+ throws IOException {
String units = getUnits(name);
int slope = getSlope(name);
int tmax = getTmax(name);
@@ -167,6 +185,9 @@
}
private int getTmax(String metricName) {
+ if (tmaxTable == null) {
+ return DEFAULT_TMAX;
+ }
String tmaxString = tmaxTable.get(metricName);
if (tmaxString == null) {
return DEFAULT_TMAX;
@@ -219,5 +240,4 @@
buffer[offset++] = (byte)((i >> 8) & 0xff);
buffer[offset++] = (byte)(i & 0xff);
}
-
}
Modified: hadoop/core/trunk/src/core/org/apache/hadoop/metrics/jvm/JvmMetrics.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/core/org/apache/hadoop/metrics/jvm/JvmMetrics.java?rev=720630&r1=720629&r2=720630&view=diff
==============================================================================
--- hadoop/core/trunk/src/core/org/apache/hadoop/metrics/jvm/JvmMetrics.java (original)
+++ hadoop/core/trunk/src/core/org/apache/hadoop/metrics/jvm/JvmMetrics.java Tue Nov 25 14:40:52 2008
@@ -32,10 +32,9 @@
import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.metrics.MetricsException;
/**
- * Singleton class which eports Java Virtual Machine metrics to the metrics API.
+ * Singleton class which reports Java Virtual Machine metrics to the metrics API.
* Any application can create an instance of this class in order to emit
* Java VM metrics.
*/
@@ -58,6 +57,11 @@
private long infoCount = 0;
public synchronized static JvmMetrics init(String processName, String sessionId) {
+ return init(processName, sessionId, "metrics");
+ }
+
+ public synchronized static JvmMetrics init(String processName, String sessionId,
+ String recordName) {
if (theInstance != null) {
log.info("Cannot initialize JVM Metrics with processName=" +
processName + ", sessionId=" + sessionId +
@@ -66,15 +70,16 @@
else {
log.info("Initializing JVM Metrics with processName="
+ processName + ", sessionId=" + sessionId);
- theInstance = new JvmMetrics(processName, sessionId);
+ theInstance = new JvmMetrics(processName, sessionId, recordName);
}
return theInstance;
}
/** Creates a new instance of JvmMetrics */
- private JvmMetrics(String processName, String sessionId) {
+ private JvmMetrics(String processName, String sessionId,
+ String recordName) {
MetricsContext context = MetricsUtil.getContext("jvm");
- metrics = MetricsUtil.createRecord(context, "metrics");
+ metrics = MetricsUtil.createRecord(context, recordName);
metrics.setTag("processName", processName);
metrics.setTag("sessionId", sessionId);
context.registerUpdater(this);