You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by ha...@apache.org on 2008/10/13 09:21:36 UTC
svn commit: r703932 - in /hadoop/core/branches/branch-0.18: ./
src/hdfs/org/apache/hadoop/dfs/datanode/metrics/
src/test/org/apache/hadoop/dfs/
Author: hairong
Date: Mon Oct 13 00:21:35 2008
New Revision: 703932
URL: http://svn.apache.org/viewvc?rev=703932&view=rev
Log:
Merge -r 703922:703923 from trunk to main to move the change log of HADOOP-4228 into release 0.18.2 section
Added:
hadoop/core/branches/branch-0.18/src/test/org/apache/hadoop/dfs/TestDataNodeMetrics.java
Modified:
hadoop/core/branches/branch-0.18/CHANGES.txt
hadoop/core/branches/branch-0.18/src/hdfs/org/apache/hadoop/dfs/datanode/metrics/DataNodeMetrics.java
hadoop/core/branches/branch-0.18/src/hdfs/org/apache/hadoop/dfs/datanode/metrics/DataNodeStatistics.java
hadoop/core/branches/branch-0.18/src/hdfs/org/apache/hadoop/dfs/datanode/metrics/DataNodeStatisticsMBean.java
Modified: hadoop/core/branches/branch-0.18/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.18/CHANGES.txt?rev=703932&r1=703931&r2=703932&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.18/CHANGES.txt (original)
+++ hadoop/core/branches/branch-0.18/CHANGES.txt Mon Oct 13 00:21:35 2008
@@ -9,6 +9,9 @@
HADOOP-3614. Fix a bug that Datanode may use an old GenerationStamp to get
meta file. (szetszwo)
+ HADOOP-4228. dfs datanode metrics, bytes_read and bytes_written, overflow
+ due to incorrect type used. (hairong)
+
NEW FEATURES
HADOOP-2421. Add jdiff output to documentation, listing all API
Modified: hadoop/core/branches/branch-0.18/src/hdfs/org/apache/hadoop/dfs/datanode/metrics/DataNodeMetrics.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.18/src/hdfs/org/apache/hadoop/dfs/datanode/metrics/DataNodeMetrics.java?rev=703932&r1=703931&r2=703932&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.18/src/hdfs/org/apache/hadoop/dfs/datanode/metrics/DataNodeMetrics.java (original)
+++ hadoop/core/branches/branch-0.18/src/hdfs/org/apache/hadoop/dfs/datanode/metrics/DataNodeMetrics.java Mon Oct 13 00:21:35 2008
@@ -24,6 +24,7 @@
import org.apache.hadoop.metrics.Updater;
import org.apache.hadoop.metrics.jvm.JvmMetrics;
import org.apache.hadoop.metrics.util.MetricsTimeVaryingInt;
+import org.apache.hadoop.metrics.util.MetricsLongValue;
import org.apache.hadoop.metrics.util.MetricsTimeVaryingRate;
@@ -44,10 +45,10 @@
private DataNodeStatistics datanodeStats;
- public MetricsTimeVaryingInt bytesWritten =
- new MetricsTimeVaryingInt("bytes_written");
- public MetricsTimeVaryingInt bytesRead =
- new MetricsTimeVaryingInt("bytes_read");
+ public MetricsLongValue bytesWritten =
+ new MetricsLongValue("bytes_written");
+ public MetricsLongValue bytesRead =
+ new MetricsLongValue("bytes_read");
public MetricsTimeVaryingInt blocksWritten =
new MetricsTimeVaryingInt("blocks_written");
public MetricsTimeVaryingInt blocksRead =
Modified: hadoop/core/branches/branch-0.18/src/hdfs/org/apache/hadoop/dfs/datanode/metrics/DataNodeStatistics.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.18/src/hdfs/org/apache/hadoop/dfs/datanode/metrics/DataNodeStatistics.java?rev=703932&r1=703931&r2=703932&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.18/src/hdfs/org/apache/hadoop/dfs/datanode/metrics/DataNodeStatistics.java (original)
+++ hadoop/core/branches/branch-0.18/src/hdfs/org/apache/hadoop/dfs/datanode/metrics/DataNodeStatistics.java Mon Oct 13 00:21:35 2008
@@ -34,7 +34,7 @@
* This constructs and registers the DataNodeStatisticsMBean
* @param dataNodeMetrics - the metrics from which the mbean gets its info
*/
- DataNodeStatistics(DataNodeMetrics dataNodeMetrics, String storageId) {
+ public DataNodeStatistics(DataNodeMetrics dataNodeMetrics, String storageId) {
myMetrics = dataNodeMetrics;
String serverName;
if (storageId.equals("")) {// Temp fix for the uninitialized storage
@@ -92,11 +92,18 @@
/**
* @inheritDoc
*/
- public int getBytesRead() {
- return myMetrics.bytesRead.getPreviousIntervalValue();
+ public long getBytesRead() {
+ return myMetrics.bytesRead.get();
}
/**
+ * {@inheritDoc}
+ */
+ public long getBytesWritten() {
+ return myMetrics.bytesWritten.get();
+ }
+
+ /**
* @inheritDoc
*/
public int getBlockVerificationFailures() {
Modified: hadoop/core/branches/branch-0.18/src/hdfs/org/apache/hadoop/dfs/datanode/metrics/DataNodeStatisticsMBean.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.18/src/hdfs/org/apache/hadoop/dfs/datanode/metrics/DataNodeStatisticsMBean.java?rev=703932&r1=703931&r2=703932&view=diff
==============================================================================
--- hadoop/core/branches/branch-0.18/src/hdfs/org/apache/hadoop/dfs/datanode/metrics/DataNodeStatisticsMBean.java (original)
+++ hadoop/core/branches/branch-0.18/src/hdfs/org/apache/hadoop/dfs/datanode/metrics/DataNodeStatisticsMBean.java Mon Oct 13 00:21:35 2008
@@ -45,10 +45,16 @@
public interface DataNodeStatisticsMBean {
/**
+ * Number of bytes written in the last interval
+ * @return number of bytes written
+ */
+ long getBytesWritten();
+
+ /**
* Number of bytes read in the last interval
* @return number of bytes read
*/
- int getBytesRead();
+ long getBytesRead();
/**
* Number of blocks written in the last interval
Added: hadoop/core/branches/branch-0.18/src/test/org/apache/hadoop/dfs/TestDataNodeMetrics.java
URL: http://svn.apache.org/viewvc/hadoop/core/branches/branch-0.18/src/test/org/apache/hadoop/dfs/TestDataNodeMetrics.java?rev=703932&view=auto
==============================================================================
--- hadoop/core/branches/branch-0.18/src/test/org/apache/hadoop/dfs/TestDataNodeMetrics.java (added)
+++ hadoop/core/branches/branch-0.18/src/test/org/apache/hadoop/dfs/TestDataNodeMetrics.java Mon Oct 13 00:21:35 2008
@@ -0,0 +1,106 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.dfs;
+
+import java.util.List;
+
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.dfs.DFSTestUtil;
+import org.apache.hadoop.dfs.MiniDFSCluster;
+import org.apache.hadoop.dfs.datanode.metrics.DataNodeMetrics;
+import org.apache.hadoop.dfs.datanode.metrics.DataNodeStatistics;
+import org.apache.hadoop.conf.Configuration;
+import junit.framework.TestCase;
+
+public class TestDataNodeMetrics extends TestCase {
+
+ public void testDataNodeMetrics() throws Exception {
+ Configuration conf = new Configuration();
+ conf.setBoolean(SimulatedFSDataset.CONFIG_PROPERTY_SIMULATED, true);
+ MiniDFSCluster cluster = new MiniDFSCluster(conf, 1, true, null);
+ try {
+ FileSystem fs = cluster.getFileSystem();
+ final long LONG_FILE_LEN = Integer.MAX_VALUE+1L;
+ DFSTestUtil.createFile(fs, new Path("/tmp.txt"),
+ LONG_FILE_LEN, (short)1, 1L);
+ List<DataNode> datanodes = cluster.getDataNodes();
+ assertEquals(datanodes.size(), 1);
+ DataNode datanode = datanodes.get(0);
+ DataNodeMetrics metrics = datanode.getMetrics();
+ DataNodeStatistics statistics = new DataNodeStatistics(
+ metrics, datanode.dnRegistration.storageID);
+ assertEquals(LONG_FILE_LEN, statistics.getBytesWritten());
+ } finally {
+ if (cluster != null) {cluster.shutdown();}
+ }
+ }
+}