You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by dh...@apache.org on 2007/10/01 20:05:55 UTC
svn commit: r581028 - in /lucene/hadoop/trunk: ./
src/java/org/apache/hadoop/fs/ src/test/org/apache/hadoop/dfs/
src/test/org/apache/hadoop/fs/
Author: dhruba
Date: Mon Oct 1 11:05:47 2007
New Revision: 581028
URL: http://svn.apache.org/viewvc?rev=581028&view=rev
Log:
HADOOP-1946. The Datanode code does not need to invoke du on
every heartbeat. (Hairong Kuang via dhruba)
Added:
lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/ShellCommand.java (with props)
lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/TestDU.java (with props)
Modified:
lucene/hadoop/trunk/CHANGES.txt
lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/Command.java
lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/DF.java
lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/DU.java
lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/FileUtil.java
lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/TestDFSUpgradeFromImage.java
Modified: lucene/hadoop/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/CHANGES.txt?rev=581028&r1=581027&r2=581028&view=diff
==============================================================================
--- lucene/hadoop/trunk/CHANGES.txt (original)
+++ lucene/hadoop/trunk/CHANGES.txt Mon Oct 1 11:05:47 2007
@@ -94,6 +94,9 @@
BUG FIXES
+ HADOOP-1946. The Datanode code does not need to invoke du on
+ every heartbeat. (Hairong Kuang via dhruba)
+
HADOOP-1935. Fix a NullPointerException in internalReleaseCreate.
(Dhruba Borthakur)
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/Command.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/Command.java?rev=581028&r1=581027&r2=581028&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/Command.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/Command.java Mon Oct 1 11:05:47 2007
@@ -1,92 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.fs;
-
-import java.io.IOException;
-import java.io.InputStreamReader;
-import java.io.BufferedReader;
-
-/** A base class for running a unix command like du or df*/
-abstract public class Command {
- /** Run a command */
- protected void run() throws IOException {
- Process process;
- process = Runtime.getRuntime().exec(getExecString());
-
- try {
- if (process.waitFor() != 0) {
- throw new IOException
- (new BufferedReader(new InputStreamReader(process.getErrorStream()))
- .readLine());
- }
- parseExecResult(new BufferedReader(
- new InputStreamReader(process.getInputStream())));
- } catch (InterruptedException e) {
- throw new IOException(e.toString());
- } finally {
- process.destroy();
- }
- }
-
- /** return an array comtaining the command name & its parameters */
- protected abstract String[] getExecString();
-
- /** Parse the execution result */
- protected abstract void parseExecResult(BufferedReader lines)
- throws IOException;
-
- /// A simple implementation of Command
- private static class SimpleCommandExecutor extends Command {
-
- private String[] command;
- private StringBuffer reply;
-
- SimpleCommandExecutor(String[] execString) {
- command = execString;
- }
-
- @Override
- protected String[] getExecString() {
- return command;
- }
-
- @Override
- protected void parseExecResult(BufferedReader lines) throws IOException {
- reply = new StringBuffer();
- char[] buf = new char[512];
- int nRead;
- while ( (nRead = lines.read(buf, 0, buf.length)) > 0 ) {
- reply.append(buf, 0, nRead);
- }
- }
-
- String getReply() {
- return (reply == null) ? "" : reply.toString();
- }
- }
-
- /**
- * Static method to execute a command. Covers most of the simple cases
- * without requiring the user to implement Command interface.
- */
- public static String execCommand(String[] cmd) throws IOException {
- SimpleCommandExecutor exec = new SimpleCommandExecutor(cmd);
- exec.run();
- return exec.getReply();
- }
-}
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/DF.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/DF.java?rev=581028&r1=581027&r2=581028&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/DF.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/DF.java Mon Oct 1 11:05:47 2007
@@ -19,7 +19,6 @@
import java.io.File;
import java.io.IOException;
-import java.io.InputStreamReader;
import java.io.BufferedReader;
import java.util.StringTokenizer;
@@ -28,13 +27,10 @@
/** Filesystem disk space usage statistics. Uses the unix 'df' program.
* Tested on Linux, FreeBSD, Cygwin. */
-public class DF extends Command {
+public class DF extends ShellCommand {
public static final long DF_INTERVAL_DEFAULT = 3 * 1000; // default DF refresh interval
private String dirPath;
- private long dfInterval; // DF refresh interval in msec
- private long lastDF; // last time doDF() was performed
-
private String filesystem;
private long capacity;
private long used;
@@ -47,18 +43,10 @@
}
public DF(File path, long dfInterval) throws IOException {
+ super(dfInterval);
this.dirPath = path.getCanonicalPath();
- this.dfInterval = dfInterval;
- lastDF = (dfInterval < 0) ? 0 : -dfInterval;
- this.doDF();
}
- private void doDF() throws IOException {
- if (lastDF + dfInterval > System.currentTimeMillis())
- return;
- super.run();
- }
-
/// ACCESSORS
public String getDirPath() {
@@ -66,32 +54,32 @@
}
public String getFilesystem() throws IOException {
- doDF();
+ run();
return filesystem;
}
public long getCapacity() throws IOException {
- doDF();
+ run();
return capacity;
}
public long getUsed() throws IOException {
- doDF();
+ run();
return used;
}
public long getAvailable() throws IOException {
- doDF();
+ run();
return available;
}
public int getPercentUsed() throws IOException {
- doDF();
+ run();
return percentUsed;
}
public String getMount() throws IOException {
- doDF();
+ run();
return mount;
}
@@ -133,7 +121,6 @@
this.available = Long.parseLong(tokens.nextToken()) * 1024;
this.percentUsed = Integer.parseInt(tokens.nextToken());
this.mount = tokens.nextToken();
- this.lastDF = System.currentTimeMillis();
}
public static void main(String[] args) throws Exception {
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/DU.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/DU.java?rev=581028&r1=581027&r2=581028&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/DU.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/DU.java Mon Oct 1 11:05:47 2007
@@ -25,17 +25,14 @@
import org.apache.hadoop.dfs.FSConstants;
/** Filesystem disk space usage statistics. Uses the unix 'du' program*/
-public class DU extends Command {
+public class DU extends ShellCommand {
private String dirPath;
- private long duInterval; // DU refresh interval in msec
- private long lastDU; // last time doDU() was performed
private long used;
public DU(File path, long interval) throws IOException {
+ super(interval);
this.dirPath = path.getCanonicalPath();
- this.duInterval = interval;
- run();
}
public DU(File path, Configuration conf) throws IOException {
@@ -52,9 +49,7 @@
}
synchronized public long getUsed() throws IOException {
- if (lastDU + duInterval > System.currentTimeMillis()) {
- run();
- }
+ run();
return used;
}
@@ -83,7 +78,6 @@
throw new IOException("Illegal du output");
}
this.used = Long.parseLong(tokens[0])*1024;
- this.lastDU = System.currentTimeMillis();
}
public static void main(String[] args) throws Exception {
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/FileUtil.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/FileUtil.java?rev=581028&r1=581027&r2=581028&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/FileUtil.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/FileUtil.java Mon Oct 1 11:05:47 2007
@@ -257,7 +257,7 @@
/**
* This class is only used on windows to invoke the cygpath command.
*/
- private static class CygPathCommand extends Command {
+ private static class CygPathCommand extends ShellCommand {
String[] command;
String result;
CygPathCommand(String path) throws IOException {
Added: lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/ShellCommand.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/ShellCommand.java?rev=581028&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/ShellCommand.java (added)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/ShellCommand.java Mon Oct 1 11:05:47 2007
@@ -0,0 +1,112 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.fs;
+
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.io.BufferedReader;
+
+/** A base class for running a unix command, such as du or df, at most
+ * once per refresh interval. */
+abstract public class ShellCommand {
+ private long interval; // refresh interval in msec; <= 0 means run on every call
+ private long lastTime; // last time the command was performed (epoch msec)
+
+ ShellCommand() {
+ this(0L);
+ }
+
+ ShellCommand( long interval ) {
+ this.interval = interval;
+ // Initialized so that the first call to run() always executes the command:
+ // for interval >= 0, lastTime + interval == 0 <= currentTimeMillis().
+ this.lastTime = (interval<0) ? 0 : -interval;
+ }
+
+ /** Run the command, but only if the refresh interval has elapsed
+ * since the last execution; otherwise return without doing anything. */
+ protected void run() throws IOException {
+ if (lastTime + interval > System.currentTimeMillis())
+ return;
+ runCommand();
+ }
+
+ /** Fork the command and parse its output. The execution timestamp is
+ * recorded in the finally block, i.e. even when the command fails. */
+ private void runCommand() throws IOException {
+ Process process;
+ process = Runtime.getRuntime().exec(getExecString());
+
+ try {
+ if (process.waitFor() != 0) {
+ // Non-zero exit: surface the first line of stderr as the error message.
+ throw new IOException
+ (new BufferedReader(new InputStreamReader(process.getErrorStream()))
+ .readLine());
+ }
+ parseExecResult(new BufferedReader(
+ new InputStreamReader(process.getInputStream())));
+ } catch (InterruptedException e) {
+ throw new IOException(e.toString());
+ } finally {
+ process.destroy();
+ lastTime = System.currentTimeMillis();
+ }
+ }
+
+ /** return an array containing the command name & its parameters */
+ protected abstract String[] getExecString();
+
+ /** Parse the execution result read from the command's stdout */
+ protected abstract void parseExecResult(BufferedReader lines)
+ throws IOException;
+
+ /// A simple implementation of ShellCommand that captures the whole output
+ private static class SimpleCommandExecutor extends ShellCommand {
+
+ private String[] command;
+ private StringBuffer reply;
+
+ SimpleCommandExecutor(String[] execString) {
+ command = execString;
+ }
+
+ @Override
+ protected String[] getExecString() {
+ return command;
+ }
+
+ @Override
+ protected void parseExecResult(BufferedReader lines) throws IOException {
+ reply = new StringBuffer();
+ char[] buf = new char[512];
+ int nRead;
+ while ( (nRead = lines.read(buf, 0, buf.length)) > 0 ) {
+ reply.append(buf, 0, nRead);
+ }
+ }
+
+ String getReply() {
+ // null reply means parseExecResult was never invoked (command not run)
+ return (reply == null) ? "" : reply.toString();
+ }
+ }
+
+ /**
+ * Static method to execute a command. Covers most of the simple cases
+ * without requiring the user to subclass ShellCommand.
+ */
+ public static String execCommand(String[] cmd) throws IOException {
+ SimpleCommandExecutor exec = new SimpleCommandExecutor(cmd);
+ exec.run();
+ return exec.getReply();
+ }
+}
Propchange: lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/ShellCommand.java
------------------------------------------------------------------------------
svn:eol-style = native
Propchange: lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/ShellCommand.java
------------------------------------------------------------------------------
svn:keywords = Id Revision HeadURL
Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/TestDFSUpgradeFromImage.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/TestDFSUpgradeFromImage.java?rev=581028&r1=581027&r2=581028&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/TestDFSUpgradeFromImage.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/TestDFSUpgradeFromImage.java Mon Oct 1 11:05:47 2007
@@ -27,7 +27,7 @@
import java.util.zip.CRC32;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Command;
+import org.apache.hadoop.fs.ShellCommand;
import org.apache.hadoop.fs.FSInputStream;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.io.UTF8;
@@ -72,7 +72,7 @@
FileUtil.makeShellPath(dataDir) + "' ; tar -xf -)";
LOG.info("Unpacking the tar file. Cmd : " + cmd);
String[] shellCmd = { "bash", "-c", cmd };
- Command.execCommand(shellCmd);
+ ShellCommand.execCommand(shellCmd);
//Now read the reference info
Added: lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/TestDU.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/TestDU.java?rev=581028&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/TestDU.java (added)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/TestDU.java Mon Oct 1 11:05:47 2007
@@ -0,0 +1,77 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.fs;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.RandomAccessFile;
+
+import junit.framework.TestCase;
+
+/** This test makes sure that "du" is only re-invoked by DU.getUsed()
+ * after the configured refresh interval has elapsed, not on every call. */
+public class TestDU extends TestCase {
+ final static private File DU_DIR = new File(".");
+ final static private File DU_FILE1 = new File(DU_DIR, "tmp1");
+ final static private File DU_FILE2 = new File(DU_DIR, "tmp2");
+
+ /** create a file of more than 1K size so that du reports nonzero usage */
+ private void createFile( File newFile ) throws IOException {
+ newFile.createNewFile();
+ RandomAccessFile file = new RandomAccessFile(newFile, "rw");
+ file.seek(1024);
+ file.writeBytes("du test du test");
+ file.getFD().sync();
+ file.close();
+ }
+
+ /** delete a file, asserting that the deletion succeeded if it existed */
+ private void rmFile(File file) {
+ if(file.exists()) {
+ assertTrue(file.delete());
+ }
+ }
+
+ /* Exercise DU with the given refresh interval; interval is in a unit
+ * of minutes. A positive interval should suppress re-running du,
+ * while a zero or negative interval should re-run it on every call. */
+ private void testDU(long interval) throws IOException {
+ rmFile(DU_FILE1);
+ rmFile(DU_FILE2);
+ try {
+ createFile(DU_FILE1);
+ DU du = new DU(DU_DIR, interval*60000);
+ long oldUsedSpace = du.getUsed();
+ assertTrue(oldUsedSpace>0); // make sure that du is called
+ createFile(DU_FILE2);
+ if(interval>0) {
+ assertEquals( oldUsedSpace, du.getUsed()); // du does not get called
+ } else {
+ assertTrue( oldUsedSpace < du.getUsed()); // du gets called again
+ }
+ } finally {
+ rmFile(DU_FILE1);
+ rmFile(DU_FILE2);
+ }
+ }
+
+ public void testDU() throws Exception {
+ testDU(Long.MIN_VALUE/60000); // test a negative interval
+ testDU(0L); // test a zero interval
+ testDU(10L); // interval equal to 10mins
+ testDU(System.currentTimeMillis()/60000+60); // test a very big interval
+ }
+
+}
Propchange: lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/TestDU.java
------------------------------------------------------------------------------
svn:eol-style = native
Propchange: lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/TestDU.java
------------------------------------------------------------------------------
svn:keywords = Id Revision HeadURL