Posted to common-commits@hadoop.apache.org by cu...@apache.org on 2006/10/31 23:35:08 UTC
svn commit: r469685 - in /lucene/hadoop/trunk: ./
src/examples/org/apache/hadoop/examples/ src/java/
src/java/org/apache/hadoop/conf/ src/java/org/apache/hadoop/dfs/
src/java/org/apache/hadoop/fs/ src/java/org/apache/hadoop/io/
src/java/org/apache/hado...
Author: cutting
Date: Tue Oct 31 14:35:06 2006
New Revision: 469685
URL: http://svn.apache.org/viewvc?view=rev&rev=469685
Log:
HADOOP-399. Fix javadoc warnings. Contributed by Nigel.
Modified:
lucene/hadoop/trunk/CHANGES.txt
lucene/hadoop/trunk/src/examples/org/apache/hadoop/examples/ExampleDriver.java
lucene/hadoop/trunk/src/examples/org/apache/hadoop/examples/PiBenchmark.java
lucene/hadoop/trunk/src/java/org/apache/hadoop/conf/Configuration.java
lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSShell.java
lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSck.java
lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DatanodeProtocol.java
lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DistributedFileSystem.java
lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/NamenodeFsck.java
lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/FileSystem.java
lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/FileUtil.java
lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/Path.java
lucene/hadoop/trunk/src/java/org/apache/hadoop/io/BytesWritable.java
lucene/hadoop/trunk/src/java/org/apache/hadoop/io/SequenceFile.java
lucene/hadoop/trunk/src/java/org/apache/hadoop/io/Text.java
lucene/hadoop/trunk/src/java/org/apache/hadoop/io/WritableUtils.java
lucene/hadoop/trunk/src/java/org/apache/hadoop/io/compress/CompressionInputStream.java
lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/FileSplit.java
lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobConf.java
lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobHistory.java
lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/SequenceFileInputFilter.java
lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/StatusHttpServer.java
lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskTracker.java
lucene/hadoop/trunk/src/java/org/apache/hadoop/util/ReflectionUtils.java
lucene/hadoop/trunk/src/java/org/apache/hadoop/util/StringUtils.java
lucene/hadoop/trunk/src/java/overview.html
Modified: lucene/hadoop/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/CHANGES.txt?view=diff&rev=469685&r1=469684&r2=469685
==============================================================================
--- lucene/hadoop/trunk/CHANGES.txt (original)
+++ lucene/hadoop/trunk/CHANGES.txt Tue Oct 31 14:35:06 2006
@@ -105,6 +105,8 @@
error. Also, change the hadoop-daemon.sh script to rotate
standard i/o log files. (Raghu Angadi via cutting)
+29. HADOOP-399. Fix javadoc warnings. (Nigel Daley via cutting)
+
Release 0.7.2 - 2006-10-18
Modified: lucene/hadoop/trunk/src/examples/org/apache/hadoop/examples/ExampleDriver.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/examples/org/apache/hadoop/examples/ExampleDriver.java?view=diff&rev=469685&r1=469684&r2=469685
==============================================================================
--- lucene/hadoop/trunk/src/examples/org/apache/hadoop/examples/ExampleDriver.java (original)
+++ lucene/hadoop/trunk/src/examples/org/apache/hadoop/examples/ExampleDriver.java Tue Oct 31 14:35:06 2006
@@ -19,15 +19,13 @@
package org.apache.hadoop.examples;
import org.apache.hadoop.util.ProgramDriver;
+/**
+ * A description of an example program based on its class and a
+ * human-readable description.
+ * @author Owen O'Malley
+ */
public class ExampleDriver {
- /**
- * A description of an example program based on its class and a
- * human-readable description.
- * @author Owen O'Malley
- * @date april 2006
- */
-
public static void main(String argv[]){
ProgramDriver pgd = new ProgramDriver();
try {
Modified: lucene/hadoop/trunk/src/examples/org/apache/hadoop/examples/PiBenchmark.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/examples/org/apache/hadoop/examples/PiBenchmark.java?view=diff&rev=469685&r1=469684&r2=469685
==============================================================================
--- lucene/hadoop/trunk/src/examples/org/apache/hadoop/examples/PiBenchmark.java (original)
+++ lucene/hadoop/trunk/src/examples/org/apache/hadoop/examples/PiBenchmark.java Tue Oct 31 14:35:06 2006
@@ -58,7 +58,7 @@
/** Map method.
* @param key
- * @param value not-used.
+ * @param val not-used
* @param out
* @param reporter
*/
@@ -101,7 +101,7 @@
conf = job;
}
/** Reduce method.
- * @ param key
+ * @param key
* @param values
* @param output
* @param reporter
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/conf/Configuration.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/conf/Configuration.java?view=diff&rev=469685&r1=469684&r2=469685
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/conf/Configuration.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/conf/Configuration.java Tue Oct 31 14:35:06 2006
@@ -58,7 +58,7 @@
* This String is processed for <b>variable expansion</b>. The available variables are the
* <em>System properties</em> and the <em>other properties</em> defined in this Configuration.
* <p>The only <tt>get*</tt> method that is not processed for variable expansion is
- * {@link getObject} (as it cannot assume that the returned values are String).
+ * {@link #getObject(String)} (as it cannot assume that the returned values are String).
* You can use <tt>getObject</tt> to obtain the raw value of a String property without
* variable expansion: if <tt>(String)conf.getObject("my.jdk")</tt> is <tt>"JDK ${java.version}"</tt>
* then conf.get("my.jdk")</tt> is <tt>"JDK 1.5.0"</tt>
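A minimal sketch of the expansion behaviour this hunk documents, following the javadoc's own "my.jdk" example (the snippet itself is illustrative, not part of the commit):

    // org.apache.hadoop.conf.Configuration
    Configuration conf = new Configuration();
    conf.set("my.jdk", "JDK ${java.version}");

    // get() expands ${java.version} against the system properties,
    // yielding something like "JDK 1.5.0" on a Java 5 runtime.
    String expanded = conf.get("my.jdk");

    // getObject() skips variable expansion and returns the raw value,
    // i.e. the literal string "JDK ${java.version}".
    String raw = (String) conf.getObject("my.jdk");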
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSShell.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSShell.java?view=diff&rev=469685&r1=469684&r2=469685
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSShell.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSShell.java Tue Oct 31 14:35:06 2006
@@ -191,12 +191,12 @@
/**
* Set the replication for files that match file pattern <i>srcf</i>
* if it's a directory and recursive is true,
- * set replication for all the subdirs and those files too
- * @param newRep: new replication factor
- * @param srcf: a file pattern specifying source files
- * @param recursive: if need to set replication factor for files in subdirs
- * @exception: IOException
- * @see org.apache.hadoop.fs.FileSystem.globPaths
+ * set replication for all the subdirs and those files too.
+ * @param newRep new replication factor
+ * @param srcf a file pattern specifying source files
+ * @param recursive if need to set replication factor for files in subdirs
+ * @throws IOException
+ * @see org.apache.hadoop.fs.FileSystem#globPaths(Path)
*/
public void setReplication(short newRep, String srcf, boolean recursive)
throws IOException {
@@ -248,11 +248,11 @@
/**
- * Get a listing of all files in DFS that match the file pattern <i>srcf</i>
- * @param srcf: a file pattern specifying source files
- * @param recursive: if need to list files in subdirs
- * @exception: IOException
- * @see org.apache.hadoop.fs.FileSystem.globPaths
+ * Get a listing of all files in DFS that match the file pattern <i>srcf</i>.
+ * @param srcf a file pattern specifying source files
+ * @param recursive if need to list files in subdirs
+ * @throws IOException
+ * @see org.apache.hadoop.fs.FileSystem#globPaths(Path)
*/
public void ls(String srcf, boolean recursive) throws IOException {
Path[] srcs = fs.globPaths( new Path(srcf) );
@@ -286,10 +286,10 @@
}
/**
- * Show the size of all files in DFS that match the file pattern <i>srcf</i>
- * @param srcf: a file pattern specifying source files
- * @exception: IOException
- * @see org.apache.hadoop.fs.FileSystem.globPaths
+ * Show the size of all files in DFS that match the file pattern <i>src</i>
+ * @param src a file pattern specifying source files
+ * @throws IOException
+ * @see org.apache.hadoop.fs.FileSystem#globPaths(Path)
*/
public void du(String src) throws IOException {
Path items[] = fs.listPaths( fs.globPaths( new Path(src) ) );
@@ -317,10 +317,10 @@
* to a destination dfs file.
* When moving mutiple files, the destination must be a directory.
* Otherwise, IOException is thrown.
- * @param srcf: a file pattern specifying source files
- * @param dstf: a destination local file/directory
- * @exception: IOException
- * @see org.apache.hadoop.fs.FileSystem.globPaths
+ * @param srcf a file pattern specifying source files
+ * @param dstf a destination local file/directory
+ * @throws IOException
+ * @see org.apache.hadoop.fs.FileSystem#globPaths(Path)
*/
public void rename(String srcf, String dstf) throws IOException {
Path [] srcs = fs.globPaths( new Path(srcf) );
@@ -403,10 +403,10 @@
* to a destination dfs file.
* When copying mutiple files, the destination must be a directory.
* Otherwise, IOException is thrown.
- * @param srcf: a file pattern specifying source files
- * @param dstf: a destination local file/directory
- * @exception: IOException
- * @see org.apache.hadoop.fs.FileSystem.globPaths
+ * @param srcf a file pattern specifying source files
+ * @param dstf a destination local file/directory
+ * @throws IOException
+ * @see org.apache.hadoop.fs.FileSystem#globPaths(Path)
*/
public void copy(String srcf, String dstf, Configuration conf) throws IOException {
Path [] srcs = fs.globPaths( new Path(srcf) );
@@ -481,11 +481,11 @@
}
/**
- * Delete all files in DFS that match the file pattern <i>srcf</i>
- * @param srcf: a file pattern specifying source files
- * @param recursive: if need to delete subdirs
- * @exception: IOException
- * @see org.apache.hadoop.fs.FileSystem.globPaths
+ * Delete all files in DFS that match the file pattern <i>srcf</i>.
+ * @param srcf a file pattern specifying source files
+ * @param recursive if need to delete subdirs
+ * @throws IOException
+ * @see org.apache.hadoop.fs.FileSystem#globPaths(Path)
*/
public void delete(String srcf, boolean recursive) throws IOException {
Path [] srcs = fs.globPaths( new Path(srcf) );
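All of the shell methods touched above expand their srcf argument through FileSystem#globPaths(Path), exactly as in the line just shown. A minimal sketch of that expansion step (the pattern is made up):

    // org.apache.hadoop.fs.FileSystem, org.apache.hadoop.fs.Path
    FileSystem fs = FileSystem.get(new Configuration());
    // Expand the pattern the same way setReplication/ls/du/rename/copy/delete do.
    Path[] srcs = fs.globPaths(new Path("/user/hadoop/logs/*.txt"));
    for (int i = 0; i < srcs.length; i++) {
      System.out.println(srcs[i]);
    }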
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSck.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSck.java?view=diff&rev=469685&r1=469684&r2=469685
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSck.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSck.java Tue Oct 31 14:35:06 2006
@@ -38,11 +38,11 @@
* <li>files with blocks that are completely missing from all datanodes.<br/>
* In this case the tool can perform one of the following actions:
* <ul>
- * <li>none ({@link #FIXING_NONE})</li>
+ * <li>none ({@link NamenodeFsck#FIXING_NONE})</li>
* <li>move corrupted files to /lost+found directory on DFS
- * ({@link #FIXING_MOVE}). Remaining data blocks are saved as a
+ * ({@link NamenodeFsck#FIXING_MOVE}). Remaining data blocks are saved as a
* block chains, representing longest consecutive series of valid blocks.</li>
- * <li>delete corrupted files ({@link #FIXING_DELETE})</li>
+ * <li>delete corrupted files ({@link NamenodeFsck#FIXING_DELETE})</li>
* </ul>
* </li>
* <li>detect files with under-replicated or over-replicated blocks</li>
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DatanodeProtocol.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DatanodeProtocol.java?view=diff&rev=469685&r1=469684&r2=469685
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DatanodeProtocol.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DatanodeProtocol.java Tue Oct 31 14:35:06 2006
@@ -51,10 +51,10 @@
/**
* Register Datanode.
*
- * @see DataNode#register()
- * @see FSNamesystem#registerDatanode(DatanodeRegistration)
+ * @see org.apache.hadoop.dfs.DataNode#register()
+ * @see org.apache.hadoop.dfs.FSNamesystem#registerDatanode(DatanodeRegistration)
*
- * @return updated {@link DatanodeRegistration}, which contains
+ * @return updated {@link org.apache.hadoop.dfs.DatanodeRegistration}, which contains
* new storageID if the datanode did not have one and
* registration ID for further communication.
*/
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DistributedFileSystem.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DistributedFileSystem.java?view=diff&rev=469685&r1=469684&r2=469685
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DistributedFileSystem.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DistributedFileSystem.java Tue Oct 31 14:35:06 2006
@@ -258,10 +258,8 @@
/**
* Enter, leave or get safe mode.
- * See {@link ClientProtocol#setSafeMode(FSConstants.SafeModeAction)}
- * for more details.
*
- * @see ClientProtocol#setSafeMode(FSConstants.SafeModeAction)
+ * @see org.apache.hadoop.dfs.ClientProtocol#setSafeMode(FSConstants.SafeModeAction)
*/
public boolean setSafeMode( FSConstants.SafeModeAction action )
throws IOException {
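A hedged sketch of calling the method documented above; the SAFEMODE_GET constant name is assumed from the FSConstants.SafeModeAction enum and is not spelled out in this diff:

    // org.apache.hadoop.dfs.DistributedFileSystem, org.apache.hadoop.dfs.FSConstants
    Configuration conf = new Configuration();
    // Assumes fs.default.name points at a DFS namenode.
    DistributedFileSystem dfs = (DistributedFileSystem) FileSystem.get(conf);
    // Query the current state without entering or leaving safe mode.
    boolean inSafeMode = dfs.setSafeMode(FSConstants.SafeModeAction.SAFEMODE_GET);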
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/NamenodeFsck.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/NamenodeFsck.java?view=diff&rev=469685&r1=469684&r2=469685
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/NamenodeFsck.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/NamenodeFsck.java Tue Oct 31 14:35:06 2006
@@ -88,12 +88,11 @@
/**
* Filesystem checker.
- * @param conf current Configuration
- * @param fixing one of pre-defined values
- * @param showFiles show each file being checked
- * @param showBlocks for each file checked show its block information
- * @param showLocations for each block in each file show block locations
- * @throws Exception
+ * @param conf configuration (namenode config)
+ * @param nn namenode that this fsck is going to use
+ * @param pmap key=value[] map that is passed to the http servlet as url parameters
+ * @param response the object into which this servelet writes the url contents
+ * @throws IOException
*/
public NamenodeFsck(Configuration conf,
NameNode nn,
@@ -457,7 +456,6 @@
/**
* DFS is considered healthy if there are no missing blocks.
- * @return
*/
public boolean isHealthy() {
return missingIds.size() == 0;
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/FileSystem.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/FileSystem.java?view=diff&rev=469685&r1=469684&r2=469685
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/FileSystem.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/FileSystem.java Tue Oct 31 14:35:06 2006
@@ -537,7 +537,7 @@
/**
* Filter raw files in a list directories using the default checksum filter.
- * @param files: a list of paths
+ * @param files a list of paths
* @return a list of files under the source paths
* @exception IOException
*/
@@ -547,7 +547,7 @@
/**
* Filter raw files in a list directories using user-supplied path filter.
- * @param files: a list of paths
+ * @param files a list of paths
* @return a list of files under the source paths
* @exception IOException
*/
@@ -604,7 +604,7 @@
* </dd>
* </dl>
*
- * @param filePattern: a regular expression specifying file pattern
+ * @param filePattern a regular expression specifying file pattern
* @return an array of paths that match the file pattern
* @throws IOException
@@ -614,8 +614,7 @@
}
/** glob all the file names that matches filePattern
- * and is accepted by filter
- * @param
+ * and is accepted by filter.
*/
public Path[] globPaths(Path filePattern, PathFilter filter)
throws IOException {
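A small sketch of the two-argument form whose javadoc is fixed above, using an anonymous PathFilter (the pattern and extension are made up):

    // org.apache.hadoop.fs.FileSystem, org.apache.hadoop.fs.Path, org.apache.hadoop.fs.PathFilter
    FileSystem fs = FileSystem.get(new Configuration());
    PathFilter noTmpFiles = new PathFilter() {
      public boolean accept(Path p) {
        return !p.getName().endsWith(".tmp");   // skip temporary outputs
      }
    };
    Path[] matched = fs.globPaths(new Path("/data/*"), noTmpFiles);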
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/FileUtil.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/FileUtil.java?view=diff&rev=469685&r1=469684&r2=469685
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/FileUtil.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/FileUtil.java Tue Oct 31 14:35:06 2006
@@ -306,7 +306,7 @@
* Create a soft link between a src and destination
* only on a local disk. HDFS does not support this
* @param target the target for symlink
- * @param destination the symlink
+ * @param linkname the symlink
* @return value returned by the command
*/
public static int symLink(String target, String linkname) throws IOException{
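The corrected @param above matches the method's signature; a short sketch of calling it (paths are made up, and as the javadoc notes this only works on a local disk):

    // org.apache.hadoop.fs.FileUtil
    // Create the local symlink ./current -> /tmp/hadoop-data.
    int exitCode = FileUtil.symLink("/tmp/hadoop-data", "current");
    if (exitCode != 0) {
      System.err.println("ln -s failed, exit code " + exitCode);
    }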
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/Path.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/Path.java?view=diff&rev=469685&r1=469684&r2=469685
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/Path.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/Path.java Tue Oct 31 14:35:06 2006
@@ -123,7 +123,7 @@
return new Path(isAbsolute, newElements, drive);
}
- /** Adds a suffix to a the final name in the path.*/
+ /** Adds a suffix to the final name in the path.*/
public Path suffix(String suffix) {
return new Path(getParent(), getName()+suffix);
}
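For completeness, what the method just documented does (the path is made up):

    // org.apache.hadoop.fs.Path
    Path part = new Path("/user/hadoop/out/part-00000");
    Path tmp  = part.suffix(".tmp");   // /user/hadoop/out/part-00000.tmp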
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/BytesWritable.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/BytesWritable.java?view=diff&rev=469685&r1=469684&r2=469685
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/BytesWritable.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/BytesWritable.java Tue Oct 31 14:35:06 2006
@@ -60,7 +60,6 @@
/**
* Get the current size of the buffer.
- * @return
*/
public int getSize() {
return size;
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/SequenceFile.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/SequenceFile.java?view=diff&rev=469685&r1=469684&r2=469685
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/SequenceFile.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/SequenceFile.java Tue Oct 31 14:35:06 2006
@@ -56,7 +56,7 @@
public static final int SYNC_INTERVAL = 100*SYNC_SIZE;
/** The type of compression.
- * @see SequenceFile#Writer
+ * @see SequenceFile.Writer
*/
public static enum CompressionType {
/** Do not compress records. */
@@ -596,9 +596,9 @@
/** Returns the current length of the output file.
*
- * <p>This always returns a synchronized position. In other words, {@link
- * immediately after calling {@link Reader#seek(long)} with a position
- * returned by this method, Reader#next(Writable) may be called. However
+ * <p>This always returns a synchronized position. In other words,
+ * immediately after calling {@link SequenceFile.Reader#seek(long)} with a position
+ * returned by this method, {@link SequenceFile.Reader#next(Writable)} may be called. However
* the key may be earlier in the file than key last written when this
* method was called (e.g., with block-compression, it may be the first key
* in the block that was being written when this method was called).
@@ -1388,8 +1388,8 @@
/** Set the current byte position in the input file.
*
* <p>The position passed must be a position returned by {@link
- * Writer#getLength()} when writing this file. To seek to an arbitrary
- * position, use {@link Reader#sync(long)}.
+ * SequenceFile.Writer#getLength()} when writing this file. To seek to an arbitrary
+ * position, use {@link SequenceFile.Reader#sync(long)}.
*/
public synchronized void seek(long position) throws IOException {
in.seek(position);
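A hedged sketch of the getLength()/seek() contract described in these hunks; the createWriter overload used here is assumed, and the file path and records are made up:

    // org.apache.hadoop.io.SequenceFile, org.apache.hadoop.io.Text
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);
    Path file = new Path("/tmp/demo.seq");

    SequenceFile.Writer writer =
        SequenceFile.createWriter(fs, conf, file, Text.class, Text.class);
    writer.append(new Text("k1"), new Text("v1"));
    long syncPos = writer.getLength();      // always a synchronized position
    writer.append(new Text("k2"), new Text("v2"));
    writer.close();

    SequenceFile.Reader reader = new SequenceFile.Reader(fs, file, conf);
    reader.seek(syncPos);                   // legal: the position came from getLength()
    Text key = new Text(), val = new Text();
    reader.next(key, val);                  // reads the next record at or after the sync
    reader.close();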
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/Text.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/Text.java?view=diff&rev=469685&r1=469684&r2=469685
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/Text.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/Text.java Tue Oct 31 14:35:06 2006
@@ -96,7 +96,7 @@
* Returns the Unicode Scalar Value (32-bit integer value)
* for the character at <code>position</code>. Note that this
* method avoids using the converter or doing String instatiation
- * @returns the Unicode scalar value at position or -1
+ * @return the Unicode scalar value at position or -1
* if the position is invalid or points to a
* trailing byte
*/
@@ -410,8 +410,8 @@
/**
* Check if a byte array contains valid utf-8
- * @param utf8: byte array
- * @exception MalformedInputException if the byte array contains invalid utf-8
+ * @param utf8 byte array
+ * @throws MalformedInputException if the byte array contains invalid utf-8
*/
public static void validateUTF8(byte[] utf8) throws MalformedInputException {
validateUTF8(utf8, 0, utf8.length);
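A small sketch of the validateUTF8 contract documented above (the helper method name is made up):

    // org.apache.hadoop.io.Text, java.nio.charset.MalformedInputException
    static void checkUtf8(byte[] bytes) {
      try {
        Text.validateUTF8(bytes);          // returns quietly for valid utf-8
        System.out.println("valid utf-8");
      } catch (MalformedInputException e) {
        System.out.println("invalid utf-8: " + e);
      }
    }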
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/WritableUtils.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/WritableUtils.java?view=diff&rev=469685&r1=469684&r2=469685
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/WritableUtils.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/WritableUtils.java Tue Oct 31 14:35:06 2006
@@ -336,10 +336,8 @@
/**
* Get the encoded length if an integer is stored in a variable-length format
- * @param i: a long
* @return the encoded length
*/
-
public static int getVIntSize(long i) {
if (i >= -112 && i <= 127) {
return 1;
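The method above reports how many bytes a value occupies in the variable-length encoding; a quick sketch:

    // org.apache.hadoop.io.WritableUtils
    int small = WritableUtils.getVIntSize(127L);       // 1: fits the single-byte range
    int neg   = WritableUtils.getVIntSize(-112L);      // 1: lower end of that range
    int large = WritableUtils.getVIntSize(1000000L);   // several bytes: marker byte plus value bytes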
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/io/compress/CompressionInputStream.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/io/compress/CompressionInputStream.java?view=diff&rev=469685&r1=469684&r2=469685
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/io/compress/CompressionInputStream.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/io/compress/CompressionInputStream.java Tue Oct 31 14:35:06 2006
@@ -39,7 +39,7 @@
/**
* Create a compression input stream that reads
* the decompressed bytes from the given stream.
- * @param out
+ * @param in
*/
protected CompressionInputStream(InputStream in) {
this.in = in;
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/FileSplit.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/FileSplit.java?view=diff&rev=469685&r1=469684&r2=469685
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/FileSplit.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/FileSplit.java Tue Oct 31 14:35:06 2006
@@ -30,7 +30,7 @@
/** A section of an input file. Returned by {@link
* InputFormat#getSplits(FileSystem, JobConf, int)} and passed to
- * InputFormat#getRecordReader(FileSystem,FileSplit,JobConf,Reporter). */
+ * {@link InputFormat#getRecordReader(FileSystem,FileSplit,JobConf,Reporter)}. */
public class FileSplit implements Writable {
private Path file;
private long start;
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobConf.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobConf.java?view=diff&rev=469685&r1=469684&r2=469685
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobConf.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobConf.java Tue Oct 31 14:35:06 2006
@@ -73,7 +73,6 @@
/**
* Construct a map/reduce job configuration.
- * @param conf a Configuration whose settings will be inherited.
* @param exampleClass a class whose containing jar is used as the job's jar.
*/
public JobConf(Class exampleClass) {
@@ -83,8 +82,7 @@
/**
* Construct a map/reduce job configuration.
*
- * @param conf
- * a Configuration whose settings will be inherited.
+ * @param conf a Configuration whose settings will be inherited.
*/
public JobConf(Configuration conf) {
super(conf);
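A brief sketch of the two constructors whose javadoc is corrected above (WordCount stands in for whatever class lives in the job's jar):

    // org.apache.hadoop.mapred.JobConf, org.apache.hadoop.conf.Configuration
    // The jar containing WordCount becomes the job's jar.
    JobConf job = new JobConf(WordCount.class);
    job.setJobName("word-count");

    // Or inherit settings from an existing Configuration.
    Configuration base = new Configuration();
    JobConf job2 = new JobConf(base);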
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobHistory.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobHistory.java?view=diff&rev=469685&r1=469684&r2=469685
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobHistory.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobHistory.java Tue Oct 31 14:35:06 2006
@@ -225,7 +225,6 @@
* Convert value from history to int and return.
* if no value is found it returns 0.
* @param k key
- * @return
*/
public int getInt(Keys k){
String s = values.get(k);
@@ -238,7 +237,6 @@
* Convert value from history to int and return.
* if no value is found it returns 0.
* @param k
- * @return
*/
public long getLong(Keys k){
String s = values.get(k);
@@ -271,7 +269,6 @@
}
/**
* Returns Map containing all key-values.
- * @return
*/
public Map<Keys, String> getValues(){
return values;
@@ -292,7 +289,6 @@
/**
* Returns all map and reduce tasks <taskid-Task>.
- * @return
*/
public Map<String, Task> getAllTasks() { return allTasks; }
@@ -486,12 +482,12 @@
}
/**
* Returns all task attempts for this task. <task attempt id - TaskAttempt>
- * @return
*/
public Map<String, TaskAttempt> getTaskAttempts(){
return this.taskAttempts;
}
}
+
/**
* Base class for Map and Reduce TaskAttempts.
*/
@@ -745,4 +741,4 @@
isRunning = false ;
}
}
-}
\ No newline at end of file
+}
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/SequenceFileInputFilter.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/SequenceFileInputFilter.java?view=diff&rev=469685&r1=469684&r2=469685
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/SequenceFileInputFilter.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/SequenceFileInputFilter.java Tue Oct 31 14:35:06 2006
@@ -107,8 +107,8 @@
public static class RegexFilter extends FilterBase {
private Pattern p;
/** Define the filtering regex and stores it in conf
- * @argument conf where the regex is set
- * @argument regex regex used as a filter
+ * @param conf where the regex is set
+ * @param regex regex used as a filter
*/
public static void setPattern(Configuration conf, String regex )
throws PatternSyntaxException {
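A short sketch of the setPattern call documented above (the regex is made up; wiring the filter into a job's input format is omitted):

    // org.apache.hadoop.mapred.SequenceFileInputFilter, org.apache.hadoop.conf.Configuration
    Configuration conf = new Configuration();
    // Accept only records whose key's string form ends in ".log".
    SequenceFileInputFilter.RegexFilter.setPattern(conf, ".*\\.log");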
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/StatusHttpServer.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/StatusHttpServer.java?view=diff&rev=469685&r1=469684&r2=469685
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/StatusHttpServer.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/StatusHttpServer.java Tue Oct 31 14:35:06 2006
@@ -103,10 +103,10 @@
}
/**
- * Add a servlet in the server
+ * Add a servlet in the server.
* @param name The name of the servlet (can be passed as null)
* @param pathSpec The path spec for the servlet
- * @param classname The class name for the servlet
+ * @param servletClass The servlet class
*/
public <T extends HttpServlet>
void addServlet(String name, String pathSpec,
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskTracker.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskTracker.java?view=diff&rev=469685&r1=469684&r2=469685
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskTracker.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/TaskTracker.java Tue Oct 31 14:35:06 2006
@@ -408,7 +408,6 @@
}
/**Return the DFS filesystem
- * @return
*/
public FileSystem getFileSystem(){
return fs;
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/util/ReflectionUtils.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/util/ReflectionUtils.java?view=diff&rev=469685&r1=469684&r2=469685
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/util/ReflectionUtils.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/util/ReflectionUtils.java Tue Oct 31 14:35:06 2006
@@ -77,8 +77,8 @@
}
/**
- * Print all of the thread's information and stack traces
- * @author Owen O'Malley
+ * Print all of the thread's information and stack traces.
+ *
* @param stream the stream to
* @param title a string title for the stack trace
*/
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/util/StringUtils.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/util/StringUtils.java?view=diff&rev=469685&r1=469684&r2=469685
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/util/StringUtils.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/util/StringUtils.java Tue Oct 31 14:35:06 2006
@@ -157,7 +157,6 @@
/**
*
* @param uris
- * @return
*/
public static String uriToString(URI[] uris){
String ret = null;
@@ -171,7 +170,6 @@
/**
*
* @param str
- * @return
*/
public static URI[] stringToURI(String[] str){
if (str == null)
@@ -192,7 +190,6 @@
/**
*
* @param str
- * @return
*/
public static Path[] stringToPath(String[] str){
Path[] p = new Path[str.length];
@@ -208,8 +205,7 @@
* If finish time comes before start time then negative valeus of X, Y and Z wil return.
*
* @param finishTime finish time
- * @param statTime start time
- * @return
+ * @param startTime start time
*/
public static String formatTimeDiff(long finishTime, long startTime){
StringBuffer buf = new StringBuffer() ;
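A quick sketch of the corrected signature above (the times are made up):

    // org.apache.hadoop.util.StringUtils
    long start  = System.currentTimeMillis();
    long finish = start + 3723000L;   // 1 hour, 2 minutes, 3 seconds later
    String elapsed = StringUtils.formatTimeDiff(finish, start);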
Modified: lucene/hadoop/trunk/src/java/overview.html
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/overview.html?view=diff&rev=469685&r1=469684&r2=469685
==============================================================================
--- lucene/hadoop/trunk/src/java/overview.html (original)
+++ lucene/hadoop/trunk/src/java/overview.html Tue Oct 31 14:35:06 2006
@@ -134,7 +134,7 @@
<h3>Bootstrapping</h3>
-<p>A new distributed filesystem must formatted with the following
+<p>A new distributed filesystem must be formatted with the following
command, run on the master node:</p>
<p><tt>bin/hadoop namenode -format</tt></p>