You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by sh...@apache.org on 2009/04/22 01:39:34 UTC
svn commit: r767331 [1/2] - in /hadoop/core/trunk: ./ bin/
src/docs/src/documentation/content/xdocs/
src/hdfs/org/apache/hadoop/hdfs/server/namenode/
src/hdfs/org/apache/hadoop/hdfs/tools/offlineImageViewer/
src/test/org/apache/hadoop/hdfs/tools/ src/t...
Author: shv
Date: Tue Apr 21 23:39:33 2009
New Revision: 767331
URL: http://svn.apache.org/viewvc?rev=767331&view=rev
Log:
HADOOP-5467. Introduce offline fsimage image viewer. Contributed by Jakob Homan.
Added:
hadoop/core/trunk/src/docs/src/documentation/content/xdocs/hdfs_imageviewer.xml
hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/tools/offlineImageViewer/
hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/tools/offlineImageViewer/ImageLoader.java
hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/tools/offlineImageViewer/ImageLoaderCurrent.java
hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/tools/offlineImageViewer/ImageVisitor.java
hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/tools/offlineImageViewer/IndentedImageVisitor.java
hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/tools/offlineImageViewer/LsImageVisitor.java
hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/tools/offlineImageViewer/OfflineImageViewer.java
hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/tools/offlineImageViewer/TextWriterImageVisitor.java
hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/tools/offlineImageViewer/XmlImageVisitor.java
hadoop/core/trunk/src/test/org/apache/hadoop/hdfs/tools/
hadoop/core/trunk/src/test/org/apache/hadoop/hdfs/tools/offlineImageViewer/
hadoop/core/trunk/src/test/org/apache/hadoop/hdfs/tools/offlineImageViewer/SpotCheckImageVisitor.java
hadoop/core/trunk/src/test/org/apache/hadoop/hdfs/tools/offlineImageViewer/TestOIVCanReadOldVersions.java
hadoop/core/trunk/src/test/org/apache/hadoop/hdfs/tools/offlineImageViewer/TestOfflineImageViewer.java
hadoop/core/trunk/src/test/org/apache/hadoop/hdfs/tools/offlineImageViewer/fsimageV18 (with props)
hadoop/core/trunk/src/test/org/apache/hadoop/hdfs/tools/offlineImageViewer/fsimageV19 (with props)
Modified:
hadoop/core/trunk/CHANGES.txt
hadoop/core/trunk/bin/hdfs
hadoop/core/trunk/build.xml
hadoop/core/trunk/src/docs/src/documentation/content/xdocs/site.xml
hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FSImage.java
Modified: hadoop/core/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/CHANGES.txt?rev=767331&r1=767330&r2=767331&view=diff
==============================================================================
--- hadoop/core/trunk/CHANGES.txt (original)
+++ hadoop/core/trunk/CHANGES.txt Tue Apr 21 23:39:33 2009
@@ -86,6 +86,8 @@
HADOOP-5469. Add /metrics servlet to daemons, providing metrics
over HTTP as either text or JSON. (Philip Zeyliger via cutting)
+ HADOOP-5467. Introduce offline fsimage image viewer. (Jakob Homan via shv)
+
IMPROVEMENTS
HADOOP-4565. Added CombineFileInputFormat to use data locality information
Modified: hadoop/core/trunk/bin/hdfs
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/bin/hdfs?rev=767331&r1=767330&r2=767331&view=diff
==============================================================================
--- hadoop/core/trunk/bin/hdfs (original)
+++ hadoop/core/trunk/bin/hdfs Tue Apr 21 23:39:33 2009
@@ -31,6 +31,7 @@
echo " fsck run a DFS filesystem checking utility"
echo " balancer run a cluster balancing utility"
echo " jmxget get JMX exported values from NameNode or DataNode."
+ echo " oiv apply the offline fsimage viewer to an fsimage"
echo " Use -help to see options"
echo ""
echo "Most commands print help when invoked w/o parameters."
@@ -67,6 +68,8 @@
HADOOP_OPTS="$HADOOP_OPTS $HADOOP_BALANCER_OPTS"
elif [ "$COMMAND" = "jmxget" ] ; then
CLASS=org.apache.hadoop.hdfs.tools.JMXGet
+elif [ "$COMMAND" = "oiv" ] ; then
+ CLASS=org.apache.hadoop.hdfs.tools.offlineImageViewer.OfflineImageViewer
else
echo $COMMAND - invalid command
print_usage
Modified: hadoop/core/trunk/build.xml
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/build.xml?rev=767331&r1=767330&r2=767331&view=diff
==============================================================================
--- hadoop/core/trunk/build.xml (original)
+++ hadoop/core/trunk/build.xml Tue Apr 21 23:39:33 2009
@@ -701,6 +701,8 @@
<copy file="${test.src.dir}/org/apache/hadoop/cli/clitest_data/data30bytes" todir="${test.cache.data}"/>
<copy file="${test.src.dir}/org/apache/hadoop/cli/clitest_data/data60bytes" todir="${test.cache.data}"/>
<copy file="${test.src.dir}/org/apache/hadoop/cli/clitest_data/data120bytes" todir="${test.cache.data}"/>
+ <copy file="${test.src.dir}/org/apache/hadoop/hdfs/tools/offlineImageViewer/fsimageV18" todir="${test.cache.data}"/>
+ <copy file="${test.src.dir}/org/apache/hadoop/hdfs/tools/offlineImageViewer/fsimageV19" todir="${test.cache.data}"/>
</target>
<!-- ================================================================== -->
Added: hadoop/core/trunk/src/docs/src/documentation/content/xdocs/hdfs_imageviewer.xml
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/docs/src/documentation/content/xdocs/hdfs_imageviewer.xml?rev=767331&view=auto
==============================================================================
--- hadoop/core/trunk/src/docs/src/documentation/content/xdocs/hdfs_imageviewer.xml (added)
+++ hadoop/core/trunk/src/docs/src/documentation/content/xdocs/hdfs_imageviewer.xml Tue Apr 21 23:39:33 2009
@@ -0,0 +1,187 @@
+<?xml version="1.0"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<!DOCTYPE document PUBLIC "-//APACHE//DTD Documentation V2.0//EN" "http://forrest.apache.org/dtd/document-v20.dtd">
+
+<document>
+
+ <header>
+ <title>Offline Image Viewer Guide</title>
+ </header>
+
+ <body>
+
+ <section>
+ <title>Overview</title>
+
+ <p>The Offline Image Viewer is a tool to dump the contents of hdfs
+ fsimage files to human-readable formats in order to allow offline analysis
+ and examination of an Hadoop cluster's namespace. The tool is able to
+ process very large image files relatively quickly, converting them to
+ one of several output formats. The tool handles the layout formats that
+ were included with Hadoop versions 16 and up. If the tool is not able to
+ process an image file, it will exit cleanly. The Offline Image Viewer does not require
+ an Hadoop cluster to be running; it is entirely offline in its operation.</p>
+
+ <p>The Offline Image Viewer provides several output processors:</p>
+ <ol>
+ <li><strong>Ls</strong> is the default output processor. It closely mimics the format of
+ the <code>lsr </code> command. It includes the same fields, in the same order, as
+ <code>lsr </code>: directory or file flag, permissions, replication, owner, group,
+ file size, modification date, and full path. Unlike the <code>lsr </code> command,
+ the root path is included. One important difference between the output
+ of the <code>lsr </code> command and this processor, is that this output is not sorted
+ by directory name and contents. Rather, the files are listed in the
+ order in which they are stored in the fsimage file. Therefore, it is
+ not possible to directly compare the output of the <code>lsr </code> command with
+ this tool. The Ls processor uses information contained within the Inode blocks to
+ calculate file sizes and ignores the <code>-skipBlocks</code> option.</li>
+ <li><strong>Indented</strong> provides a more complete view of the fsimage's contents,
+ including all of the information included in the image, such as image
+ version, generation stamp and inode- and block-specific listings. This
+ processor uses indentation to organize the output in a hierarchical manner.
+ This format is suitable for easy human comprehension.
+ <li><strong>XML</strong> creates an XML document of the fsimage and includes all of the
+ information within the fsimage, similar to the <code>Indented</code> processor. The output
+ of this processor is amenable to automated processing and analysis with XML tools.
+ Due to the verbosity of the XML syntax, this processor will also generate
+ the largest amount of output.</li>
+ </ol>
+
+ </section> <!-- overview -->
+
+ <section>
+ <title>Usage</title>
+
+ <section>
+ <title>Basic</title>
+ <p>The simplest usage of the Offline Image Viewer is to provide just an input and output
+ file, via the <code>-i</code> and <code>-o</code> command-line switches:</p>
+
+ <p><code>bash$ bin/hdfs oiv -i fsimage -o fsimage.txt</code><br/></p>
+
+ <p>This will create a file named fsimage.txt in the current directory using
+ the Ls output processor. For very large image files, this process may take
+ several minutes.</p>
+
+ <p>One can specify which output processor to use via the command-line switch <code>-p</code>.
+ For instance:</p>
+ <p><code>bash$ bin/hdfs oiv -i fsimage -o fsimage.xml -p XML</code><br/></p>
+
+ <p>or</p>
+
+ <p><code>bash$ bin/hdfs oiv -i fsimage -o fsimage.txt -p Indented</code><br/></p>
+
+ <p>This will run the tool using either the XML or Indented output processor,
+ respectively.</p>
+
+ <p>One command-line option worth considering is <code>-skipBlocks</code>, which
+ prevents the tool from explicitly enumerating all of the blocks that make up
+ a file in the namespace. This is useful for file systems that have very large
+ files. Enabling this option can significantly decrease the size of the resulting
+ output, as individual blocks are not included. Note, however, that the Ls processor
+ needs to enumerate the blocks and so overrides this option.</p>
+
+ </section> <!-- Basic -->
+ <section id="Example">
+ <title>Example</title>
+
+ <p>Consider the following contrived namespace:</p>
+ <p><code>drwxr-xr-x - theuser supergroup 0 2009-03-16 21:17 /anotherDir</code></p>
+ <p><code>-rw-r--r-- 3 theuser supergroup 286631664 2009-03-16 21:15 /anotherDir/biggerfile</code></p>
+ <p><code>-rw-r--r-- 3 theuser supergroup 8754 2009-03-16 21:17 /anotherDir/smallFile</code></p>
+ <p><code>drwxr-xr-x - theuser supergroup 0 2009-03-16 21:11 /mapredsystem</code></p>
+ <p><code>drwxr-xr-x - theuser supergroup 0 2009-03-16 21:11 /mapredsystem/theuser</code></p>
+ <p><code>drwxr-xr-x - theuser supergroup 0 2009-03-16 21:11 /mapredsystem/theuser/mapredsystem</code></p>
+ <p><code>drwx-wx-wx - theuser supergroup 0 2009-03-16 21:11 /mapredsystem/theuser/mapredsystem/ip.redacted.com</code></p>
+ <p><code>drwxr-xr-x - theuser supergroup 0 2009-03-16 21:12 /one</code></p>
+ <p><code>drwxr-xr-x - theuser supergroup 0 2009-03-16 21:12 /one/two</code></p>
+ <p><code>drwxr-xr-x - theuser supergroup 0 2009-03-16 21:16 /user</code></p>
+ <p><code>drwxr-xr-x - theuser supergroup 0 2009-03-16 21:19 /user/theuser</code></p>
+ <p>Applying the Offline Image Viewer against this file with default options would result in the following output:</p>
+ <p><code>machine:hadoop-0.21.0-dev theuser$ bin/hdfs oiv -i fsimagedemo -o fsimage.txt</code></p>
+ <p><code>drwxr-xr-x - theuser supergroup 0 2009-03-16 14:16 /</code></p>
+ <p><code>drwxr-xr-x - theuser supergroup 0 2009-03-16 14:17 /anotherDir</code></p>
+ <p><code>drwxr-xr-x - theuser supergroup 0 2009-03-16 14:11 /mapredsystem</code></p>
+ <p><code>drwxr-xr-x - theuser supergroup 0 2009-03-16 14:12 /one</code></p>
+ <p><code>drwxr-xr-x - theuser supergroup 0 2009-03-16 14:16 /user</code></p>
+ <p><code>-rw-r--r-- 3 theuser supergroup 286631664 2009-03-16 14:15 /anotherDir/biggerfile</code></p>
+ <p><code>-rw-r--r-- 3 theuser supergroup 8754 2009-03-16 14:17 /anotherDir/smallFile</code></p>
+ <p><code>drwxr-xr-x - theuser supergroup 0 2009-03-16 14:11 /mapredsystem/theuser</code></p>
+ <p><code>drwxr-xr-x - theuser supergroup 0 2009-03-16 14:11 /mapredsystem/theuser/mapredsystem</code></p>
+ <p><code>drwx-wx-wx - theuser supergroup 0 2009-03-16 14:11 /mapredsystem/theuser/mapredsystem/ip.redacted.com</code></p>
+ <p><code>drwxr-xr-x - theuser supergroup 0 2009-03-16 14:12 /one/two</code></p>
+ <p><code>drwxr-xr-x - theuser supergroup 0 2009-03-16 14:19 /user/theuser</code></p>
+ <p>Similarly, applying the Indented processor would generate output that begins with:</p>
+ <p><code>machine:hadoop-0.21.0-dev theuser$ bin/hdfs oiv -i fsimagedemo -p Indented -o fsimage.txt</code></p>
+ <p><code>FSImage</code></p>
+ <p><code> ImageVersion = -19</code></p>
+ <p><code> NamespaceID = 2109123098</code></p>
+ <p><code> GenerationStamp = 1003</code></p>
+ <p><code> INodes [NumInodes = 12]</code></p>
+ <p><code> Inode</code></p>
+ <p><code> INodePath = </code></p>
+ <p><code> Replication = 0</code></p>
+ <p><code> ModificationTime = 2009-03-16 14:16</code></p>
+ <p><code> AccessTime = 1969-12-31 16:00</code></p>
+ <p><code> BlockSize = 0</code></p>
+ <p><code> Blocks [NumBlocks = -1]</code></p>
+ <p><code> NSQuota = 2147483647</code></p>
+ <p><code> DSQuota = -1</code></p>
+ <p><code> Permissions</code></p>
+ <p><code> Username = theuser</code></p>
+ <p><code> GroupName = supergroup</code></p>
+ <p><code> PermString = rwxr-xr-x</code></p>
+ <p><code>…remaining output omitted…</code></p>
+ </section> <!-- example-->
+
+ </section>
+
+ <section id="options">
+ <title>Options</title>
+
+ <section>
+ <title>Option Index</title>
+ <table>
+ <tr><th> Flag </th><th> Description </th></tr>
+ <tr><td><code>[-i|--inputFile] <input file></code></td>
+ <td>Specify the input fsimage file to process. Required.</td></tr>
+ <tr><td><code>[-o|--outputFile] <output file></code></td>
+ <td>Specify the output filename, if the specified output processor
+ generates one. If the specified file already exists, it is silently overwritten. Required.
+ </td></tr>
+ <tr><td><code>[-p|--processor] <processor></code></td>
+ <td>Specify the image processor to apply against the image file. Currently
+ valid options are Ls (default), XML and Indented.
+ </td></tr>
+ <tr><td><code>-skipBlocks</code></td>
+ <td>Do not enumerate individual blocks within files. This may save processing time
+ and output file space on namespaces with very large files. The <code>Ls</code> processor reads
+ the blocks to correctly determine file sizes and ignores this option.</td></tr>
+ <tr><td><code>-printToScreen</code></td>
+ <td>Pipe output of processor to console as well as specified file. On extremely
+ large namespaces, this may increase processing time by an order of magnitude.</td></tr>
+ <tr><td><code>[-h|--help]</code></td>
+ <td>Display the tool usage and help information and exit.</td></tr>
+ </table>
+ </section> <!-- options -->
+ </section>
+
+ </body>
+
+</document>
Modified: hadoop/core/trunk/src/docs/src/documentation/content/xdocs/site.xml
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/docs/src/documentation/content/xdocs/site.xml?rev=767331&r1=767330&r2=767331&view=diff
==============================================================================
--- hadoop/core/trunk/src/docs/src/documentation/content/xdocs/site.xml (original)
+++ hadoop/core/trunk/src/docs/src/documentation/content/xdocs/site.xml Tue Apr 21 23:39:33 2009
@@ -58,6 +58,7 @@
<hdfs_perm label="Permissions Guide" href="hdfs_permissions_guide.html" />
<hdfs_quotas label="Quotas Guide" href="hdfs_quota_admin_guide.html" />
<hdfs_SLG label="Synthetic Load Generator Guide" href="SLG_user_guide.html" />
+ <hdfs_imageviewer label="Offline Image Viewer Guide" href="hdfs_imageviewer.html" />
<hdfs_libhdfs label="C API libhdfs" href="libhdfs.html" />
</docs>
Modified: hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FSImage.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FSImage.java?rev=767331&r1=767330&r2=767331&view=diff
==============================================================================
--- hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FSImage.java (original)
+++ hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FSImage.java Tue Apr 21 23:39:33 2009
@@ -1840,7 +1840,10 @@
}
static private final UTF8 U_STR = new UTF8();
- static String readString(DataInputStream in) throws IOException {
+ // This should be reverted to package private once the ImageLoader
+ // code is moved into this package. This method should not be called
+ // by other code.
+ public static String readString(DataInputStream in) throws IOException {
U_STR.readFields(in);
return U_STR.toString();
}
@@ -1850,7 +1853,8 @@
return s.isEmpty()? null: s;
}
- static byte[] readBytes(DataInputStream in) throws IOException {
+ // Same comments apply for this method as for readString()
+ public static byte[] readBytes(DataInputStream in) throws IOException {
U_STR.readFields(in);
int len = U_STR.getLength();
byte[] bytes = new byte[len];
Added: hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/tools/offlineImageViewer/ImageLoader.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/tools/offlineImageViewer/ImageLoader.java?rev=767331&view=auto
==============================================================================
--- hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/tools/offlineImageViewer/ImageLoader.java (added)
+++ hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/tools/offlineImageViewer/ImageLoader.java Tue Apr 21 23:39:33 2009
@@ -0,0 +1,80 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.tools.offlineImageViewer;
+
+import java.io.DataInputStream;
+import java.io.IOException;
+
+/**
+ * An ImageLoader can accept a DataInputStream to an Hadoop FSImage file
+ * and walk over its structure using the supplied ImageVisitor.
+ *
+ * Each implementation of ImageLoader is designed to rapidly process an
+ * image file. As long as minor changes are made from one layout version
+ * to another, it is acceptable to tweak one implementation to read the next.
+ * However, if the layout version changes enough that it would make a
+ * processor slow or difficult to read, another processor should be created.
+ * This allows each processor to quickly read an image without getting
+ * bogged down in dealing with significant differences between layout versions.
+ */
+interface ImageLoader {
+
+ /**
+ * @param in DataInputStream pointing to an Hadoop FSImage file
+ * @param v Visitor to apply to the FSImage file
+ * @param enumerateBlocks Should visitor visit each of the file blocks?
+ */
+ public void loadImage(DataInputStream in, ImageVisitor v,
+ boolean enumerateBlocks) throws IOException;
+
+ /**
+ * Can this processor handle the specified version of FSImage file?
+ *
+ * @param version FSImage layout version number
+ * @return True if this instance can process the file
+ */
+ public boolean canLoadVersion(int version);
+
+ /**
+ * Factory for obtaining version of image loader that can read
+ * a particular image format.
+ */
+ public class LoaderFactory {
+ // Java doesn't support static methods on interfaces, which necessitates
+ // this factory class
+
+ /**
+ * Find an image loader capable of interpreting the specified
+ * layout version number. If none, return null;
+ *
+ * @param imageVersion fsimage layout version number to be processed
+ * @return ImageLoader that can interpret specified version, or null
+ */
+ static public ImageLoader getLoader(int version) {
+ // Easy to add more image processors as they are written
+ ImageLoader[] loaders = { new ImageLoaderCurrent() };
+
+ for (ImageLoader l : loaders) {
+ if (l.canLoadVersion(version))
+ return l;
+ }
+
+ return null;
+ }
+ }
+}
Added: hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/tools/offlineImageViewer/ImageLoaderCurrent.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/tools/offlineImageViewer/ImageLoaderCurrent.java?rev=767331&view=auto
==============================================================================
--- hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/tools/offlineImageViewer/ImageLoaderCurrent.java (added)
+++ hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/tools/offlineImageViewer/ImageLoaderCurrent.java Tue Apr 21 23:39:33 2009
@@ -0,0 +1,296 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.tools.offlineImageViewer;
+
+import java.io.DataInputStream;
+import java.io.IOException;
+import java.text.DateFormat;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+
+import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.hdfs.protocol.DatanodeInfo.AdminStates;
+import org.apache.hadoop.hdfs.server.namenode.FSImage;
+import org.apache.hadoop.hdfs.tools.offlineImageViewer.ImageVisitor.ImageElement;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.WritableUtils;
+
+/**
+ * ImageLoaderCurrent processes Hadoop FSImage files and walks over
+ * them using a provided ImageVisitor, calling the visitor at each element
+ * enumerated below.
+ *
+ * The only difference between v18 and v19 was the utilization of the
+ * stickybit. Therefore, the same viewer can read either format.
+ *
+ * Versions -19 fsimage layout (with changes from -16 up):
+ * Image version (int)
+ * Namespace ID (int)
+ * NumFiles (long)
+ * Generation stamp (long)
+ * INodes (count = NumFiles)
+ * INode
+ * Path (String)
+ * Replication (short)
+ * Modification Time (long as date)
+ * Access Time (long) // added in -16
+ * Block size (long)
+ * Num blocks (int)
+ * Blocks (count = Num blocks)
+ * Block
+ * Block ID (long)
+ * Num bytes (long)
+ * Generation stamp (long)
+ * Namespace Quota (long)
+ * Diskspace Quota (long) // added in -18
+ * Permissions
+ * Username (String)
+ * Groupname (String)
+ * OctalPerms (short -> String) // Modified in -19
+ * NumINodesUnderConstruction (int)
+ * INodesUnderConstruction (count = NumINodesUnderConstruction)
+ * INodeUnderConstruction
+ * Path (bytes as string)
+ * Replication (short)
+ * Modification time (long as date)
+ * Preferred block size (long)
+ * Num blocks (int)
+ * Blocks
+ * Block
+ * Block ID (long)
+ * Num bytes (long)
+ * Generation stamp (long)
+ * Permissions
+ * Username (String)
+ * Groupname (String)
+ * OctalPerms (short -> String)
+ * Client Name (String)
+ * Client Machine (String)
+ * NumLocations (int)
+ * DatanodeDescriptors (count = numLocations) // not loaded into memory
+ * short // but still in file
+ * long
+ * string
+ * long
+ * int
+ * string
+ * string
+ * enum
+ *
+ */
+class ImageLoaderCurrent implements ImageLoader {
+ protected final DateFormat dateFormat =
+ new SimpleDateFormat("yyyy-MM-dd HH:mm");
+ private static int [] versions = {-16, -17, -18, -19};
+ private int imageVersion = 0;
+
+ /* (non-Javadoc)
+ * @see ImageLoader#canProcessVersion(int)
+ */
+ @Override
+ public boolean canLoadVersion(int version) {
+ for(int v : versions)
+ if(v == version) return true;
+
+ return false;
+ }
+
+ /* (non-Javadoc)
+ * @see ImageLoader#processImage(java.io.DataInputStream, ImageVisitor, boolean)
+ */
+ @Override
+ public void loadImage(DataInputStream in, ImageVisitor v,
+ boolean skipBlocks) throws IOException {
+ try {
+ v.start();
+ v.visitEnclosingElement(ImageElement.FSImage);
+
+ imageVersion = in.readInt();
+ if( !canLoadVersion(imageVersion))
+ throw new IOException("Cannot process fslayout version " + imageVersion);
+
+ v.visit(ImageElement.ImageVersion, imageVersion);
+ v.visit(ImageElement.NamespaceID, in.readInt());
+
+ long numInodes = in.readLong();
+
+ v.visit(ImageElement.GenerationStamp, in.readLong());
+
+ processINodes(in, v, numInodes, skipBlocks);
+
+ processINodesUC(in, v, skipBlocks);
+
+ v.leaveEnclosingElement(); // FSImage
+ v.finish();
+ } catch(IOException e) {
+ // Tell the visitor to clean up, then re-throw the exception
+ v.finishAbnormally();
+ throw e;
+ }
+ }
+
+ /**
+ * Process the INodes under construction section of the fsimage.
+ *
+ * @param in DataInputStream to process
+ * @param v Visitor to walk over inodes
+ * @param numINUC Number of inodes recorded in file
+ * @param skipBlocks Walk over each block?
+ */
+ private void processINodesUC(DataInputStream in, ImageVisitor v,
+ boolean skipBlocks) throws IOException {
+ int numINUC = in.readInt();
+
+ v.visitEnclosingElement(ImageElement.INodesUnderConstruction,
+ ImageElement.NumINodesUnderConstruction, numINUC);
+
+ for(int i = 0; i < numINUC; i++) {
+ v.visitEnclosingElement(ImageElement.INodeUnderConstruction);
+ byte [] name = FSImage.readBytes(in);
+ String n = new String(name, "UTF8");
+ v.visit(ImageElement.INodePath, n);
+ v.visit(ImageElement.Replication, in.readShort());
+ v.visit(ImageElement.ModificationTime, formatDate(in.readLong()));
+
+ v.visit(ImageElement.PreferredBlockSize, in.readLong());
+ int numBlocks = in.readInt();
+ processBlocks(in, v, numBlocks, skipBlocks);
+
+ processPermission(in, v);
+ v.visit(ImageElement.ClientName, FSImage.readString(in));
+ v.visit(ImageElement.ClientMachine, FSImage.readString(in));
+
+ // Skip over the datanode descriptors, which are still stored in the
+ // file but are not used by the datanode or loaded into memory
+ int numLocs = in.readInt();
+ for(int j = 0; j < numLocs; j++) {
+ in.readShort();
+ in.readLong();
+ in.readLong();
+ in.readLong();
+ in.readInt();
+ FSImage.readString(in);
+ FSImage.readString(in);
+ WritableUtils.readEnum(in, AdminStates.class);
+ }
+
+ v.leaveEnclosingElement(); // INodeUnderConstruction
+ }
+
+ v.leaveEnclosingElement(); // INodesUnderConstruction
+ }
+
+ /**
+ * Process the blocks section of the fsimage.
+ *
+ * @param in Datastream to process
+ * @param v Visitor to walk over inodes
+ * @param skipBlocks Walk over each block?
+ */
+ private void processBlocks(DataInputStream in, ImageVisitor v,
+ int numBlocks, boolean skipBlocks) throws IOException {
+ v.visitEnclosingElement(ImageElement.Blocks,
+ ImageElement.NumBlocks, numBlocks);
+
+ if(numBlocks == -1) { // directory, no blocks to process
+ v.leaveEnclosingElement(); // Blocks
+ return;
+ }
+
+ if(skipBlocks) {
+ int bytesToSkip = ((Long.SIZE * 3 /* fields */) / 8 /*bits*/) * numBlocks;
+ if(in.skipBytes(bytesToSkip) != bytesToSkip)
+ throw new IOException("Error skipping over blocks");
+
+ } else {
+ for(int j = 0; j < numBlocks; j++) {
+ v.visitEnclosingElement(ImageElement.Block);
+ v.visit(ImageElement.BlockID, in.readLong());
+ v.visit(ImageElement.NumBytes, in.readLong());
+ v.visit(ImageElement.GenerationStamp, in.readLong());
+ v.leaveEnclosingElement(); // Block
+ }
+ }
+ v.leaveEnclosingElement(); // Blocks
+ }
+
+ /**
+ * Extract the INode permissions stored in the fsimage file.
+ *
+ * @param in Datastream to process
+ * @param v Visitor to walk over inodes
+ */
+ private void processPermission(DataInputStream in, ImageVisitor v)
+ throws IOException {
+ v.visitEnclosingElement(ImageElement.Permissions);
+ v.visit(ImageElement.Username, Text.readString(in));
+ v.visit(ImageElement.GroupName, Text.readString(in));
+ FsPermission fsp = new FsPermission(in.readShort());
+ v.visit(ImageElement.PermString, fsp.toString());
+ v.leaveEnclosingElement(); // Permissions
+ }
+
+ /**
+ * Process the INode records stored in the fsimage.
+ *
+ * @param in Datastream to process
+ * @param v Visitor to walk over INodes
+ * @param numInodes Number of INodes stored in file
+ * @param skipBlocks Process all the blocks within the INode?
+ * @throws VisitException
+ * @throws IOException
+ */
+ private void processINodes(DataInputStream in, ImageVisitor v,
+ long numInodes, boolean skipBlocks) throws IOException {
+ v.visitEnclosingElement(ImageElement.INodes,
+ ImageElement.NumInodes, numInodes);
+
+ for(long i = 0; i < numInodes; i++) {
+ v.visitEnclosingElement(ImageElement.Inode);
+ v.visit(ImageElement.INodePath, FSImage.readString(in));
+ v.visit(ImageElement.Replication, in.readShort());
+ v.visit(ImageElement.ModificationTime, formatDate(in.readLong()));
+ if(imageVersion <= -17) // added in version -17
+ v.visit(ImageElement.AccessTime, formatDate(in.readLong()));
+ v.visit(ImageElement.BlockSize, in.readLong());
+ int numBlocks = in.readInt();
+
+ processBlocks(in, v, numBlocks, skipBlocks);
+
+ if(numBlocks != 0) {
+ v.visit(ImageElement.NSQuota, numBlocks <= 0 ? in.readLong() : -1);
+ if(imageVersion <= -18) // added in version -18
+ v.visit(ImageElement.DSQuota, numBlocks <= 0 ? in.readLong() : -1);
+ }
+
+ processPermission(in, v);
+ v.leaveEnclosingElement(); // INode
+ }
+
+ v.leaveEnclosingElement(); // INodes
+ }
+
+ /**
+ * Helper method to format dates during processing.
+ * @param date Date as read from image file
+ * @return String version of date format
+ */
+ private String formatDate(long date) {
+ return dateFormat.format(new Date(date));
+ }
+}
Added: hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/tools/offlineImageViewer/ImageVisitor.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/tools/offlineImageViewer/ImageVisitor.java?rev=767331&view=auto
==============================================================================
--- hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/tools/offlineImageViewer/ImageVisitor.java (added)
+++ hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/tools/offlineImageViewer/ImageVisitor.java Tue Apr 21 23:39:33 2009
@@ -0,0 +1,153 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.tools.offlineImageViewer;
+
+import java.io.IOException;
+
+/**
+ * An implementation of ImageVisitor can traverse the structure of an
+ * Hadoop fsimage and respond to each of the structures within the file.
+ */
/**
 * Base class for visitors over the structure of a Hadoop fsimage.
 * Concrete implementations receive a callback for every structural
 * element encountered while the image file is traversed.
 */
abstract class ImageVisitor {

  /**
   * The structural elements an fsimage may contain. A visitor must be
   * prepared to handle a callback for any of these.
   */
  public enum ImageElement {
    FSImage,
    ImageVersion,
    NamespaceID,
    LayoutVersion,
    NumInodes,
    GenerationStamp,
    INodes,
    Inode,
    INodePath,
    Replication,
    ModificationTime,
    AccessTime,
    BlockSize,
    NumBlocks,
    Blocks,
    Block,
    BlockID,
    NumBytes,
    NSQuota,
    DSQuota,
    Permissions,
    NumINodesUnderConstruction,
    INodesUnderConstruction,
    INodeUnderConstruction,
    PreferredBlockSize,
    ClientName,
    ClientMachine,
    Username,
    GroupName,
    PermString
  }

  /**
   * Small helper that tracks how deeply the traversal has descended
   * into the image structure. The level never drops below zero.
   */
  protected static class DepthCounter {
    private int depth = 0;

    public void incLevel() { depth++; }

    public void decLevel() {
      if (depth >= 1) {
        depth--;
      }
    }

    public int getLevel() { return depth; }
  }

  /**
   * Called once before traversal begins; implementations may perform
   * any required initialization here.
   */
  abstract void start() throws IOException;

  /**
   * Called once after a successful traversal; implementations may
   * perform any required cleanup here.
   */
  abstract void finish() throws IOException;

  /**
   * Called when traversal is aborted by an error, giving the
   * implementation a chance to clean up despite the failure.
   */
  abstract void finishAbnormally() throws IOException;

  /**
   * Report a single image element together with its value.
   *
   * @param element FSImage element
   * @param value Element's value
   */
  abstract void visit(ImageElement element, String value) throws IOException;

  /** Convenience overload that renders an int value as a string. */
  void visit(ImageElement element, int value) throws IOException {
    visit(element, String.valueOf(value));
  }

  /** Convenience overload that renders a long value as a string. */
  void visit(ImageElement element, long value) throws IOException {
    visit(element, String.valueOf(value));
  }

  /**
   * Report entry into an element that contains other elements, such as
   * the list of blocks belonging to a file.
   *
   * @param element Element being visited
   */
  abstract void visitEnclosingElement(ImageElement element)
      throws IOException;

  /**
   * Report entry into an element that contains other elements, along
   * with one key/value attribute describing it (for example, the number
   * of items it holds).
   *
   * @param element Element being visited
   * @param key Key describing the element being visited
   * @param value Value associated with element being visited
   */
  abstract void visitEnclosingElement(ImageElement element,
      ImageElement key, String value) throws IOException;

  /** Convenience overload that renders an int attribute value as a string. */
  void visitEnclosingElement(ImageElement element,
      ImageElement key, int value) throws IOException {
    visitEnclosingElement(element, key, String.valueOf(value));
  }

  /** Convenience overload that renders a long attribute value as a string. */
  void visitEnclosingElement(ImageElement element,
      ImageElement key, long value) throws IOException {
    visitEnclosingElement(element, key, String.valueOf(value));
  }

  /**
   * Report that the current enclosing element has ended; called, for
   * instance, at the end of the blocks that comprise a file.
   */
  abstract void leaveEnclosingElement() throws IOException;
}
Added: hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/tools/offlineImageViewer/IndentedImageVisitor.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/tools/offlineImageViewer/IndentedImageVisitor.java?rev=767331&view=auto
==============================================================================
--- hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/tools/offlineImageViewer/IndentedImageVisitor.java (added)
+++ hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/tools/offlineImageViewer/IndentedImageVisitor.java Tue Apr 21 23:39:33 2009
@@ -0,0 +1,99 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.tools.offlineImageViewer;
+
+import java.io.IOException;
+
+/**
+ * IndentedImageVisitor walks over an FSImage and displays its structure
+ * using indenting to organize sections within the image file.
+ */
+class IndentedImageVisitor extends TextWriterImageVisitor {
+
+ public IndentedImageVisitor(String filename) throws IOException {
+ super(filename);
+ }
+
+ public IndentedImageVisitor(String filename, boolean printToScreen) throws IOException {
+ super(filename, printToScreen);
+ }
+
+ final private DepthCounter dc = new DepthCounter();// to track leading spacing
+
+ @Override
+ void start() throws IOException {}
+
+ @Override
+ void finish() throws IOException { super.finish(); }
+
+ @Override
+ void finishAbnormally() throws IOException {
+ System.out.println("*** Image processing finished abnormally. Ending ***");
+ super.finishAbnormally();
+ }
+
+ @Override
+ void leaveEnclosingElement() throws IOException {
+ dc.decLevel();
+ }
+
+ @Override
+ void visit(ImageElement element, String value) throws IOException {
+ printIndents();
+ write(element + " = " + value + "\n");
+ }
+
+ @Override
+ void visitEnclosingElement(ImageElement element) throws IOException {
+ printIndents();
+ write(element + "\n");
+ dc.incLevel();
+ }
+
+ // Print element, along with associated key/value pair, in brackets
+ @Override
+ void visitEnclosingElement(ImageElement element,
+ ImageElement key, String value)
+ throws IOException {
+ printIndents();
+ write(element + " [" + key + " = " + value + "]\n");
+ dc.incLevel();
+ }
+
+ /**
+ * Print an appropriate number of spaces for the current level.
+ * FsImages can potentially be millions of lines long, so caching can
+ * significantly speed up output.
+ */
+ final private static String [] indents = { "",
+ " ",
+ " ",
+ " ",
+ " ",
+ " ",
+ " "};
+ private void printIndents() throws IOException {
+ try {
+ write(indents[dc.getLevel()]);
+ } catch (IndexOutOfBoundsException e) {
+ // There's no reason in an fsimage would need a deeper indent
+ for(int i = 0; i < dc.getLevel(); i++)
+ write(" ");
+ }
+ }
+}
Added: hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/tools/offlineImageViewer/LsImageVisitor.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/tools/offlineImageViewer/LsImageVisitor.java?rev=767331&view=auto
==============================================================================
--- hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/tools/offlineImageViewer/LsImageVisitor.java (added)
+++ hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/tools/offlineImageViewer/LsImageVisitor.java Tue Apr 21 23:39:33 2009
@@ -0,0 +1,172 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.tools.offlineImageViewer;
+
+import java.io.IOException;
+import java.util.Formatter;
+import java.util.LinkedList;
+
+/**
+ * LsImageVisitor displays the blocks of the namespace in a format very similar
+ * to the output of ls/lsr. Entries are marked as directories or not,
+ * permissions listed, replication, username and groupname, along with size,
+ * modification date and full path.
+ *
+ * Note: A significant difference between the output of the lsr command
+ * and this image visitor is that this class cannot sort the file entries;
+ * they are listed in the order they are stored within the fsimage file.
+ * Therefore, the output of this class cannot be directly compared to the
+ * output of the lsr command.
+ */
+class LsImageVisitor extends TextWriterImageVisitor {
+ final private LinkedList<ImageElement> elemQ = new LinkedList<ImageElement>();
+
+ private int numBlocks;
+ private String perms;
+ private int replication;
+ private String username;
+ private String group;
+ private long filesize;
+ private String modTime;
+ private String path;
+
+ private boolean inInode = false;
+ final private StringBuilder sb = new StringBuilder();
+ final private Formatter formatter = new Formatter(sb);
+
+ public LsImageVisitor(String filename) throws IOException {
+ super(filename);
+ }
+
+ public LsImageVisitor(String filename, boolean printToScreen) throws IOException {
+ super(filename, printToScreen);
+ }
+
+ /**
+ * Start a new line of output, reset values.
+ */
+ private void newLine() {
+ numBlocks = 0;
+ perms = username = group = path = "";
+ filesize = 0l;
+ replication = 0;
+
+ inInode = true;
+ }
+
+ /**
+ * All the values have been gathered. Print them to the console in an
+ * ls-style format.
+ * @throws IOException
+ */
+ private final static int widthRepl = 2;
+ private final static int widthUser = 8;
+ private final static int widthGroup = 10;
+ private final static int widthSize = 10;
+ private final static int widthMod = 10;
+ private final static String lsStr = " %" + widthRepl + "s %" + widthUser +
+ "s %" + widthGroup + "s %" + widthSize +
+ "d %" + widthMod + "s %s";
+ private void printLine() throws IOException {
+ sb.append(numBlocks < 0 ? "d" : "-");
+ sb.append(perms);
+
+ formatter.format(lsStr, replication > 0 ? replication : "-",
+ username, group, filesize, modTime, path);
+ sb.append("\n");
+
+ write(sb.toString());
+ sb.setLength(0); // clear string builder
+
+ inInode = false;
+ }
+
+ @Override
+ void start() throws IOException {}
+
+ @Override
+ void finish() throws IOException {
+ super.finish();
+ }
+
+ @Override
+ void finishAbnormally() throws IOException {
+ System.out.println("Output ended unexpectedly.");
+ super.finishAbnormally();
+ }
+
+ @Override
+ void leaveEnclosingElement() throws IOException {
+ ImageElement elem = elemQ.pop();
+
+ if(elem == ImageElement.Inode)
+ printLine();
+ }
+
+ // Maintain state of location within the image tree and record
+ // values needed to display the inode in ls-style format.
+ @Override
+ void visit(ImageElement element, String value) throws IOException {
+ if(inInode) {
+ switch(element) {
+ case INodePath:
+ if(value.equals("")) path = "/";
+ else path = value;
+ break;
+ case PermString:
+ perms = value;
+ break;
+ case Replication:
+ replication = Integer.valueOf(value);
+ break;
+ case Username:
+ username = value;
+ break;
+ case GroupName:
+ group = value;
+ break;
+ case NumBytes:
+ filesize += Long.valueOf(value);
+ break;
+ case ModificationTime:
+ modTime = value;
+ break;
+ default:
+ // This is OK. We're not looking for all the values.
+ break;
+ }
+ }
+ }
+
+ @Override
+ void visitEnclosingElement(ImageElement element) throws IOException {
+ elemQ.push(element);
+ if(element == ImageElement.Inode)
+ newLine();
+ }
+
+ @Override
+ void visitEnclosingElement(ImageElement element,
+ ImageElement key, String value) throws IOException {
+ elemQ.push(element);
+ if(element == ImageElement.Inode)
+ newLine();
+ else if (element == ImageElement.Blocks)
+ numBlocks = Integer.valueOf(value);
+ }
+}
Added: hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/tools/offlineImageViewer/OfflineImageViewer.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/tools/offlineImageViewer/OfflineImageViewer.java?rev=767331&view=auto
==============================================================================
--- hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/tools/offlineImageViewer/OfflineImageViewer.java (added)
+++ hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/tools/offlineImageViewer/OfflineImageViewer.java Tue Apr 21 23:39:33 2009
@@ -0,0 +1,231 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.tools.offlineImageViewer;
+
+import java.io.BufferedInputStream;
+import java.io.DataInputStream;
+import java.io.EOFException;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.CommandLineParser;
+import org.apache.commons.cli.OptionBuilder;
+import org.apache.commons.cli.Options;
+import org.apache.commons.cli.ParseException;
+import org.apache.commons.cli.PosixParser;
+
+/**
+ * OfflineImageViewer to dump the contents of an Hadoop image file to XML
+ * or the console. Main entry point into utility, either via the
+ * command line or programatically.
+ */
+public class OfflineImageViewer {
+ private final static String usage =
+ "Usage: bin/hdfs oiv [OPTIONS] -i INPUTFILE -o OUTPUTFILE\n" +
+ "Offline Image Viewer\n" +
+ "View a Hadoop fsimage INPUTFILE using the specified PROCESSOR,\n" +
+ "saving the results in OUTPUTFILE.\n" +
+ "\n" +
+ "The oiv utility will attempt to parse correctly formed image files\n" +
+ "and will abort fail with mal-formed image files. Currently the\n" +
+ "supports FSImage layout versions -16 through -19.\n" +
+ "\n" +
+ "The tool works offline and does not require a running cluster in\n" +
+ "order to process an image file.\n" +
+ "\n" +
+ "The following image processors are available:\n" +
+ " * Ls: The default image processor generates an lsr-style listing\n" +
+ " of the files in the namespace, with the same fields in the same\n" +
+ " order. Note that in order to correctly determine file sizes,\n" +
+ " this formatter cannot skip blocks and will override the\n" +
+ " -skipBlocks option.\n" +
+ " * Indented: This processor enumerates over all of the elements in\n" +
+ " the fsimage file, using levels of indentation to delineate\n" +
+ " sections within the file.\n" +
+ " * XML: This processor creates an XML document with all elements of\n" +
+ " the fsimage enumerated, suitable for further analysis by XML\n" +
+ " tools.\n" +
+ "\n" +
+ "Required command line arguments:\n" +
+ "-i,--inputFile <arg> FSImage file to process.\n" +
+ "-o,--outputFile <arg> Name of output file. If the specified\n" +
+ " file exists, it will be overwritten.\n" +
+ "\n" +
+ "Optional command line arguments:\n" +
+ "-p,--processor <arg> Select which type of processor to apply\n" +
+ " against image file. (Ls|XML|Indented).\n" +
+ "-h,--help Display usage information and exit\n" +
+ "-printToScreen For processors that write to a file, also\n" +
+ " output to screen. On large image files this\n" +
+ " will dramatically increase processing time.\n" +
+ "-skipBlocks Skip inodes' blocks information. May\n" +
+ " significantly decrease output.\n" +
+ " (default = false).\n";
+
+ private final boolean skipBlocks;
+ private final String inputFile;
+ private final ImageVisitor processor;
+
+ public OfflineImageViewer(String inputFile, ImageVisitor processor,
+ boolean skipBlocks) {
+ this.inputFile = inputFile;
+ this.processor = processor;
+ this.skipBlocks = skipBlocks;
+ }
+
+ /**
+ * Process image file.
+ */
+ public void go() throws IOException {
+ DataInputStream in = null;
+
+ try {
+ in = new DataInputStream(new BufferedInputStream(
+ new FileInputStream(new File(inputFile))));
+
+ int imageVersionFile = findImageVersion(in);
+
+ ImageLoader fsip =
+ ImageLoader.LoaderFactory.getLoader(imageVersionFile);
+
+ if(fsip == null)
+ throw new IOException("No image processor to read version " +
+ imageVersionFile + " is available.");
+
+ fsip.loadImage(in, processor, skipBlocks);
+
+ } finally {
+ if(in != null) in.close();
+ }
+ }
+
+ /**
+ * Check an fsimage datainputstream's version number.
+ *
+ * The datainput stream is returned at the same point as it was passed in;
+ * this method has no effect on the datainputstream's read pointer.
+ *
+ * @param in Datainputstream of fsimage
+ * @return Filesystem layout version of fsimage represented by stream
+ * @throws IOException If problem reading from in
+ */
+ private int findImageVersion(DataInputStream in) throws IOException {
+ in.mark(42); // arbitrary amount, resetting immediately
+
+ int version = in.readInt();
+ in.reset();
+
+ return version;
+ }
+
+ /**
+ * Build command-line options and descriptions
+ */
+ public static Options buildOptions() {
+ Options options = new Options();
+
+ // Build in/output file arguments, which are required, but there is no
+ // addOption method that can specify this
+ OptionBuilder.isRequired();
+ OptionBuilder.hasArgs();
+ OptionBuilder.withLongOpt("outputFile");
+ options.addOption(OptionBuilder.create("o"));
+
+ OptionBuilder.isRequired();
+ OptionBuilder.hasArgs();
+ OptionBuilder.withLongOpt("inputFile");
+ options.addOption(OptionBuilder.create("i"));
+
+ options.addOption("p", "processor", true, "");
+ options.addOption("h", "help", false, "");
+ options.addOption("skipBlocks", false, "");
+ options.addOption("printToScreen", false, "");
+
+ return options;
+ }
+
+ /**
+ * Entry point to command-line-driven operation. User may specify
+ * options and start fsimage viewer from the command line. Program
+ * will process image file and exit cleanly or, if an error is
+ * encountered, inform user and exit.
+ *
+ * @param args Command line options
+ * @throws IOException
+ */
+ public static void main(String[] args) throws IOException {
+ Options options = buildOptions();
+ if(args.length == 0) {
+ printUsage();
+ return;
+ }
+
+ CommandLineParser parser = new PosixParser();
+ CommandLine cmd;
+
+ try {
+ cmd = parser.parse(options, args);
+ } catch (ParseException e) {
+ System.out.println("Error parsing command-line options: ");
+ printUsage();
+ return;
+ }
+
+ if(cmd.hasOption("h")) { // print help and exit
+ printUsage();
+ return;
+ }
+
+ boolean skipBlocks = cmd.hasOption("skipBlocks");
+ boolean printToScreen = cmd.hasOption("printToScreen");
+ String inputFile = cmd.getOptionValue("i");
+ String processor = cmd.getOptionValue("p", "Ls");
+ String outputFile;
+
+ ImageVisitor v;
+ if(processor.equals("Indented")) {
+ outputFile = cmd.getOptionValue("o");
+ v = new IndentedImageVisitor(outputFile, printToScreen);
+ } else if (processor.equals("XML")) {
+ outputFile = cmd.getOptionValue("o");
+ v = new XmlImageVisitor(outputFile, printToScreen);
+ } else {
+ outputFile = cmd.getOptionValue("o");
+ v = new LsImageVisitor(outputFile, printToScreen);
+ skipBlocks = false;
+ }
+
+ try {
+ OfflineImageViewer d = new OfflineImageViewer(inputFile, v, skipBlocks);
+ d.go();
+ } catch (EOFException e) {
+ System.err.println("Input file ended unexpectedly. Exiting");
+ } catch(IOException e) {
+ System.err.println("Encountered exception. Exiting: " + e.getMessage());
+ }
+ }
+
+ /**
+ * Print application usage instructions.
+ */
+ private static void printUsage() {
+ System.out.println(usage);
+ }
+}
Added: hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/tools/offlineImageViewer/TextWriterImageVisitor.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/tools/offlineImageViewer/TextWriterImageVisitor.java?rev=767331&view=auto
==============================================================================
--- hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/tools/offlineImageViewer/TextWriterImageVisitor.java (added)
+++ hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/tools/offlineImageViewer/TextWriterImageVisitor.java Tue Apr 21 23:39:33 2009
@@ -0,0 +1,106 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.tools.offlineImageViewer;
+
+import java.io.FileWriter;
+import java.io.IOException;
+
+/**
+ * TextWriterImageProcessor mixes in the ability for ImageVisitor
+ * implementations to easily write their output to a text file.
+ *
+ * Implementing classes should be sure to call the super methods for the
+ * constructors, finish and finishAbnormally methods, in order that the
+ * underlying file may be opened and closed correctly.
+ *
+ * Note, this class does not add newlines to text written to file or (if
+ * enabled) screen. This is the implementing class' responsibility.
+ */
+abstract class TextWriterImageVisitor extends ImageVisitor {
+ private boolean printToScreen = false;
+ private boolean okToWrite = false;
+ final private FileWriter fw;
+
+ /**
+ * Create a processor that writes to the file named.
+ *
+ * @param filename Name of file to write output to
+ */
+ public TextWriterImageVisitor(String filename) throws IOException {
+ this(filename, false);
+ }
+
+ /**
+ * Create a processor that writes to the file named and may or may not
+ * also output to the screen, as specified.
+ *
+ * @param filename Name of file to write output to
+ * @param printToScreen Mirror output to screen?
+ */
+ public TextWriterImageVisitor(String filename, boolean printToScreen)
+ throws IOException {
+ super();
+ this.printToScreen = printToScreen;
+ fw = new FileWriter(filename);
+ okToWrite = true;
+ }
+
+ /* (non-Javadoc)
+ * @see org.apache.hadoop.hdfs.tools.offlineImageViewer.ImageVisitor#finish()
+ */
+ @Override
+ void finish() throws IOException {
+ close();
+ }
+
+ /* (non-Javadoc)
+ * @see org.apache.hadoop.hdfs.tools.offlineImageViewer.ImageVisitor#finishAbnormally()
+ */
+ @Override
+ void finishAbnormally() throws IOException {
+ close();
+ }
+
+ /**
+ * Close output stream and prevent further writing
+ */
+ private void close() throws IOException {
+ fw.close();
+ okToWrite = false;
+ }
+
+ /**
+ * Write parameter to output file (and possibly screen).
+ *
+ * @param toWrite Text to write to file
+ */
+ protected void write(String toWrite) throws IOException {
+ if(!okToWrite)
+ throw new IOException("file not open for writing.");
+
+ if(printToScreen)
+ System.out.print(toWrite);
+
+ try {
+ fw.write(toWrite);
+ } catch (IOException e) {
+ okToWrite = false;
+ throw e;
+ }
+ }
+}
Added: hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/tools/offlineImageViewer/XmlImageVisitor.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/tools/offlineImageViewer/XmlImageVisitor.java?rev=767331&view=auto
==============================================================================
--- hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/tools/offlineImageViewer/XmlImageVisitor.java (added)
+++ hadoop/core/trunk/src/hdfs/org/apache/hadoop/hdfs/tools/offlineImageViewer/XmlImageVisitor.java Tue Apr 21 23:39:33 2009
@@ -0,0 +1,88 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.tools.offlineImageViewer;
+
+import java.io.IOException;
+import java.util.LinkedList;
+
+/**
+ * An XmlImageVisitor walks over an fsimage structure and writes out
+ * an equivalent XML document that contains the fsimage's components.
+ */
+class XmlImageVisitor extends TextWriterImageVisitor {
+ final private LinkedList<ImageElement> tagQ =
+ new LinkedList<ImageElement>();
+
+ public XmlImageVisitor(String filename) throws IOException {
+ super(filename, false);
+ }
+
+ public XmlImageVisitor(String filename, boolean printToScreen)
+ throws IOException {
+ super(filename, printToScreen);
+ }
+
+ @Override
+ void finish() throws IOException {
+ super.finish();
+ }
+
+ @Override
+ void finishAbnormally() throws IOException {
+ write("\n<!-- Error processing image file. Exiting -->\n");
+ super.finishAbnormally();
+ }
+
+ @Override
+ void leaveEnclosingElement() throws IOException {
+ if(tagQ.size() == 0)
+ throw new IOException("Tried to exit non-existent enclosing element " +
+ "in FSImage file");
+
+ ImageElement element = tagQ.pop();
+ write("</" + element.toString() + ">\n");
+ }
+
+ @Override
+ void start() throws IOException {
+ write("<?xml version=\"1.0\" ?>\n");
+ }
+
+ @Override
+ void visit(ImageElement element, String value) throws IOException {
+ writeTag(element.toString(), value);
+ }
+
+ @Override
+ void visitEnclosingElement(ImageElement element) throws IOException {
+ write("<" + element.toString() + ">\n");
+ tagQ.push(element);
+ }
+
+ @Override
+ void visitEnclosingElement(ImageElement element,
+ ImageElement key, String value)
+ throws IOException {
+ write("<" + element.toString() + " " + key + "=\"" + value +"\">\n");
+ tagQ.push(element);
+ }
+
+ private void writeTag(String tag, String value) throws IOException {
+ write("<" + tag + ">" + value + "</" + tag + ">\n");
+ }
+}
Added: hadoop/core/trunk/src/test/org/apache/hadoop/hdfs/tools/offlineImageViewer/SpotCheckImageVisitor.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/test/org/apache/hadoop/hdfs/tools/offlineImageViewer/SpotCheckImageVisitor.java?rev=767331&view=auto
==============================================================================
--- hadoop/core/trunk/src/test/org/apache/hadoop/hdfs/tools/offlineImageViewer/SpotCheckImageVisitor.java (added)
+++ hadoop/core/trunk/src/test/org/apache/hadoop/hdfs/tools/offlineImageViewer/SpotCheckImageVisitor.java Tue Apr 21 23:39:33 2009
@@ -0,0 +1,89 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.tools.offlineImageViewer;
+
+import java.io.IOException;
+import java.util.HashSet;
+import java.util.Set;
+
+/**
+ * ImageVisitor to spot check an fsimage and generate several statistics
+ * about it that we can compare with known values to give a reasonable
+ * assertion that the image was processed correctly.
+ */
+class SpotCheckImageVisitor extends ImageVisitor {
+
+ // Statistics gathered by the visitor for Inodes and InodesUnderConstruction
+ static public class ImageInfo {
+ public long totalNumBlocks = 0; // Total number of blocks in section
+ public Set<String> pathNames = new HashSet<String>(); // All path names
+ public long totalFileSize = 0; // Total size of all the files
+ public long totalReplications = 0; // Sum of all the replications
+ }
+
+ final private ImageInfo inodes = new ImageInfo();
+ final private ImageInfo INUCs = new ImageInfo();
+ private ImageInfo current = null;
+
+ @Override
+ void visit(ImageElement element, String value) throws IOException {
+ if(element == ImageElement.NumBytes)
+ current.totalFileSize += Long.valueOf(value);
+ else if (element == ImageElement.Replication)
+ current.totalReplications += Long.valueOf(value);
+ else if (element == ImageElement.INodePath)
+ current.pathNames.add(value);
+ }
+
+ @Override
+ void visitEnclosingElement(ImageElement element, ImageElement key,
+ String value) throws IOException {
+ switch(element) {
+ case INodes:
+ current = inodes;
+ break;
+ case INodesUnderConstruction:
+ current = INUCs;
+ break;
+ case Blocks:
+ current.totalNumBlocks += Long.valueOf(value);
+ break;
+ // OK to not have a default, we're skipping most of the values
+ }
+ }
+
+ public ImageInfo getINodesInfo() { return inodes; }
+
+ public ImageInfo getINUCsInfo() { return INUCs; }
+
+ // Unnecessary visitor methods
+ @Override
+ void finish() throws IOException {}
+
+ @Override
+ void finishAbnormally() throws IOException {}
+
+ @Override
+ void leaveEnclosingElement() throws IOException {}
+
+ @Override
+ void start() throws IOException {}
+
+ @Override
+ void visitEnclosingElement(ImageElement element) throws IOException {}
+}
Added: hadoop/core/trunk/src/test/org/apache/hadoop/hdfs/tools/offlineImageViewer/TestOIVCanReadOldVersions.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/test/org/apache/hadoop/hdfs/tools/offlineImageViewer/TestOIVCanReadOldVersions.java?rev=767331&view=auto
==============================================================================
--- hadoop/core/trunk/src/test/org/apache/hadoop/hdfs/tools/offlineImageViewer/TestOIVCanReadOldVersions.java (added)
+++ hadoop/core/trunk/src/test/org/apache/hadoop/hdfs/tools/offlineImageViewer/TestOIVCanReadOldVersions.java Tue Apr 21 23:39:33 2009
@@ -0,0 +1,132 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.tools.offlineImageViewer;
+
+import java.io.IOException;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.Set;
+
+import org.apache.hadoop.hdfs.tools.offlineImageViewer.SpotCheckImageVisitor.ImageInfo;
+
+import junit.framework.TestCase;
+
+public class TestOIVCanReadOldVersions extends TestCase {
+ // Location of fsimage files during testing.
+ public static final String TEST_CACHE_DATA_DIR =
+ System.getProperty("test.cache.data", "build/test/cache");
+
+ // Verify that the image processor can correctly process prior Hadoop
+ // layout versions. These fsimages were previously generated and stored
+ // with the test. Test success indicates that no changes have been made
+ // to the OIV that causes older fsimages to be incorrectly processed.
+ public void testOldFSImages() {
+ // Define the expected values from the prior versions, as they were created
+ // and verified at time of creation
+ Set<String> pathNames = new HashSet<String>();
+ Collections.addAll(pathNames, "", /* root */
+ "/bar",
+ "/bar/dir0",
+ "/bar/dir0/file0",
+ "/bar/dir0/file1",
+ "/bar/dir1",
+ "/bar/dir1/file0",
+ "/bar/dir1/file1",
+ "/bar/dir2",
+ "/bar/dir2/file0",
+ "/bar/dir2/file1",
+ "/foo",
+ "/foo/dir0",
+ "/foo/dir0/file0",
+ "/foo/dir0/file1",
+ "/foo/dir0/file2",
+ "/foo/dir0/file3",
+ "/foo/dir1",
+ "/foo/dir1/file0",
+ "/foo/dir1/file1",
+ "/foo/dir1/file2",
+ "/foo/dir1/file3");
+
+ Set<String> INUCpaths = new HashSet<String>();
+ Collections.addAll(INUCpaths, "/bar/dir0/file0",
+ "/bar/dir0/file1",
+ "/bar/dir1/file0",
+ "/bar/dir1/file1",
+ "/bar/dir2/file0",
+ "/bar/dir2/file1");
+
+ ImageInfo v18Inodes = new ImageInfo(); // Hadoop version 18 inodes
+ v18Inodes.totalNumBlocks = 12;
+ v18Inodes.totalFileSize = 1069548540l;
+ v18Inodes.pathNames = pathNames;
+ v18Inodes.totalReplications = 14;
+
+ ImageInfo v18INUCs = new ImageInfo(); // Hadoop version 18 inodes under construction
+ v18INUCs.totalNumBlocks = 0;
+ v18INUCs.totalFileSize = 0;
+ v18INUCs.pathNames = INUCpaths;
+ v18INUCs.totalReplications = 6;
+
+ ImageInfo v19Inodes = new ImageInfo(); // Hadoop version 19 inodes
+ v19Inodes.totalNumBlocks = 12;
+ v19Inodes.totalFileSize = 1069548540l;
+ v19Inodes.pathNames = pathNames;
+ v19Inodes.totalReplications = 14;
+
+ ImageInfo v19INUCs = new ImageInfo(); // Hadoop version 19 inodes under construction
+ v19INUCs.totalNumBlocks = 0;
+ v19INUCs.totalFileSize = 0;
+ v19INUCs.pathNames = INUCpaths;
+ v19INUCs.totalReplications = 6;
+
+
+ spotCheck("18", TEST_CACHE_DATA_DIR + "/fsimageV18", v18Inodes, v18INUCs);
+ spotCheck("19", TEST_CACHE_DATA_DIR + "/fsimageV19", v19Inodes, v19INUCs);
+ }
+
+ // Check that running the processor now gives us the same values as before
+ private void spotCheck(String hadoopVersion, String input,
+ ImageInfo inodes, ImageInfo INUCs) {
+ SpotCheckImageVisitor v = new SpotCheckImageVisitor();
+ OfflineImageViewer oiv = new OfflineImageViewer(input, v, false);
+ try {
+ oiv.go();
+ } catch (IOException e) {
+ fail("Error processing file: " + input);
+ }
+
+ compareSpotCheck(hadoopVersion, v.getINodesInfo(), inodes);
+ compareSpotCheck(hadoopVersion, v.getINUCsInfo(), INUCs);
+ System.out.println("Successfully processed fsimage file from Hadoop version " +
+ hadoopVersion);
+ }
+
+ // Compare the spot check results of what we generated from the image
+ // processor and what we expected to receive
+ private void compareSpotCheck(String hadoopVersion,
+ ImageInfo generated, ImageInfo expected) {
+ assertEquals("Version " + hadoopVersion + ": Same number of total blocks",
+ expected.totalNumBlocks, generated.totalNumBlocks);
+ assertEquals("Version " + hadoopVersion + ": Same total file size",
+ expected.totalFileSize, generated.totalFileSize);
+ assertEquals("Version " + hadoopVersion + ": Same total replication factor",
+ expected.totalReplications, generated.totalReplications);
+ assertEquals("Version " + hadoopVersion + ": One-to-one matching of path names",
+ expected.pathNames, generated.pathNames);
+ }
+}