You are viewing a plain text version of this content; the canonical (HTML) version is available in the original mailing-list archive.
Posted to common-commits@hadoop.apache.org by ar...@apache.org on 2016/03/04 02:02:19 UTC
[40/50] [abbrv] hadoop git commit: HDFS-9835. OIV: add ReverseXML
processor which reconstructs an fsimage from an XML file (cmccabe)
http://git-wip-us.apache.org/repos/asf/hadoop/blob/700b0e40/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/PBImageXmlWriter.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/PBImageXmlWriter.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/PBImageXmlWriter.java
index d23ff61..71dc939 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/PBImageXmlWriter.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/PBImageXmlWriter.java
@@ -23,17 +23,24 @@ import java.io.IOException;
import java.io.InputStream;
import java.io.PrintStream;
import java.io.RandomAccessFile;
+import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
+import java.util.Date;
+import java.util.TimeZone;
+import com.google.protobuf.ByteString;
+import org.apache.commons.codec.binary.Hex;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.permission.AclEntry;
+import org.apache.hadoop.fs.permission.PermissionStatus;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CacheDirectiveInfoExpirationProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CacheDirectiveInfoProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.CachePoolInfoProto;
import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto;
+import org.apache.hadoop.hdfs.protocol.proto.XAttrProtos;
import org.apache.hadoop.hdfs.server.namenode.FSImageFormatPBINode;
import org.apache.hadoop.hdfs.server.namenode.FSImageFormatProtobuf.SectionName;
import org.apache.hadoop.hdfs.server.namenode.FSImageUtil;
@@ -55,6 +62,14 @@ import org.apache.hadoop.hdfs.util.XMLUtils;
import org.apache.hadoop.util.LimitInputStream;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
+import org.apache.hadoop.util.VersionInfo;
+
+import static org.apache.hadoop.hdfs.server.namenode.FSImageFormatPBINode.XATTR_NAMESPACE_MASK;
+import static org.apache.hadoop.hdfs.server.namenode.FSImageFormatPBINode.XATTR_NAMESPACE_OFFSET;
+import static org.apache.hadoop.hdfs.server.namenode.FSImageFormatPBINode.XATTR_NAMESPACE_EXT_MASK;
+import static org.apache.hadoop.hdfs.server.namenode.FSImageFormatPBINode.XATTR_NAMESPACE_EXT_OFFSET;
+import static org.apache.hadoop.hdfs.server.namenode.FSImageFormatPBINode.XATTR_NAME_OFFSET;
+import static org.apache.hadoop.hdfs.server.namenode.FSImageFormatPBINode.XATTR_NAME_MASK;
/**
* PBImageXmlWriter walks over an fsimage structure and writes out
@@ -64,11 +79,20 @@ import com.google.common.collect.Lists;
public final class PBImageXmlWriter {
private final Configuration conf;
private final PrintStream out;
+ private final SimpleDateFormat isoDateFormat;
private String[] stringTable;
+ public static SimpleDateFormat createSimpleDateFormat() {
+ SimpleDateFormat format =
+ new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS");
+ format.setTimeZone(TimeZone.getTimeZone("UTC"));
+ return format;
+ }
+
public PBImageXmlWriter(Configuration conf, PrintStream out) {
this.conf = conf;
this.out = out;
+ this.isoDateFormat = createSimpleDateFormat();
}
public void visit(RandomAccessFile file) throws IOException {
@@ -80,6 +104,16 @@ public final class PBImageXmlWriter {
try (FileInputStream fin = new FileInputStream(file.getFD())) {
out.print("<?xml version=\"1.0\"?>\n<fsimage>");
+ out.print("<version>");
+ o("layoutVersion", summary.getLayoutVersion());
+ o("onDiskVersion", summary.getOndiskVersion());
+ // Output the version of OIV (which is not necessarily the version of
+ // the fsimage file). This could be helpful in the case where a bug
+ // in OIV leads to information loss in the XML-- we can quickly tell
+ // if a specific fsimage XML file is affected by this bug.
+ o("oivRevision", VersionInfo.getRevision());
+ out.print("</version>\n");
+
ArrayList<FileSummary.Section> sections = Lists.newArrayList(summary
.getSectionsList());
Collections.sort(sections, new Comparator<FileSummary.Section>() {
@@ -146,6 +180,8 @@ public final class PBImageXmlWriter {
out.print("<CacheManagerSection>");
CacheManagerSection s = CacheManagerSection.parseDelimitedFrom(is);
o("nextDirectiveId", s.getNextDirectiveId());
+ o("numDirectives", s.getNumDirectives());
+ o("numPools", s.getNumPools());
for (int i = 0; i < s.getNumPools(); ++i) {
CachePoolInfoProto p = CachePoolInfoProto.parseDelimitedFrom(is);
out.print("<pool>");
@@ -163,7 +199,7 @@ public final class PBImageXmlWriter {
.o("replication", p.getReplication()).o("pool", p.getPool());
out.print("<expiration>");
CacheDirectiveInfoExpirationProto e = p.getExpiration();
- o("millis", e.getMillis()).o("relatilve", e.getIsRelative());
+ o("millis", e.getMillis()).o("relative", e.getIsRelative());
out.print("</expiration>\n");
out.print("</directive>\n");
}
@@ -187,13 +223,48 @@ public final class PBImageXmlWriter {
out.print("</FileUnderConstructionSection>\n");
}
+ private void dumpXattrs(INodeSection.XAttrFeatureProto xattrs) {
+ out.print("<xattrs>");
+ for (INodeSection.XAttrCompactProto xattr : xattrs.getXAttrsList()) {
+ out.print("<xattr>");
+ int encodedName = xattr.getName();
+ int ns = (XATTR_NAMESPACE_MASK & (encodedName >> XATTR_NAMESPACE_OFFSET)) |
+ ((XATTR_NAMESPACE_EXT_MASK & (encodedName >> XATTR_NAMESPACE_EXT_OFFSET)) << 2);
+ o("ns", XAttrProtos.XAttrProto.
+ XAttrNamespaceProto.valueOf(ns).toString());
+ o("name", stringTable[XATTR_NAME_MASK & (encodedName >> XATTR_NAME_OFFSET)]);
+ ByteString val = xattr.getValue();
+ if (val.isValidUtf8()) {
+ o("val", val.toStringUtf8());
+ } else {
+ o("valHex", Hex.encodeHexString(val.toByteArray()));
+ }
+ out.print("</xattr>");
+ }
+ out.print("</xattrs>");
+ }
+
private void dumpINodeDirectory(INodeDirectory d) {
o("mtime", d.getModificationTime()).o("permission",
dumpPermission(d.getPermission()));
+ if (d.hasXAttrs()) {
+ dumpXattrs(d.getXAttrs());
+ }
dumpAcls(d.getAcl());
if (d.hasDsQuota() && d.hasNsQuota()) {
o("nsquota", d.getNsQuota()).o("dsquota", d.getDsQuota());
}
+ INodeSection.QuotaByStorageTypeFeatureProto typeQuotas =
+ d.getTypeQuotas();
+ if (typeQuotas != null) {
+ for (INodeSection.QuotaByStorageTypeEntryProto entry:
+ typeQuotas.getQuotasList()) {
+ out.print("<typeQuota>");
+ o("type", entry.getStorageType().toString());
+ o("quota", entry.getQuota());
+ out.print("</typeQuota>");
+ }
+ }
}
private void dumpINodeDirectorySection(InputStream in) throws IOException {
@@ -208,10 +279,10 @@ public final class PBImageXmlWriter {
out.print("<directory>");
o("parent", e.getParent());
for (long id : e.getChildrenList()) {
- o("inode", id);
+ o("child", id);
}
for (int refId : e.getRefChildrenList()) {
- o("inodereference-index", refId);
+ o("refChild", refId);
}
out.print("</directory>\n");
}
@@ -244,6 +315,9 @@ public final class PBImageXmlWriter {
.o("atime", f.getAccessTime())
.o("preferredBlockSize", f.getPreferredBlockSize())
.o("permission", dumpPermission(f.getPermission()));
+ if (f.hasXAttrs()) {
+ dumpXattrs(f.getXAttrs());
+ }
dumpAcls(f.getAcl());
if (f.getBlocksCount() > 0) {
out.print("<blocks>");
@@ -255,6 +329,12 @@ public final class PBImageXmlWriter {
}
out.print("</blocks>\n");
}
+ if (f.hasStoragePolicyID()) {
+ o("storagePolicyId", f.getStoragePolicyID());
+ }
+ if (f.getIsStriped()) {
+ out.print("<isStriped/>");
+ }
if (f.hasFileUC()) {
INodeSection.FileUnderConstructionFeature u = f.getFileUC();
@@ -281,25 +361,28 @@ public final class PBImageXmlWriter {
INodeSection s = INodeSection.parseDelimitedFrom(in);
out.print("<INodeSection>");
o("lastInodeId", s.getLastInodeId());
+ o("numInodes", s.getNumInodes());
for (int i = 0; i < s.getNumInodes(); ++i) {
INodeSection.INode p = INodeSection.INode.parseDelimitedFrom(in);
out.print("<inode>");
- o("id", p.getId()).o("type", p.getType()).o("name",
- p.getName().toStringUtf8());
-
- if (p.hasFile()) {
- dumpINodeFile(p.getFile());
- } else if (p.hasDirectory()) {
- dumpINodeDirectory(p.getDirectory());
- } else if (p.hasSymlink()) {
- dumpINodeSymlink(p.getSymlink());
- }
-
+ dumpINodeFields(p);
out.print("</inode>\n");
}
out.print("</INodeSection>\n");
}
+ private void dumpINodeFields(INodeSection.INode p) {
+ o("id", p.getId()).o("type", p.getType()).o("name",
+ p.getName().toStringUtf8());
+ if (p.hasFile()) {
+ dumpINodeFile(p.getFile());
+ } else if (p.hasDirectory()) {
+ dumpINodeDirectory(p.getDirectory());
+ } else if (p.hasSymlink()) {
+ dumpINodeSymlink(p.getSymlink());
+ }
+ }
+
private void dumpINodeSymlink(INodeSymlink s) {
o("permission", dumpPermission(s.getPermission()))
.o("target", s.getTarget().toStringUtf8())
@@ -308,7 +391,8 @@ public final class PBImageXmlWriter {
private void dumpNameSection(InputStream in) throws IOException {
NameSystemSection s = NameSystemSection.parseDelimitedFrom(in);
- out.print("<NameSection>\n");
+ out.print("<NameSection>");
+ o("namespaceId", s.getNamespaceId());
o("genstampV1", s.getGenstampV1()).o("genstampV2", s.getGenstampV2())
.o("genstampV1Limit", s.getGenstampV1Limit())
.o("lastAllocatedBlockId", s.getLastAllocatedBlockId())
@@ -317,18 +401,73 @@ public final class PBImageXmlWriter {
}
private String dumpPermission(long permission) {
- return FSImageFormatPBINode.Loader.loadPermission(permission, stringTable)
- .toString();
+ PermissionStatus permStatus = FSImageFormatPBINode.Loader.
+ loadPermission(permission, stringTable);
+ return String.format("%s:%s:%04o", permStatus.getUserName(),
+ permStatus.getGroupName(), permStatus.getPermission().toExtendedShort());
}
private void dumpSecretManagerSection(InputStream is) throws IOException {
out.print("<SecretManagerSection>");
SecretManagerSection s = SecretManagerSection.parseDelimitedFrom(is);
+ int expectedNumDelegationKeys = s.getNumKeys();
+ int expectedNumTokens = s.getNumTokens();
o("currentId", s.getCurrentId()).o("tokenSequenceNumber",
- s.getTokenSequenceNumber());
+ s.getTokenSequenceNumber()).
+ o("numDelegationKeys", expectedNumDelegationKeys).
+ o("numTokens", expectedNumTokens);
+ for (int i = 0; i < expectedNumDelegationKeys; i++) {
+ SecretManagerSection.DelegationKey dkey =
+ SecretManagerSection.DelegationKey.parseDelimitedFrom(is);
+ out.print("<delegationKey>");
+ o("id", dkey.getId());
+ o("key", Hex.encodeHexString(dkey.getKey().toByteArray()));
+ if (dkey.hasExpiryDate()) {
+ dumpDate("expiry", dkey.getExpiryDate());
+ }
+ out.print("</delegationKey>");
+ }
+ for (int i = 0; i < expectedNumTokens; i++) {
+ SecretManagerSection.PersistToken token =
+ SecretManagerSection.PersistToken.parseDelimitedFrom(is);
+ out.print("<token>");
+ if (token.hasVersion()) {
+ o("version", token.getVersion());
+ }
+ if (token.hasOwner()) {
+ o("owner", token.getOwner());
+ }
+ if (token.hasRenewer()) {
+ o("renewer", token.getRenewer());
+ }
+ if (token.hasRealUser()) {
+ o("realUser", token.getRealUser());
+ }
+ if (token.hasIssueDate()) {
+ dumpDate("issueDate", token.getIssueDate());
+ }
+ if (token.hasMaxDate()) {
+ dumpDate("maxDate", token.getMaxDate());
+ }
+ if (token.hasSequenceNumber()) {
+ o("sequenceNumber", token.getSequenceNumber());
+ }
+ if (token.hasMasterKeyId()) {
+ o("masterKeyId", token.getMasterKeyId());
+ }
+ if (token.hasExpiryDate()) {
+ dumpDate("expiryDate", token.getExpiryDate());
+ }
+ out.print("</token>");
+ }
out.print("</SecretManagerSection>");
}
+ private void dumpDate(String tag, long date) {
+ out.print("<" + tag + ">" +
+ isoDateFormat.format(new Date(date)) + "</" + tag + ">");
+ }
+
private void dumpSnapshotDiffSection(InputStream in) throws IOException {
out.print("<SnapshotDiffSection>");
while (true) {
@@ -337,30 +476,46 @@ public final class PBImageXmlWriter {
if (e == null) {
break;
}
- out.print("<diff>");
- o("inodeid", e.getInodeId());
+ switch (e.getType()) {
+ case FILEDIFF:
+ out.print("<fileDiffEntry>");
+ break;
+ case DIRECTORYDIFF:
+ out.print("<dirDiffEntry>");
+ break;
+ default:
+ throw new IOException("unknown DiffEntry type " + e.getType());
+ }
+ o("inodeId", e.getInodeId());
+ o("count", e.getNumOfDiff());
switch (e.getType()) {
case FILEDIFF: {
for (int i = 0; i < e.getNumOfDiff(); ++i) {
- out.print("<filediff>");
+ out.print("<fileDiff>");
SnapshotDiffSection.FileDiff f = SnapshotDiffSection.FileDiff
.parseDelimitedFrom(in);
o("snapshotId", f.getSnapshotId()).o("size", f.getFileSize()).o(
"name", f.getName().toStringUtf8());
- out.print("</filediff>\n");
+ out.print("</fileDiff>\n");
}
}
break;
case DIRECTORYDIFF: {
for (int i = 0; i < e.getNumOfDiff(); ++i) {
- out.print("<dirdiff>");
+ out.print("<dirDiff>");
SnapshotDiffSection.DirectoryDiff d = SnapshotDiffSection.DirectoryDiff
.parseDelimitedFrom(in);
o("snapshotId", d.getSnapshotId())
- .o("isSnapshotroot", d.getIsSnapshotRoot())
.o("childrenSize", d.getChildrenSize())
- .o("name", d.getName().toStringUtf8());
-
+ .o("isSnapshotRoot", d.getIsSnapshotRoot())
+ .o("name", d.getName().toStringUtf8())
+ .o("createdListSize", d.getCreatedListSize());
+ for (long did : d.getDeletedINodeList()) {
+ o("deletedInode", did);
+ }
+ for (int dRefid : d.getDeletedINodeRefList()) {
+ o("deletedInoderef", dRefid);
+ }
for (int j = 0; j < d.getCreatedListSize(); ++j) {
SnapshotDiffSection.CreatedListEntry ce = SnapshotDiffSection.CreatedListEntry
.parseDelimitedFrom(in);
@@ -368,24 +523,23 @@ public final class PBImageXmlWriter {
o("name", ce.getName().toStringUtf8());
out.print("</created>\n");
}
- for (long did : d.getDeletedINodeList()) {
- out.print("<deleted>");
- o("inode", did);
- out.print("</deleted>\n");
- }
- for (int dRefid : d.getDeletedINodeRefList()) {
- out.print("<deleted>");
- o("inodereference-index", dRefid);
- out.print("</deleted>\n");
- }
- out.print("</dirdiff>\n");
+ out.print("</dirDiff>\n");
}
- }
break;
+ }
default:
break;
}
- out.print("</diff>");
+ switch (e.getType()) {
+ case FILEDIFF:
+ out.print("</fileDiffEntry>");
+ break;
+ case DIRECTORYDIFF:
+ out.print("</dirDiffEntry>");
+ break;
+ default:
+ throw new IOException("unknown DiffEntry type " + e.getType());
+ }
}
out.print("</SnapshotDiffSection>\n");
}
@@ -394,6 +548,7 @@ public final class PBImageXmlWriter {
out.print("<SnapshotSection>");
SnapshotSection s = SnapshotSection.parseDelimitedFrom(in);
o("snapshotCounter", s.getSnapshotCounter());
+ o("numSnapshots", s.getNumSnapshots());
if (s.getSnapshottableDirCount() > 0) {
out.print("<snapshottableDir>");
for (long id : s.getSnapshottableDirList()) {
@@ -404,7 +559,12 @@ public final class PBImageXmlWriter {
for (int i = 0; i < s.getNumSnapshots(); ++i) {
SnapshotSection.Snapshot pbs = SnapshotSection.Snapshot
.parseDelimitedFrom(in);
- o("snapshot", pbs.getSnapshotId());
+ out.print("<snapshot>");
+ o("id", pbs.getSnapshotId());
+ out.print("<root>");
+ dumpINodeFields(pbs.getRoot());
+ out.print("</root>");
+ out.print("</snapshot>");
}
out.print("</SnapshotSection>\n");
}
@@ -420,6 +580,14 @@ public final class PBImageXmlWriter {
}
private PBImageXmlWriter o(final String e, final Object v) {
+ if (v instanceof Boolean) {
+ // For booleans, the presence of the element indicates true, and its
+ // absence indicates false.
+ if ((Boolean)v != false) {
+ out.print("<" + e + "/>");
+ }
+ return this;
+ }
out.print("<" + e + ">" +
XMLUtils.mangleXmlString(v.toString(), true) + "</" + e + ">");
return this;
http://git-wip-us.apache.org/repos/asf/hadoop/blob/700b0e40/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TestOfflineImageViewer.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TestOfflineImageViewer.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TestOfflineImageViewer.java
index 91b79e2..c7a6ae9 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TestOfflineImageViewer.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TestOfflineImageViewer.java
@@ -47,6 +47,8 @@ import javax.xml.parsers.ParserConfigurationException;
import javax.xml.parsers.SAXParser;
import javax.xml.parsers.SAXParserFactory;
+import com.google.common.io.Files;
+import org.apache.commons.io.FileUtils;
import org.apache.commons.io.output.NullOutputStream;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -66,6 +68,8 @@ import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.token.Token;
+import org.apache.hadoop.test.GenericTestUtils;
+import org.apache.log4j.Level;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
@@ -89,8 +93,7 @@ public class TestOfflineImageViewer {
final static HashMap<String, FileStatus> writtenFiles = Maps.newHashMap();
static int dirCount = 0;
- @Rule
- public TemporaryFolder folder = new TemporaryFolder();
+ private static File tempDir;
// Create a populated namespace for later testing. Save its contents to a
// data structure and store its fsimage location.
@@ -98,6 +101,7 @@ public class TestOfflineImageViewer {
// multiple tests.
@BeforeClass
public static void createOriginalFSImage() throws IOException {
+ tempDir = Files.createTempDir();
MiniDFSCluster cluster = null;
try {
Configuration conf = new Configuration();
@@ -169,6 +173,9 @@ public class TestOfflineImageViewer {
hdfs.setXAttr(xattr, "user.a2", new byte[]{ 0x37, 0x38, 0x39 });
// OIV should be able to handle empty value XAttrs
hdfs.setXAttr(xattr, "user.a3", null);
+ // OIV should be able to handle XAttr values that can't be expressed
+ // as UTF8
+ hdfs.setXAttr(xattr, "user.a4", new byte[]{ -0x3d, 0x28 });
writtenFiles.put(xattr.toString(), hdfs.getFileStatus(xattr));
// Write results to the fsimage file
@@ -190,6 +197,7 @@ public class TestOfflineImageViewer {
@AfterClass
public static void deleteOriginalFSImage() throws IOException {
+ FileUtils.deleteQuietly(tempDir);
if (originalFsimage != null && originalFsimage.exists()) {
originalFsimage.delete();
}
@@ -204,7 +212,7 @@ public class TestOfflineImageViewer {
@Test(expected = IOException.class)
public void testTruncatedFSImage() throws IOException {
- File truncatedFile = folder.newFile();
+ File truncatedFile = new File(tempDir, "truncatedFsImage");
PrintStream output = new PrintStream(NullOutputStream.NULL_OUTPUT_STREAM);
copyPartOfFile(originalFsimage, truncatedFile);
new FileDistributionCalculator(new Configuration(), 0, 0, output)
@@ -449,4 +457,46 @@ public class TestOfflineImageViewer {
connection.connect();
assertEquals(expectedCode, connection.getResponseCode());
}
+
+ /**
+ * Tests the ReverseXML processor.
+ *
+ * 1. Translate fsimage -> reverseImage.xml
+ * 2. Translate reverseImage.xml -> reverseImage
+ * 3. Translate reverseImage -> reverse2Image.xml
+ * 4. Verify that reverseImage.xml and reverse2Image.xml match
+ *
+ * @throws Throwable
+ */
+ @Test
+ public void testReverseXmlRoundTrip() throws Throwable {
+ GenericTestUtils.setLogLevel(OfflineImageReconstructor.LOG,
+ Level.TRACE);
+ File reverseImageXml = new File(tempDir, "reverseImage.xml");
+ File reverseImage = new File(tempDir, "reverseImage");
+ File reverseImage2Xml = new File(tempDir, "reverseImage2.xml");
+ LOG.info("Creating reverseImage.xml=" + reverseImageXml.getAbsolutePath() +
+ ", reverseImage=" + reverseImage.getAbsolutePath() +
+ ", reverseImage2Xml=" + reverseImage2Xml.getAbsolutePath());
+ if (OfflineImageViewerPB.run(new String[] { "-p", "XML",
+ "-i", originalFsimage.getAbsolutePath(),
+ "-o", reverseImageXml.getAbsolutePath() }) != 0) {
+ throw new IOException("oiv returned failure creating first XML file.");
+ }
+ if (OfflineImageViewerPB.run(new String[] { "-p", "ReverseXML",
+ "-i", reverseImageXml.getAbsolutePath(),
+ "-o", reverseImage.getAbsolutePath() }) != 0) {
+ throw new IOException("oiv returned failure recreating fsimage file.");
+ }
+ if (OfflineImageViewerPB.run(new String[] { "-p", "XML",
+ "-i", reverseImage.getAbsolutePath(),
+ "-o", reverseImage2Xml.getAbsolutePath() }) != 0) {
+ throw new IOException("oiv returned failure creating second " +
+ "XML file.");
+ }
+ // The XML file we wrote based on the re-created fsimage should be the
+ // same as the one we dumped from the original fsimage.
+ Assert.assertEquals("",
+ GenericTestUtils.getFilesDiff(reverseImageXml, reverseImage2Xml));
+ }
}