You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hdt.apache.org by ad...@apache.org on 2013/01/29 17:42:45 UTC

[3/4] git commit: navigator content extension ported over

navigator content extension ported over

Project: http://git-wip-us.apache.org/repos/asf/incubator-hdt/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hdt/commit/8b28cfa4
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hdt/tree/8b28cfa4
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hdt/diff/8b28cfa4

Branch: refs/heads/master
Commit: 8b28cfa42456e32003130831653c133be4908bcb
Parents: 464b2d7
Author: adamb <ad...@apache.org>
Authored: Mon Jan 28 15:10:22 2013 -0600
Committer: adamb <ad...@apache.org>
Committed: Mon Jan 28 15:10:22 2013 -0600

----------------------------------------------------------------------
 .../apache/hdt/core/cluster/ServerRegistry.java    |  202 +++++++++
 org.apache.hdt.dfs.core/.classpath                 |    7 +
 org.apache.hdt.dfs.core/.project                   |   28 ++
 .../.settings/org.eclipse.jdt.core.prefs           |    7 +
 org.apache.hdt.dfs.core/META-INF/MANIFEST.MF       |   15 +
 .../bin/org/apache/hdt/dfs/core/Activator.class    |  Bin 0 -> 898 bytes
 .../bin/org/apache/hdt/dfs/core/DFSContent.class   |  Bin 0 -> 241 bytes
 .../apache/hdt/dfs/core/DFSContentProvider$1.class |  Bin 0 -> 836 bytes
 .../apache/hdt/dfs/core/DFSContentProvider$2.class |  Bin 0 -> 1043 bytes
 .../apache/hdt/dfs/core/DFSContentProvider.class   |  Bin 0 -> 5369 bytes
 .../bin/org/apache/hdt/dfs/core/DFSFile$1.class    |  Bin 0 -> 1013 bytes
 .../hdt/dfs/core/DFSFile$IStorageAdapter.class     |  Bin 0 -> 1961 bytes
 .../bin/org/apache/hdt/dfs/core/DFSFile.class      |  Bin 0 -> 8359 bytes
 .../bin/org/apache/hdt/dfs/core/DFSFolder$1.class  |  Bin 0 -> 1916 bytes
 .../bin/org/apache/hdt/dfs/core/DFSFolder.class    |  Bin 0 -> 6273 bytes
 .../org/apache/hdt/dfs/core/DFSLocation$1.class    |  Bin 0 -> 2142 bytes
 .../bin/org/apache/hdt/dfs/core/DFSLocation.class  |  Bin 0 -> 2373 bytes
 .../apache/hdt/dfs/core/DFSLocationsRoot$1.class   |  Bin 0 -> 1143 bytes
 .../org/apache/hdt/dfs/core/DFSLocationsRoot.class |  Bin 0 -> 3360 bytes
 .../bin/org/apache/hdt/dfs/core/DFSMessage.class   |  Bin 0 -> 795 bytes
 .../bin/org/apache/hdt/dfs/core/DFSPath.class      |  Bin 0 -> 3798 bytes
 org.apache.hdt.dfs.core/build.properties           |    4 +
 .../src/org/apache/hdt/dfs/core/Activator.java     |   50 ++
 .../src/org/apache/hdt/dfs/core/DFSContent.java    |   32 ++
 .../apache/hdt/dfs/core/DFSContentProvider.java    |  244 ++++++++++
 .../src/org/apache/hdt/dfs/core/DFSFile.java       |  350 +++++++++++++++
 .../src/org/apache/hdt/dfs/core/DFSFolder.java     |  213 +++++++++
 .../src/org/apache/hdt/dfs/core/DFSLocation.java   |  108 +++++
 .../org/apache/hdt/dfs/core/DFSLocationsRoot.java  |  150 ++++++
 .../src/org/apache/hdt/dfs/core/DFSMessage.java    |   57 +++
 .../src/org/apache/hdt/dfs/core/DFSPath.java       |  160 +++++++
 org.apache.hdt.dfs.ui/.classpath                   |    7 +
 org.apache.hdt.dfs.ui/.project                     |   28 ++
 .../.settings/org.eclipse.jdt.core.prefs           |    7 +
 org.apache.hdt.dfs.ui/META-INF/MANIFEST.MF         |   14 +
 .../bin/org/apache/hdt/dfs/ui/Activator.class      |  Bin 0 -> 890 bytes
 org.apache.hdt.dfs.ui/build.properties             |    6 +
 org.apache.hdt.dfs.ui/plugin.xml                   |   50 ++
 .../resources/elephantblue16x16.gif                |  Bin 0 -> 1053 bytes
 .../src/org/apache/hdt/dfs/ui/Activator.java       |   50 ++
 org.apache.hdt.ui/META-INF/MANIFEST.MF             |    1 +
 .../org/apache/hdt/ui/cluster/ServerRegistry.java  |  204 ---------
 .../src/org/apache/hdt/ui/views/ClusterView.java   |    2 +-
 .../hdt/ui/wizards/HadoopLocationWizard.java       |    2 +-
 44 files changed, 1792 insertions(+), 206 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/8b28cfa4/org.apache.hdt.core/src/org/apache/hdt/core/cluster/ServerRegistry.java
----------------------------------------------------------------------
diff --git a/org.apache.hdt.core/src/org/apache/hdt/core/cluster/ServerRegistry.java b/org.apache.hdt.core/src/org/apache/hdt/core/cluster/ServerRegistry.java
new file mode 100644
index 0000000..7529500
--- /dev/null
+++ b/org.apache.hdt.core/src/org/apache/hdt/core/cluster/ServerRegistry.java
@@ -0,0 +1,202 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hdt.core.cluster;
+
+import java.io.File;
+import java.io.FilenameFilter;
+import java.io.IOException;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+import java.util.TreeMap;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hdt.core.Activator;
+import org.eclipse.jface.dialogs.MessageDialog;
+
+/**
+ * Register of Hadoop locations.
+ * 
+ * Each location corresponds to a Hadoop {@link Configuration} stored as an
+ * XML file in the workspace plug-in configuration directory:
+ * <p>
+ * <tt>
+ * &lt;workspace-dir&gt;/.metadata/.plugins/org.apache.hadoop.eclipse/locations/*.xml
+ * </tt>
+ * 
+ */
+public class ServerRegistry {
+
+  private static final ServerRegistry INSTANCE = new ServerRegistry();
+
+  public static final int SERVER_ADDED = 0;
+
+  public static final int SERVER_REMOVED = 1;
+
+  public static final int SERVER_STATE_CHANGED = 2;
+
+  private final File baseDir =
+      Activator.getDefault().getStateLocation().toFile();
+
+  private final File saveDir = new File(baseDir, "locations");
+
+  private ServerRegistry() {
+    if (saveDir.exists() && !saveDir.isDirectory())
+      saveDir.delete();
+    if (!saveDir.exists())
+      saveDir.mkdirs();
+
+    load();
+  }
+
+  private Map<String, HadoopCluster> servers;
+
+  private Set<IHadoopClusterListener> listeners =
+      new HashSet<IHadoopClusterListener>();
+
+  public static ServerRegistry getInstance() {
+    return INSTANCE;
+  }
+
+  public synchronized Collection<HadoopCluster> getServers() {
+    return Collections.unmodifiableCollection(servers.values());
+  }
+
+  /**
+   * Load all available locations from the workspace configuration directory.
+   */
+  private synchronized void load() {
+    Map<String, HadoopCluster> map = new TreeMap<String, HadoopCluster>();
+    for (File file : saveDir.listFiles()) {
+      try {
+        HadoopCluster server = new HadoopCluster(file);
+        map.put(server.getLocationName(), server);
+
+      } catch (Exception exn) {
+        System.err.println(exn);
+      }
+    }
+    this.servers = map;
+  }
+
+  private synchronized void store() {
+    try {
+      File dir = File.createTempFile("locations", "new", baseDir);
+      dir.delete();
+      dir.mkdirs();
+
+      for (HadoopCluster server : servers.values()) {
+        server.storeSettingsToFile(new File(dir, server.getLocationName()
+            + ".xml"));
+      }
+
+      FilenameFilter XMLFilter = new FilenameFilter() {
+        public boolean accept(File dir, String name) {
+          String lower = name.toLowerCase();
+          return lower.endsWith(".xml");
+        }
+      };
+
+      File backup = new File(baseDir, "locations.backup");
+      if (backup.exists()) {
+        for (File file : backup.listFiles(XMLFilter))
+          if (!file.delete())
+            throw new IOException("Unable to delete backup location file: "
+                + file);
+        if (!backup.delete())
+          throw new IOException(
+              "Unable to delete backup location directory: " + backup);
+      }
+
+      saveDir.renameTo(backup);
+      dir.renameTo(saveDir);
+
+    } catch (IOException ioe) {
+      ioe.printStackTrace();
+      MessageDialog.openError(null,
+          "Saving configuration of Hadoop locations failed", ioe.toString());
+    }
+  }
+
+  public void dispose() {
+    for (HadoopCluster server : getServers()) {
+      server.dispose();
+    }
+  }
+
+  public synchronized HadoopCluster getServer(String location) {
+    return servers.get(location);
+  }
+
+  /*
+   * HadoopServer map listeners
+   */
+
+  public void addListener(IHadoopClusterListener l) {
+    synchronized (listeners) {
+      listeners.add(l);
+    }
+  }
+
+  public void removeListener(IHadoopClusterListener l) {
+    synchronized (listeners) {
+      listeners.remove(l);
+    }
+  }
+
+  private void fireListeners(HadoopCluster location, int kind) {
+    synchronized (listeners) {
+      for (IHadoopClusterListener listener : listeners) {
+        listener.serverChanged(location, kind);
+      }
+    }
+  }
+
+  public synchronized void removeServer(HadoopCluster server) {
+    this.servers.remove(server.getLocationName());
+    store();
+    fireListeners(server, SERVER_REMOVED);
+  }
+
+  public synchronized void addServer(HadoopCluster server) {
+    this.servers.put(server.getLocationName(), server);
+    store();
+    fireListeners(server, SERVER_ADDED);
+  }
+
+  /**
+   * Update one Hadoop location
+   * 
+   * @param originalName the original location name (might have changed)
+   * @param server the location
+   */
+  public synchronized void updateServer(String originalName,
+      HadoopCluster server) {
+
+    // Update the map if the location name has changed
+    if (!server.getLocationName().equals(originalName)) {
+      servers.remove(originalName);
+      servers.put(server.getLocationName(), server);
+    }
+    store();
+    fireListeners(server, SERVER_STATE_CHANGED);
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/8b28cfa4/org.apache.hdt.dfs.core/.classpath
----------------------------------------------------------------------
diff --git a/org.apache.hdt.dfs.core/.classpath b/org.apache.hdt.dfs.core/.classpath
new file mode 100644
index 0000000..ad32c83
--- /dev/null
+++ b/org.apache.hdt.dfs.core/.classpath
@@ -0,0 +1,7 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<classpath>
+	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
+	<classpathentry kind="con" path="org.eclipse.pde.core.requiredPlugins"/>
+	<classpathentry kind="src" path="src"/>
+	<classpathentry kind="output" path="bin"/>
+</classpath>

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/8b28cfa4/org.apache.hdt.dfs.core/.project
----------------------------------------------------------------------
diff --git a/org.apache.hdt.dfs.core/.project b/org.apache.hdt.dfs.core/.project
new file mode 100644
index 0000000..e4cccbd
--- /dev/null
+++ b/org.apache.hdt.dfs.core/.project
@@ -0,0 +1,28 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<projectDescription>
+	<name>org.apache.hdt.dfs.core</name>
+	<comment></comment>
+	<projects>
+	</projects>
+	<buildSpec>
+		<buildCommand>
+			<name>org.eclipse.jdt.core.javabuilder</name>
+			<arguments>
+			</arguments>
+		</buildCommand>
+		<buildCommand>
+			<name>org.eclipse.pde.ManifestBuilder</name>
+			<arguments>
+			</arguments>
+		</buildCommand>
+		<buildCommand>
+			<name>org.eclipse.pde.SchemaBuilder</name>
+			<arguments>
+			</arguments>
+		</buildCommand>
+	</buildSpec>
+	<natures>
+		<nature>org.eclipse.pde.PluginNature</nature>
+		<nature>org.eclipse.jdt.core.javanature</nature>
+	</natures>
+</projectDescription>

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/8b28cfa4/org.apache.hdt.dfs.core/.settings/org.eclipse.jdt.core.prefs
----------------------------------------------------------------------
diff --git a/org.apache.hdt.dfs.core/.settings/org.eclipse.jdt.core.prefs b/org.apache.hdt.dfs.core/.settings/org.eclipse.jdt.core.prefs
new file mode 100644
index 0000000..c537b63
--- /dev/null
+++ b/org.apache.hdt.dfs.core/.settings/org.eclipse.jdt.core.prefs
@@ -0,0 +1,7 @@
+eclipse.preferences.version=1
+org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled
+org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
+org.eclipse.jdt.core.compiler.compliance=1.6
+org.eclipse.jdt.core.compiler.problem.assertIdentifier=error
+org.eclipse.jdt.core.compiler.problem.enumIdentifier=error
+org.eclipse.jdt.core.compiler.source=1.6

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/8b28cfa4/org.apache.hdt.dfs.core/META-INF/MANIFEST.MF
----------------------------------------------------------------------
diff --git a/org.apache.hdt.dfs.core/META-INF/MANIFEST.MF b/org.apache.hdt.dfs.core/META-INF/MANIFEST.MF
new file mode 100644
index 0000000..639bafd
--- /dev/null
+++ b/org.apache.hdt.dfs.core/META-INF/MANIFEST.MF
@@ -0,0 +1,15 @@
+Manifest-Version: 1.0
+Bundle-ManifestVersion: 2
+Bundle-Name: Hadoop Development Tools DFS Core
+Bundle-SymbolicName: org.apache.hdt.dfs.core
+Bundle-Version: 0.0.0
+Bundle-Activator: org.apache.hdt.dfs.core.Activator
+Bundle-Vendor: Apache Software Foundation
+Require-Bundle: org.eclipse.ui,
+ org.eclipse.core.runtime,
+ org.apache.hdt.core,
+ org.apache.hadoop.eclipse,
+ org.apache.hdt.ui,
+ org.eclipse.core.resources
+Bundle-RequiredExecutionEnvironment: JavaSE-1.6
+Bundle-ActivationPolicy: lazy

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/8b28cfa4/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/Activator.class
----------------------------------------------------------------------
diff --git a/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/Activator.class b/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/Activator.class
new file mode 100644
index 0000000..27ca6d2
Binary files /dev/null and b/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/Activator.class differ

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/8b28cfa4/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSContent.class
----------------------------------------------------------------------
diff --git a/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSContent.class b/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSContent.class
new file mode 100644
index 0000000..8abf44f
Binary files /dev/null and b/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSContent.class differ

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/8b28cfa4/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSContentProvider$1.class
----------------------------------------------------------------------
diff --git a/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSContentProvider$1.class b/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSContentProvider$1.class
new file mode 100644
index 0000000..ca51c50
Binary files /dev/null and b/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSContentProvider$1.class differ

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/8b28cfa4/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSContentProvider$2.class
----------------------------------------------------------------------
diff --git a/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSContentProvider$2.class b/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSContentProvider$2.class
new file mode 100644
index 0000000..7851672
Binary files /dev/null and b/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSContentProvider$2.class differ

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/8b28cfa4/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSContentProvider.class
----------------------------------------------------------------------
diff --git a/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSContentProvider.class b/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSContentProvider.class
new file mode 100644
index 0000000..50113d1
Binary files /dev/null and b/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSContentProvider.class differ

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/8b28cfa4/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSFile$1.class
----------------------------------------------------------------------
diff --git a/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSFile$1.class b/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSFile$1.class
new file mode 100644
index 0000000..b4e4fdd
Binary files /dev/null and b/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSFile$1.class differ

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/8b28cfa4/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSFile$IStorageAdapter.class
----------------------------------------------------------------------
diff --git a/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSFile$IStorageAdapter.class b/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSFile$IStorageAdapter.class
new file mode 100644
index 0000000..a69abbb
Binary files /dev/null and b/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSFile$IStorageAdapter.class differ

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/8b28cfa4/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSFile.class
----------------------------------------------------------------------
diff --git a/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSFile.class b/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSFile.class
new file mode 100644
index 0000000..1fa979b
Binary files /dev/null and b/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSFile.class differ

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/8b28cfa4/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSFolder$1.class
----------------------------------------------------------------------
diff --git a/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSFolder$1.class b/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSFolder$1.class
new file mode 100644
index 0000000..1a62e05
Binary files /dev/null and b/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSFolder$1.class differ

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/8b28cfa4/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSFolder.class
----------------------------------------------------------------------
diff --git a/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSFolder.class b/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSFolder.class
new file mode 100644
index 0000000..f5a9625
Binary files /dev/null and b/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSFolder.class differ

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/8b28cfa4/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSLocation$1.class
----------------------------------------------------------------------
diff --git a/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSLocation$1.class b/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSLocation$1.class
new file mode 100644
index 0000000..2f1697b
Binary files /dev/null and b/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSLocation$1.class differ

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/8b28cfa4/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSLocation.class
----------------------------------------------------------------------
diff --git a/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSLocation.class b/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSLocation.class
new file mode 100644
index 0000000..6f73f90
Binary files /dev/null and b/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSLocation.class differ

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/8b28cfa4/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSLocationsRoot$1.class
----------------------------------------------------------------------
diff --git a/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSLocationsRoot$1.class b/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSLocationsRoot$1.class
new file mode 100644
index 0000000..87fbd0b
Binary files /dev/null and b/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSLocationsRoot$1.class differ

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/8b28cfa4/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSLocationsRoot.class
----------------------------------------------------------------------
diff --git a/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSLocationsRoot.class b/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSLocationsRoot.class
new file mode 100644
index 0000000..e342c1c
Binary files /dev/null and b/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSLocationsRoot.class differ

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/8b28cfa4/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSMessage.class
----------------------------------------------------------------------
diff --git a/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSMessage.class b/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSMessage.class
new file mode 100644
index 0000000..252d9b7
Binary files /dev/null and b/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSMessage.class differ

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/8b28cfa4/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSPath.class
----------------------------------------------------------------------
diff --git a/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSPath.class b/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSPath.class
new file mode 100644
index 0000000..1b9086b
Binary files /dev/null and b/org.apache.hdt.dfs.core/bin/org/apache/hdt/dfs/core/DFSPath.class differ

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/8b28cfa4/org.apache.hdt.dfs.core/build.properties
----------------------------------------------------------------------
diff --git a/org.apache.hdt.dfs.core/build.properties b/org.apache.hdt.dfs.core/build.properties
new file mode 100644
index 0000000..34d2e4d
--- /dev/null
+++ b/org.apache.hdt.dfs.core/build.properties
@@ -0,0 +1,4 @@
+source.. = src/
+output.. = bin/
+bin.includes = META-INF/,\
+               .

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/8b28cfa4/org.apache.hdt.dfs.core/src/org/apache/hdt/dfs/core/Activator.java
----------------------------------------------------------------------
diff --git a/org.apache.hdt.dfs.core/src/org/apache/hdt/dfs/core/Activator.java b/org.apache.hdt.dfs.core/src/org/apache/hdt/dfs/core/Activator.java
new file mode 100644
index 0000000..c9b8c4e
--- /dev/null
+++ b/org.apache.hdt.dfs.core/src/org/apache/hdt/dfs/core/Activator.java
@@ -0,0 +1,50 @@
+package org.apache.hdt.dfs.core;
+
+import org.eclipse.ui.plugin.AbstractUIPlugin;
+import org.osgi.framework.BundleContext;
+
+/**
+ * The activator class controls the plug-in life cycle
+ */
+public class Activator extends AbstractUIPlugin {
+
+	// The plug-in ID
+	public static final String PLUGIN_ID = "org.apache.hdt.dfs.core"; //$NON-NLS-1$
+
+	// The shared instance
+	private static Activator plugin;
+	
+	/**
+	 * The constructor
+	 */
+	public Activator() {
+	}
+
+	/*
+	 * (non-Javadoc)
+	 * @see org.eclipse.ui.plugin.AbstractUIPlugin#start(org.osgi.framework.BundleContext)
+	 */
+	public void start(BundleContext context) throws Exception {
+		super.start(context);
+		plugin = this;
+	}
+
+	/*
+	 * (non-Javadoc)
+	 * @see org.eclipse.ui.plugin.AbstractUIPlugin#stop(org.osgi.framework.BundleContext)
+	 */
+	public void stop(BundleContext context) throws Exception {
+		plugin = null;
+		super.stop(context);
+	}
+
+	/**
+	 * Returns the shared instance
+	 *
+	 * @return the shared instance
+	 */
+	public static Activator getDefault() {
+		return plugin;
+	}
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/8b28cfa4/org.apache.hdt.dfs.core/src/org/apache/hdt/dfs/core/DFSContent.java
----------------------------------------------------------------------
diff --git a/org.apache.hdt.dfs.core/src/org/apache/hdt/dfs/core/DFSContent.java b/org.apache.hdt.dfs.core/src/org/apache/hdt/dfs/core/DFSContent.java
new file mode 100644
index 0000000..245b62c
--- /dev/null
+++ b/org.apache.hdt.dfs.core/src/org/apache/hdt/dfs/core/DFSContent.java
@@ -0,0 +1,32 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hdt.dfs.core;
+
+/**
+ * Interface to define content entities in the DFS browser
+ */
+public interface DFSContent {
+
+  boolean hasChildren();
+  
+  DFSContent[] getChildren();
+  
+  void refresh();
+  
+}

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/8b28cfa4/org.apache.hdt.dfs.core/src/org/apache/hdt/dfs/core/DFSContentProvider.java
----------------------------------------------------------------------
diff --git a/org.apache.hdt.dfs.core/src/org/apache/hdt/dfs/core/DFSContentProvider.java b/org.apache.hdt.dfs.core/src/org/apache/hdt/dfs/core/DFSContentProvider.java
new file mode 100644
index 0000000..1d59d61
--- /dev/null
+++ b/org.apache.hdt.dfs.core/src/org/apache/hdt/dfs/core/DFSContentProvider.java
@@ -0,0 +1,244 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hdt.dfs.core;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.hdt.ui.ImageLibrary;
+import org.apache.hdt.core.cluster.HadoopCluster;
+import org.apache.hdt.core.cluster.ServerRegistry;
+import org.eclipse.jface.viewers.ILabelProvider;
+import org.eclipse.jface.viewers.ILabelProviderListener;
+import org.eclipse.jface.viewers.ITreeContentProvider;
+import org.eclipse.jface.viewers.StructuredViewer;
+import org.eclipse.jface.viewers.Viewer;
+import org.eclipse.swt.graphics.Image;
+import org.eclipse.swt.widgets.Display;
+
+/**
+ * Handles viewing of DFS locations
+ * <p>
+ * 
+ * The content handled by this provider is a tree:
+ * 
+ * <tt>
+ * <br>DFSLocationsRoot
+ * <br>\_HadoopCluster
+ * <br>|  \_DfsFolder
+ * <br>|  |  \_DfsFile
+ * <br>|  \_DfsFolder
+ * <br>| ...
+ * <br>\_HadoopCluster...
+ * </tt>
+ * 
+ * The code should not block here: blocking operations need to be done
+ * asynchronously so as not to freeze the UI!
+ */
+public class DFSContentProvider implements ITreeContentProvider,
+    ILabelProvider {
+
+  /**
+   * The viewer that displays this Tree content
+   */
+  private Viewer viewer;
+
+  private StructuredViewer sviewer;
+
+  private Map<HadoopCluster, DFSContent> rootFolders =
+      new HashMap<HadoopCluster, DFSContent>();
+
+  /**
+   * Constructor: load resources (icons).
+   */
+  public DFSContentProvider() {
+  }
+
+  private final DFSLocationsRoot locationsRoot = new DFSLocationsRoot(this);
+
+  /*
+   * ITreeContentProvider implementation
+   */
+
+  /* @inheritDoc */
+  public Object[] getChildren(Object parent) {
+
+    if (!(parent instanceof DFSContent))
+      return null;
+    DFSContent content = (DFSContent) parent;
+    return content.getChildren();
+  }
+
+  public Object[] test(Object parentElement) {
+    if (parentElement instanceof DFSLocationsRoot) {
+      return ServerRegistry.getInstance().getServers().toArray();
+
+    } else if (parentElement instanceof HadoopCluster) {
+      final HadoopCluster location = (HadoopCluster) parentElement;
+      Object root = rootFolders.get(location);
+      if (root != null)
+        return new Object[] { root };
+
+      return new Object[] { "Connecting to DFS..." };
+
+    } else if (parentElement instanceof DFSFolder) {
+      DFSFolder folder = (DFSFolder) parentElement;
+      return folder.getChildren();
+    }
+
+    return new Object[] { "<Unknown DFSContent>" };
+  }
+
+  /* @inheritDoc */
+  public Object getParent(Object element) {
+
+    if (element instanceof DFSPath) {
+      return ((DFSPath) element).getParent();
+
+    } else if (element instanceof HadoopCluster) {
+      return locationsRoot;
+    }
+
+    return null;
+  }
+
+  /* @inheritDoc */
+  public boolean hasChildren(Object element) {
+    if (element instanceof DFSContent) {
+      DFSContent content = (DFSContent) element;
+      return content.hasChildren();
+    }
+    return false;
+  }
+
+  /*
+   * IStructureContentProvider implementation
+   */
+
+  /* @inheritDoc */
+  public Object[] getElements(final Object inputElement) {
+    return new Object[] { locationsRoot };
+    // return ServerRegistry.getInstance().getServers().toArray();
+  }
+
+  /*
+   * ILabelProvider implementation
+   */
+
+  /* @inheritDoc */
+  public Image getImage(Object element) {
+    if (element instanceof DFSLocationsRoot)
+      return ImageLibrary.getImage("dfs.browser.root.entry");
+
+    else if (element instanceof DFSLocation)
+      return ImageLibrary.getImage("dfs.browser.location.entry");
+
+    else if (element instanceof DFSFolder)
+      return ImageLibrary.getImage("dfs.browser.folder.entry");
+
+    else if (element instanceof DFSFile)
+      return ImageLibrary.getImage("dfs.browser.file.entry");
+
+    return null;
+  }
+
+  /* @inheritDoc */
+  public String getText(Object element) {
+    if (element instanceof DFSFile)
+      return ((DFSFile) element).toDetailedString();
+
+    return element.toString();
+  }
+
+  /*
+   * IBaseLabelProvider implementation
+   */
+
+  /* @inheritDoc */
+  public void addListener(ILabelProviderListener listener) {
+  }
+
+  /* @inheritDoc */
+  public void removeListener(ILabelProviderListener listener) {
+  }
+
+  /* @inheritDoc */
+  public boolean isLabelProperty(Object element, String property) {
+    return false;
+  }
+
+  /*
+   * IContentProvider implementation
+   */
+
+  /* @inheritDoc */
+  public void dispose() {
+  }
+
+  /* @inheritDoc */
+  public void inputChanged(Viewer viewer, Object oldInput, Object newInput) {
+    this.viewer = viewer;
+    if ((viewer != null) && (viewer instanceof StructuredViewer))
+      this.sviewer = (StructuredViewer) viewer;
+    else
+      this.sviewer = null;
+  }
+
+  /*
+   * Miscellaneous
+   */
+
+  /**
+   * Ask the viewer for this content to refresh
+   */
+  void refresh() {
+    // no display, nothing to update
+    if (this.viewer == null)
+      return;
+
+    Display.getDefault().asyncExec(new Runnable() {
+      public void run() {
+        DFSContentProvider.this.viewer.refresh();
+      }
+    });
+  }
+
+  /**
+   * Ask the viewer to refresh a single element
+   * 
+   * @param content what to refresh
+   */
+  void refresh(final DFSContent content) {
+    if (this.sviewer != null) {
+      Display.getDefault().asyncExec(new Runnable() {
+        public void run() {
+          DFSContentProvider.this.sviewer.refresh(content);
+        }
+      });
+
+    } else {
+      refresh();
+    }
+  }
+
+  Viewer getViewer() {
+    return this.viewer;
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/8b28cfa4/org.apache.hdt.dfs.core/src/org/apache/hdt/dfs/core/DFSFile.java
----------------------------------------------------------------------
diff --git a/org.apache.hdt.dfs.core/src/org/apache/hdt/dfs/core/DFSFile.java b/org.apache.hdt.dfs.core/src/org/apache/hdt/dfs/core/DFSFile.java
new file mode 100644
index 0000000..f739607
--- /dev/null
+++ b/org.apache.hdt.dfs.core/src/org/apache/hdt/dfs/core/DFSFile.java
@@ -0,0 +1,350 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hdt.dfs.core;
+
+import java.io.BufferedInputStream;
+import java.io.BufferedOutputStream;
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.lang.reflect.InvocationTargetException;
+
+import org.apache.hdt.dfs.core.Activator;
+import org.apache.hdt.core.dialogs.ErrorMessageDialog;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.Path;
+import org.eclipse.core.resources.IStorage;
+import org.eclipse.core.runtime.CoreException;
+import org.eclipse.core.runtime.IPath;
+import org.eclipse.core.runtime.IProgressMonitor;
+import org.eclipse.core.runtime.PlatformObject;
+import org.eclipse.core.runtime.Status;
+import org.eclipse.jface.dialogs.MessageDialog;
+import org.eclipse.jface.operation.IRunnableWithProgress;
+import org.eclipse.ui.PlatformUI;
+
+/**
+ * File handling methods for the DFS
+ */
+public class DFSFile extends DFSPath implements DFSContent {
+
+  /** File length in bytes, as last reported by the NameNode */
+  protected long length;
+
+  /** Replication factor of the file */
+  protected short replication;
+
+  /**
+   * Constructor to upload a file on the distributed file system
+   * 
+   * @param parent parent folder in the DFS browser tree
+   * @param path destination path of the file on the DFS
+   * @param file local file to upload
+   * @param monitor progress monitor to report the upload to
+   */
+  public DFSFile(DFSPath parent, Path path, File file,
+      IProgressMonitor monitor) {
+
+    super(parent, path);
+    this.upload(monitor, file);
+  }
+
+  /**
+   * Constructor for a file that already exists on the DFS; retrieves its
+   * length and replication factor from the NameNode.
+   * 
+   * @param parent parent folder in the DFS browser tree
+   * @param path path of the file on the DFS
+   */
+  public DFSFile(DFSPath parent, Path path) {
+    super(parent, path);
+
+    try {
+      FileStatus fs = getDFS().getFileStatus(path);
+      this.length = fs.getLen();
+      this.replication = fs.getReplication();
+    } catch (IOException e) {
+      // Keep the node usable even if the status lookup fails; length and
+      // replication then simply remain at their default (0) values.
+      e.printStackTrace();
+    }
+  }
+
+  /**
+   * Download and view contents of a file
+   * 
+   * @return a InputStream for the file
+   * @throws IOException if the DFS cannot open the file
+   */
+  public InputStream open() throws IOException {
+
+    return getDFS().open(this.path);
+  }
+
+  /**
+   * Download this file to the local file system. This creates a download
+   * status monitor.
+   * 
+   * @param file the local destination file
+   * @throws IOException
+   * @throws InvocationTargetException
+   * @throws InterruptedException
+   * 
+   * @deprecated use {@link #downloadToLocalFile(IProgressMonitor, File)}
+   */
+  public void downloadToLocalFile(final File file)
+      throws InvocationTargetException, InterruptedException {
+
+    PlatformUI.getWorkbench().getProgressService().busyCursorWhile(
+        new IRunnableWithProgress() {
+          public void run(IProgressMonitor monitor)
+              throws InvocationTargetException {
+
+            DFSFile.this.downloadToLocalFile(monitor, file);
+          }
+        });
+  }
+
+  /* @inheritDoc */
+  @Override
+  public void downloadToLocalDirectory(IProgressMonitor monitor, File dir) {
+
+    File dfsPath = new File(this.getPath().toString());
+    File destination = new File(dir, dfsPath.getName());
+
+    // Ask the user before silently overwriting an existing local copy
+    if (destination.exists()) {
+      boolean answer =
+          MessageDialog.openQuestion(null, "Overwrite existing local file?",
+              "The file you are attempting to download from the DFS "
+                  + this.getPath()
+                  + ", already exists in your local directory as "
+                  + destination + ".\n" + "Overwrite the existing file?");
+      if (!answer)
+        return;
+    }
+
+    try {
+      this.downloadToLocalFile(monitor, destination);
+
+    } catch (Exception e) {
+      e.printStackTrace();
+      MessageDialog.openWarning(null, "Download to local file system",
+          "Downloading of file \"" + this.path + "\" to local directory \""
+              + dir + "\" has failed.\n" + e);
+    }
+  }
+
+  /**
+   * Provides a detailed string for this file
+   * 
+   * @return the string formatted as
+   *         <tt>&lt;filename&gt; (&lt;size&gt;, r&lt;replication&gt;)</tt>
+   */
+  public String toDetailedString() {
+    final String[] units = { "b", "Kb", "Mb", "Gb", "Tb" };
+    int unit = 0;
+    double l = this.length;
+    // Bug fix: stop scaling at the last available unit. The previous bound
+    // (unit < units.length) allowed unit to reach units.length and then
+    // index past the end of the array for files of 1 Tb or more.
+    while ((l >= 1024.0) && (unit < units.length - 1)) {
+      unit += 1;
+      l /= 1024.0;
+    }
+
+    return String.format("%s (%.1f %s, r%d)", super.toString(), l,
+        units[unit], this.replication);
+  }
+
+  /* @inheritDoc */
+  @Override
+  public String toString() {
+    return this.path.toString();
+  }
+
+  /*
+   * 
+   */
+
+  /**
+   * Download the DfsFile to a local file. Use the given monitor to report
+   * status of operation.
+   * 
+   * @param monitor the status monitor
+   * @param file the local file where to put the downloaded file
+   * @throws InvocationTargetException wraps any failure during the transfer
+   */
+  public void downloadToLocalFile(IProgressMonitor monitor, File file)
+      throws InvocationTargetException {
+
+    final int taskSize = 1024;
+
+    monitor.setTaskName("Download file " + this.path);
+
+    BufferedOutputStream ostream = null;
+    DataInputStream istream = null;
+
+    try {
+      istream = getDFS().open(this.path);
+      ostream = new BufferedOutputStream(new FileOutputStream(file));
+
+      int bytes;
+      byte[] buffer = new byte[taskSize];
+
+      while ((bytes = istream.read(buffer)) >= 0) {
+        if (monitor.isCanceled())
+          return;
+        ostream.write(buffer, 0, bytes);
+        monitor.worked(1);
+      }
+
+    } catch (Exception e) {
+      throw new InvocationTargetException(e);
+
+    } finally {
+      // Clean all opened resources
+      if (istream != null) {
+        try {
+          istream.close();
+        } catch (IOException e) {
+          e.printStackTrace();
+          // nothing we can do here
+        }
+      }
+      // Bug fix: ostream is still null when getDFS().open() throws before
+      // it is assigned, which previously caused a NullPointerException in
+      // this finally block that masked the original failure.
+      if (ostream != null) {
+        try {
+          ostream.close();
+        } catch (IOException e) {
+          e.printStackTrace();
+          // nothing we can do here
+        }
+      }
+    }
+  }
+
+  /**
+   * Upload a local file to this file on the distributed file system
+   * 
+   * @param monitor progress monitor to report the upload to
+   * @param file local file to read from
+   */
+  public void upload(IProgressMonitor monitor, File file) {
+
+    final int taskSize = 1024;
+
+    monitor.setTaskName("Upload file " + this.path);
+
+    BufferedInputStream istream = null;
+    DataOutputStream ostream = null;
+
+    try {
+      istream = new BufferedInputStream(new FileInputStream(file));
+      ostream = getDFS().create(this.path);
+
+      int bytes;
+      byte[] buffer = new byte[taskSize];
+
+      while ((bytes = istream.read(buffer)) >= 0) {
+        if (monitor.isCanceled())
+          return;
+        ostream.write(buffer, 0, bytes);
+        monitor.worked(1);
+      }
+
+    } catch (Exception e) {
+      // Typo fix in the user-visible message ("uploade" -> "upload")
+      ErrorMessageDialog.display(String.format(
+          "Unable to upload file %s to %s", file, this.path), e
+          .getLocalizedMessage());
+
+    } finally {
+      try {
+        if (istream != null)
+          istream.close();
+      } catch (IOException e) {
+        e.printStackTrace();
+        // nothing we can do here
+      }
+      try {
+        if (ostream != null)
+          ostream.close();
+      } catch (IOException e) {
+        e.printStackTrace();
+        // nothing we can do here
+      }
+    }
+  }
+
+  /* @inheritDoc */
+  @Override
+  public void refresh() {
+    // A file has no children of its own; refreshing means re-listing the
+    // parent folder.
+    getParent().refresh();
+  }
+
+  /* @inheritDoc */
+  @Override
+  public int computeDownloadWork() {
+    // One unit of work per kilobyte, plus one for the file itself
+    return 1 + (int) (this.length / 1024);
+  }
+
+  /**
+   * Creates an adapter for the file to open it in the Editor
+   * 
+   * @return the IStorage
+   */
+  public IStorage getIStorage() {
+    return new IStorageAdapter();
+  }
+
+  /**
+   * IStorage adapter to open the file in the Editor
+   */
+  private class IStorageAdapter extends PlatformObject implements IStorage {
+
+    /* @inheritDoc */
+    public InputStream getContents() throws CoreException {
+      try {
+        return DFSFile.this.open();
+
+      } catch (IOException ioe) {
+        throw new CoreException(new Status(Status.ERROR,
+                Activator.PLUGIN_ID, 0, "Unable to open file \""
+                + DFSFile.this.path + "\"", ioe));
+      }
+    }
+
+    /* @inheritDoc */
+    public IPath getFullPath() {
+      return new org.eclipse.core.runtime.Path(DFSFile.this.path.toString());
+    }
+
+    /* @inheritDoc */
+    public String getName() {
+      return DFSFile.this.path.getName();
+    }
+
+    /* @inheritDoc */
+    public boolean isReadOnly() {
+      // The editor view of a DFS file is read-only; edits must go through
+      // an explicit upload.
+      return true;
+    }
+
+  }
+
+  /*
+   * Implementation of DFSContent
+   */
+
+  /* @inheritDoc */
+  public DFSContent[] getChildren() {
+    return null;
+  }
+
+  /* @inheritDoc */
+  public boolean hasChildren() {
+    return false;
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/8b28cfa4/org.apache.hdt.dfs.core/src/org/apache/hdt/dfs/core/DFSFolder.java
----------------------------------------------------------------------
diff --git a/org.apache.hdt.dfs.core/src/org/apache/hdt/dfs/core/DFSFolder.java b/org.apache.hdt.dfs.core/src/org/apache/hdt/dfs/core/DFSFolder.java
new file mode 100644
index 0000000..55fc8be
--- /dev/null
+++ b/org.apache.hdt.dfs.core/src/org/apache/hdt/dfs/core/DFSFolder.java
@@ -0,0 +1,213 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hdt.dfs.core;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.logging.Logger;
+
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.Path;
+import org.apache.hdt.core.cluster.HadoopCluster;
+import org.eclipse.core.runtime.IProgressMonitor;
+import org.eclipse.core.runtime.IStatus;
+import org.eclipse.core.runtime.Status;
+import org.eclipse.core.runtime.jobs.Job;
+import org.eclipse.jface.dialogs.MessageDialog;
+
+/**
+ * Local representation of a folder in the DFS.
+ * 
+ * The constructor creates an empty representation of the folder; the child
+ * listing is fetched lazily by a background Job (see getChildren()).
+ */
+public class DFSFolder extends DFSPath implements DFSContent {
+
+  static Logger log = Logger.getLogger(DFSFolder.class.getName());
+
+  // Cached listing of this folder; null until loaded (lazily, async)
+  private DFSContent[] children;
+
+  /**
+   * Root-folder constructor, used for the top of a DFS location.
+   * 
+   * @param provider the content provider this folder belongs to
+   * @param location the Hadoop location hosting the DFS
+   * @throws IOException
+   */
+  protected DFSFolder(DFSContentProvider provider, HadoopCluster location)
+      throws IOException {
+
+    super(provider, location);
+  }
+
+  /**
+   * Sub-folder constructor.
+   * 
+   * @param parent parent folder in the DFS tree
+   * @param path path of this folder on the DFS
+   */
+  private DFSFolder(DFSPath parent, Path path) {
+    super(parent, path);
+  }
+
+  /**
+   * Contact the NameNode and (re)build the cached list of children.
+   * 
+   * @throws IOException if the directory listing fails
+   */
+  protected void loadDFSFolderChildren() throws IOException {
+    List<DFSPath> list = new ArrayList<DFSPath>();
+
+    for (FileStatus status : getDFS().listStatus(this.getPath())) {
+      if (status.isDir()) {
+        list.add(new DFSFolder(this, status.getPath()));
+      } else {
+        list.add(new DFSFile(this, status.getPath()));
+      }
+    }
+
+    this.children = list.toArray(new DFSContent[list.size()]);
+  }
+
+  /**
+   * Upload the given file or directory into this DfsFolder
+   * 
+   * @param monitor progress monitor to report the upload to
+   * @param file local file or directory to upload
+   * @throws IOException
+   */
+  public void upload(IProgressMonitor monitor, final File file)
+      throws IOException {
+
+    if (file.isDirectory()) {
+      Path filePath = new Path(this.path, file.getName());
+      getDFS().mkdirs(filePath);
+      DFSFolder newFolder = new DFSFolder(this, filePath);
+      monitor.worked(1);
+      for (File child : file.listFiles()) {
+        if (monitor.isCanceled())
+          return;
+        newFolder.upload(monitor, child);
+      }
+
+    } else if (file.isFile()) {
+      Path filePath = new Path(this.path, file.getName());
+      // The DFSFile upload-constructor performs the transfer as a side
+      // effect; the node itself does not need to be retained (the previous
+      // unused local variable has been removed).
+      new DFSFile(this, filePath, file, monitor);
+
+    } else {
+      // Neither a regular file nor a directory (e.g. special file): skip it
+    }
+  }
+
+  /* @inheritDoc */
+  @Override
+  public void downloadToLocalDirectory(IProgressMonitor monitor, File dir) {
+    if (!dir.exists())
+      dir.mkdirs();
+
+    if (!dir.isDirectory()) {
+      MessageDialog.openError(null, "Download to local file system",
+          "Invalid directory location: \"" + dir + "\"");
+      return;
+    }
+
+    File dfsPath = new File(this.getPath().toString());
+    File destination = new File(dir, dfsPath.getName());
+
+    if (!destination.exists()) {
+      if (!destination.mkdir()) {
+        MessageDialog.openError(null, "Download to local directory",
+            "Unable to create directory " + destination.getAbsolutePath());
+        return;
+      }
+    }
+
+    // Download all DfsPath children
+    for (Object childObj : getChildren()) {
+      if (childObj instanceof DFSPath) {
+        ((DFSPath) childObj).downloadToLocalDirectory(monitor, destination);
+        monitor.worked(1);
+      }
+    }
+  }
+
+  /* @inheritDoc */
+  @Override
+  public int computeDownloadWork() {
+    // One unit for this folder plus the recursive cost of its children
+    int work = 1;
+    for (DFSContent child : getChildren()) {
+      if (child instanceof DFSPath)
+        work += ((DFSPath) child).computeDownloadWork();
+    }
+
+    return work;
+  }
+
+  /**
+   * Create a new sub directory into this directory
+   * 
+   * @param folderName name of the directory to create
+   */
+  public void mkdir(String folderName) {
+    try {
+      getDFS().mkdirs(new Path(this.path, folderName));
+    } catch (IOException ioe) {
+      ioe.printStackTrace();
+    }
+    doRefresh();
+  }
+
+  /*
+   * Implementation of DFSContent
+   */
+
+  /* @inheritDoc */
+  public boolean hasChildren() {
+    // Before the listing is fetched, optimistically report children so the
+    // tree shows an expandable node and triggers the lazy load.
+    if (this.children == null)
+      return true;
+    else
+      return (this.children.length > 0);
+  }
+
+  /* @inheritDoc */
+  public DFSContent[] getChildren() {
+    if (children == null) {
+      // Listing contacts the NameNode and may block: run it in a background
+      // Job and return a placeholder immediately to keep the UI responsive.
+      new Job("Connecting to DFS " + location) {
+        @Override
+        protected IStatus run(IProgressMonitor monitor) {
+          try {
+            loadDFSFolderChildren();
+            return Status.OK_STATUS;
+
+          } catch (IOException ioe) {
+            children =
+                new DFSContent[] { new DFSMessage("Error: "
+                    + ioe.getLocalizedMessage()) };
+            return Status.CANCEL_STATUS;
+
+          } finally {
+            // Under all circumstances, update the UI
+            provider.refresh(DFSFolder.this);
+          }
+        }
+      }.schedule();
+
+      return new DFSContent[] { new DFSMessage("Listing folder content...") };
+    }
+    return this.children;
+  }
+
+  /* @inheritDoc */
+  @Override
+  public void refresh() {
+    // Drop the cached listing; it will be re-fetched lazily on next access
+    this.children = null;
+    this.doRefresh();
+  }
+
+  /* @inheritDoc */
+  @Override
+  public String toString() {
+    // NOTE(review): getChildren() may return an asynchronous placeholder,
+    // so the count shown here can be transient until the listing completes.
+    return String.format("%s (%s)", super.toString(),
+        this.getChildren().length);
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/8b28cfa4/org.apache.hdt.dfs.core/src/org/apache/hdt/dfs/core/DFSLocation.java
----------------------------------------------------------------------
diff --git a/org.apache.hdt.dfs.core/src/org/apache/hdt/dfs/core/DFSLocation.java b/org.apache.hdt.dfs.core/src/org/apache/hdt/dfs/core/DFSLocation.java
new file mode 100644
index 0000000..6b6aff5
--- /dev/null
+++ b/org.apache.hdt.dfs.core/src/org/apache/hdt/dfs/core/DFSLocation.java
@@ -0,0 +1,108 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hdt.dfs.core;
+
+import java.io.IOException;
+
+import org.apache.hdt.core.cluster.HadoopCluster;
+import org.eclipse.core.runtime.IProgressMonitor;
+import org.eclipse.core.runtime.IStatus;
+import org.eclipse.core.runtime.Status;
+import org.eclipse.core.runtime.jobs.Job;
+
+/**
+ * DFS Content representation of a HDFS location
+ *
+ * The root folder is connected lazily: the first getChildren() call spawns
+ * a background Job so the NameNode handshake never blocks the UI thread.
+ */
+public class DFSLocation implements DFSContent {
+
+  // Content provider used to push UI refreshes when state changes
+  private final DFSContentProvider provider;
+
+  // The Hadoop location (cluster) this node represents
+  private final HadoopCluster location;
+
+  // Lazily-created root folder; null until the first successful connection
+  private DFSContent rootFolder = null;
+
+  DFSLocation(DFSContentProvider provider, HadoopCluster server) {
+    this.provider = provider;
+    this.location = server;
+  }
+
+  /* @inheritDoc */
+  @Override
+  public String toString() {
+    return this.location.getLocationName();
+  }
+
+  /*
+   * Implementation of DFSContent
+   */
+
+  /* @inheritDoc */
+  public DFSContent[] getChildren() {
+    if (this.rootFolder == null) {
+      /*
+       * DfsFolder constructor might block as it contacts the NameNode: work
+       * asynchronously here or this will potentially freeze the UI
+       */
+      new Job("Connecting to DFS " + location) {
+        @Override
+        protected IStatus run(IProgressMonitor monitor) {
+          try {
+            rootFolder = new DFSFolder(provider, location);
+            return Status.OK_STATUS;
+
+          } catch (IOException ioe) {
+            // Show the failure as a message leaf instead of an empty tree
+            rootFolder =
+                new DFSMessage("Error: " + ioe.getLocalizedMessage());
+            return Status.CANCEL_STATUS;
+
+          } finally {
+            // Under all circumstances, update the UI
+            provider.refresh(DFSLocation.this);
+          }
+        }
+      }.schedule();
+
+      // Placeholder returned while the Job above is still connecting
+      return new DFSContent[] { new DFSMessage("Connecting to DFS "
+          + toString()) };
+    }
+    return new DFSContent[] { this.rootFolder };
+  }
+
+  /* @inheritDoc */
+  public boolean hasChildren() {
+    // Always expandable: at minimum a status/placeholder message is shown
+    return true;
+  }
+  
+  /* @inheritDoc */
+  public void refresh() {
+    // Dropping rootFolder forces getChildren() to reconnect lazily
+    this.rootFolder = null;
+    this.provider.refresh(this);
+  }
+
+  /*
+   * Actions
+   */
+  
+  /**
+   * Refresh the location using a new connection
+   */
+  public void reconnect() {
+    this.refresh();
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/8b28cfa4/org.apache.hdt.dfs.core/src/org/apache/hdt/dfs/core/DFSLocationsRoot.java
----------------------------------------------------------------------
diff --git a/org.apache.hdt.dfs.core/src/org/apache/hdt/dfs/core/DFSLocationsRoot.java b/org.apache.hdt.dfs.core/src/org/apache/hdt/dfs/core/DFSLocationsRoot.java
new file mode 100644
index 0000000..1edec58
--- /dev/null
+++ b/org.apache.hdt.dfs.core/src/org/apache/hdt/dfs/core/DFSLocationsRoot.java
@@ -0,0 +1,150 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hdt.dfs.core;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.hdt.core.cluster.IHadoopClusterListener;
+import org.apache.hdt.core.cluster.ServerRegistry;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hdt.core.cluster.HadoopCluster;
+
+/**
+ * Representation of the root element containing all DFS servers. This
+ * content registers an observer on Hadoop servers so as to update itself
+ * when servers are updated.
+ */
+public class DFSLocationsRoot implements DFSContent, IHadoopClusterListener {
+
+  /**
+   * 
+   */
+  private final DFSContentProvider provider;
+
+  private Map<HadoopCluster, DFSLocation> map =
+      new HashMap<HadoopCluster, DFSLocation>();
+
+  /**
+   * Register a listeners to track DFS locations updates
+   * 
+   * @param provider the content provider this content is the root of
+   */
+  DFSLocationsRoot(DFSContentProvider provider) {
+    this.provider = provider;
+    ServerRegistry.getInstance().addListener(this);
+    this.refresh();
+  }
+
+  /*
+   * Implementation of IHadoopServerListener
+   */
+
+  /* @inheritDoc */
+  public synchronized void serverChanged(final HadoopCluster location,
+      final int type) {
+
+    switch (type) {
+      case ServerRegistry.SERVER_STATE_CHANGED: {
+        this.provider.refresh(map.get(location));
+        break;
+      }
+
+      case ServerRegistry.SERVER_ADDED: {
+        DFSLocation dfsLoc = new DFSLocation(provider, location);
+        map.put(location, dfsLoc);
+        this.provider.refresh(this);
+        break;
+      }
+
+      case ServerRegistry.SERVER_REMOVED: {
+        map.remove(location);
+        this.provider.refresh(this);
+        break;
+      }
+    }
+  }
+
+  /**
+   * Recompute the map of Hadoop locations
+   */
+  private synchronized void reloadLocations() {
+    map.clear();
+    for (HadoopCluster location : ServerRegistry.getInstance().getServers())
+      map.put(location, new DFSLocation(provider, location));
+  }
+
+  /* @inheritDoc */
+  @Override
+  public String toString() {
+    return "DFS Locations";
+  }
+
+  /*
+   * Implementation of DFSContent
+   */
+
+  /* @inheritDoc */
+  public synchronized DFSContent[] getChildren() {
+    return this.map.values().toArray(new DFSContent[this.map.size()]);
+  }
+
+  /* @inheritDoc */
+  public boolean hasChildren() {
+    return (this.map.size() > 0);
+  }
+
+  /* @inheritDoc */
+  public void refresh() {
+    reloadLocations();
+    this.provider.refresh(this);
+  }
+
+  /*
+   * Actions
+   */
+
+  public void disconnect() {
+    Thread closeThread = new Thread() {
+      /* @inheritDoc */
+      @Override
+      public void run() {
+        try {
+          System.out.printf("Closing all opened File Systems...\n");
+          FileSystem.closeAll();
+          System.out.printf("File Systems closed\n");
+
+        } catch (IOException ioe) {
+          ioe.printStackTrace();
+        }
+      }
+    };
+
+    // Wait 5 seconds for the connections to be closed
+    closeThread.start();
+    try {
+      closeThread.join(5000);
+
+    } catch (InterruptedException ie) {
+      // Ignore
+    }
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/8b28cfa4/org.apache.hdt.dfs.core/src/org/apache/hdt/dfs/core/DFSMessage.java
----------------------------------------------------------------------
diff --git a/org.apache.hdt.dfs.core/src/org/apache/hdt/dfs/core/DFSMessage.java b/org.apache.hdt.dfs.core/src/org/apache/hdt/dfs/core/DFSMessage.java
new file mode 100644
index 0000000..0d25d45
--- /dev/null
+++ b/org.apache.hdt.dfs.core/src/org/apache/hdt/dfs/core/DFSMessage.java
@@ -0,0 +1,57 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hdt.dfs.core;
+
+/**
+ * DFS Content that displays a message.
+ *
+ * Used as a placeholder or error leaf in the DFS tree ("Connecting...",
+ * "Error: ..."). It never has children and refreshing it is a no-op.
+ */
+class DFSMessage implements DFSContent {
+
+  /** Text displayed for this node in the tree */
+  private final String text;
+
+  DFSMessage(String message) {
+    this.text = message;
+  }
+
+  /* @inheritDoc */
+  @Override
+  public String toString() {
+    return this.text;
+  }
+
+  /*
+   * Implementation of DFSContent
+   */
+
+  /* @inheritDoc */
+  public DFSContent[] getChildren() {
+    return null;
+  }
+
+  /* @inheritDoc */
+  public boolean hasChildren() {
+    return false;
+  }
+
+  /* @inheritDoc */
+  public void refresh() {
+    // Nothing to do
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/8b28cfa4/org.apache.hdt.dfs.core/src/org/apache/hdt/dfs/core/DFSPath.java
----------------------------------------------------------------------
diff --git a/org.apache.hdt.dfs.core/src/org/apache/hdt/dfs/core/DFSPath.java b/org.apache.hdt.dfs.core/src/org/apache/hdt/dfs/core/DFSPath.java
new file mode 100644
index 0000000..1cff452
--- /dev/null
+++ b/org.apache.hdt.dfs.core/src/org/apache/hdt/dfs/core/DFSPath.java
@@ -0,0 +1,160 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hdt.dfs.core;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.logging.Logger;
+
+import org.apache.hadoop.hdfs.DistributedFileSystem;
+import org.apache.hdt.core.dialogs.ErrorMessageDialog;
+import org.apache.hdt.core.cluster.ConfProp;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hdt.core.cluster.HadoopCluster;
+import org.eclipse.core.runtime.IProgressMonitor;
+import org.eclipse.jface.dialogs.MessageDialog;
+
+/**
+ * DFS Path handling for DFS
+ *
+ * Base class of all DFS tree nodes: holds the Hadoop location, the remote
+ * Path, the parent link, and a lazily-opened DistributedFileSystem handle.
+ */
+public abstract class DFSPath implements DFSContent {
+
+  protected final DFSContentProvider provider;
+
+  protected HadoopCluster location;
+
+  // Lazily-opened DFS handle, shared down the tree via the child ctor
+  private DistributedFileSystem dfs = null;
+
+  protected final Path path;
+
+  protected final DFSPath parent;
+
+  /**
+   * For debugging purpose
+   */
+  static Logger log = Logger.getLogger(DFSPath.class.getName());
+
+  /**
+   * Create a path representation for the given location in the given viewer
+   * 
+   * @param provider the content provider this path belongs to
+   * @param location the Hadoop location hosting the DFS
+   * @throws IOException
+   */
+  public DFSPath(DFSContentProvider provider, HadoopCluster location)
+      throws IOException {
+
+    this.provider = provider;
+    this.location = location;
+    this.path = new Path("/");
+    this.parent = null;
+  }
+
+  /**
+   * Create a sub-path representation for the given parent path
+   * 
+   * @param parent parent node in the DFS tree
+   * @param path path of this node on the DFS
+   */
+  protected DFSPath(DFSPath parent, Path path) {
+    this.provider = parent.provider;
+    this.location = parent.location;
+    this.dfs = parent.dfs;
+    this.parent = parent;
+    this.path = path;
+  }
+
+  protected void dispose() {
+    // Free the DFS connection
+  }
+
+  /* @inheritDoc */
+  @Override
+  public String toString() {
+    // Bug fix: Path.equals(Object) can never be true for a String argument,
+    // so the original `path.equals("/")` never detected the root path and
+    // the root node always fell through to path.getName(). Compare the
+    // textual form instead.
+    if (path.toString().equals("/")) {
+      return location.getConfProp(ConfProp.FS_DEFAULT_URI);
+
+    } else {
+      return this.path.getName();
+    }
+  }
+
+  /**
+   * Does a recursive delete of the remote directory tree at this node.
+   */
+  public void delete() {
+    try {
+      getDFS().delete(this.path, true);
+
+    } catch (IOException e) {
+      e.printStackTrace();
+      MessageDialog.openWarning(null, "Delete file",
+          "Unable to delete file \"" + this.path + "\"\n" + e);
+    }
+  }
+
+  /** @return the parent node, or null for the root path */
+  public DFSPath getParent() {
+    return parent;
+  }
+
+  /** Invalidate any cached state and re-fetch it from the DFS. */
+  public abstract void refresh();
+
+  /**
+   * Refresh the UI element for this content
+   */
+  public void doRefresh() {
+    provider.refresh(this);
+  }
+
+  /**
+   * Copy the DfsPath to the given local directory
+   * 
+   * @param monitor progress monitor to report the download to
+   * @param dir the local directory
+   */
+  public abstract void downloadToLocalDirectory(IProgressMonitor monitor,
+      File dir);
+
+  public Path getPath() {
+    return this.path;
+  }
+
+  /**
+   * Gets a connection to the DFS
+   * 
+   * @return a connection to the DFS
+   * @throws IOException if the location's file system is not a
+   *           DistributedFileSystem
+   */
+  DistributedFileSystem getDFS() throws IOException {
+    if (this.dfs == null) {
+      FileSystem fs = location.getDFS();
+      if (!(fs instanceof DistributedFileSystem)) {
+        ErrorMessageDialog.display("DFS Browser",
+            "The DFS Browser cannot browse anything else "
+                + "but a Distributed File System!");
+        throw new IOException("DFS Browser expects a DistributedFileSystem!");
+      }
+      this.dfs = (DistributedFileSystem) fs;
+    }
+    return this.dfs;
+  }
+
+  /** @return an estimate of the monitor work units needed to download this node */
+  public abstract int computeDownloadWork();
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/8b28cfa4/org.apache.hdt.dfs.ui/.classpath
----------------------------------------------------------------------
diff --git a/org.apache.hdt.dfs.ui/.classpath b/org.apache.hdt.dfs.ui/.classpath
new file mode 100644
index 0000000..ad32c83
--- /dev/null
+++ b/org.apache.hdt.dfs.ui/.classpath
@@ -0,0 +1,7 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<classpath>
+	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
+	<classpathentry kind="con" path="org.eclipse.pde.core.requiredPlugins"/>
+	<classpathentry kind="src" path="src"/>
+	<classpathentry kind="output" path="bin"/>
+</classpath>

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/8b28cfa4/org.apache.hdt.dfs.ui/.project
----------------------------------------------------------------------
diff --git a/org.apache.hdt.dfs.ui/.project b/org.apache.hdt.dfs.ui/.project
new file mode 100644
index 0000000..efb117d
--- /dev/null
+++ b/org.apache.hdt.dfs.ui/.project
@@ -0,0 +1,28 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<projectDescription>
+	<name>org.apache.hdt.dfs.ui</name>
+	<comment></comment>
+	<projects>
+	</projects>
+	<buildSpec>
+		<buildCommand>
+			<name>org.eclipse.jdt.core.javabuilder</name>
+			<arguments>
+			</arguments>
+		</buildCommand>
+		<buildCommand>
+			<name>org.eclipse.pde.ManifestBuilder</name>
+			<arguments>
+			</arguments>
+		</buildCommand>
+		<buildCommand>
+			<name>org.eclipse.pde.SchemaBuilder</name>
+			<arguments>
+			</arguments>
+		</buildCommand>
+	</buildSpec>
+	<natures>
+		<nature>org.eclipse.pde.PluginNature</nature>
+		<nature>org.eclipse.jdt.core.javanature</nature>
+	</natures>
+</projectDescription>

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/8b28cfa4/org.apache.hdt.dfs.ui/.settings/org.eclipse.jdt.core.prefs
----------------------------------------------------------------------
diff --git a/org.apache.hdt.dfs.ui/.settings/org.eclipse.jdt.core.prefs b/org.apache.hdt.dfs.ui/.settings/org.eclipse.jdt.core.prefs
new file mode 100644
index 0000000..c537b63
--- /dev/null
+++ b/org.apache.hdt.dfs.ui/.settings/org.eclipse.jdt.core.prefs
@@ -0,0 +1,7 @@
+eclipse.preferences.version=1
+org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled
+org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
+org.eclipse.jdt.core.compiler.compliance=1.6
+org.eclipse.jdt.core.compiler.problem.assertIdentifier=error
+org.eclipse.jdt.core.compiler.problem.enumIdentifier=error
+org.eclipse.jdt.core.compiler.source=1.6

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/8b28cfa4/org.apache.hdt.dfs.ui/META-INF/MANIFEST.MF
----------------------------------------------------------------------
diff --git a/org.apache.hdt.dfs.ui/META-INF/MANIFEST.MF b/org.apache.hdt.dfs.ui/META-INF/MANIFEST.MF
new file mode 100644
index 0000000..b231b7f
--- /dev/null
+++ b/org.apache.hdt.dfs.ui/META-INF/MANIFEST.MF
@@ -0,0 +1,14 @@
+Manifest-Version: 1.0
+Bundle-ManifestVersion: 2
+Bundle-Name: Hadoop Development Tools HDFS UI
+Bundle-SymbolicName: org.apache.hdt.dfs.ui;singleton:=true
+Bundle-Version: 0.0.0
+Bundle-Activator: org.apache.hdt.dfs.ui.Activator
+Bundle-Vendor: Apache Software Foundation
+Require-Bundle: org.eclipse.ui,
+ org.eclipse.core.runtime,
+ org.eclipse.wst.server.core,
+ org.apache.hadoop.eclipse,
+ org.apache.hdt.dfs.core
+Bundle-RequiredExecutionEnvironment: JavaSE-1.6
+Bundle-ActivationPolicy: lazy

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/8b28cfa4/org.apache.hdt.dfs.ui/bin/org/apache/hdt/dfs/ui/Activator.class
----------------------------------------------------------------------
diff --git a/org.apache.hdt.dfs.ui/bin/org/apache/hdt/dfs/ui/Activator.class b/org.apache.hdt.dfs.ui/bin/org/apache/hdt/dfs/ui/Activator.class
new file mode 100644
index 0000000..cacc268
Binary files /dev/null and b/org.apache.hdt.dfs.ui/bin/org/apache/hdt/dfs/ui/Activator.class differ

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/8b28cfa4/org.apache.hdt.dfs.ui/build.properties
----------------------------------------------------------------------
diff --git a/org.apache.hdt.dfs.ui/build.properties b/org.apache.hdt.dfs.ui/build.properties
new file mode 100644
index 0000000..8de5aee
--- /dev/null
+++ b/org.apache.hdt.dfs.ui/build.properties
@@ -0,0 +1,6 @@
+source.. = src/
+output.. = bin/
+bin.includes = META-INF/,\
+               .,\
+               plugin.xml,\
+               resources/

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/8b28cfa4/org.apache.hdt.dfs.ui/plugin.xml
----------------------------------------------------------------------
diff --git a/org.apache.hdt.dfs.ui/plugin.xml b/org.apache.hdt.dfs.ui/plugin.xml
new file mode 100644
index 0000000..53f60a5
--- /dev/null
+++ b/org.apache.hdt.dfs.ui/plugin.xml
@@ -0,0 +1,50 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<?eclipse version="3.4"?>
+<plugin>
+   <extension
+         point="org.eclipse.ui.navigator.navigatorContent">
+      <navigatorContent
+            activeByDefault="true"
+            contentProvider="org.apache.hadoop.eclipse.dfs.DFSContentProvider"
+            icon="resources/elephantblue16x16.gif"
+            id="org.apache.hadoop.eclipse.views.dfscontent"
+            labelProvider="org.apache.hadoop.eclipse.dfs.DFSContentProvider"
+            name="Hadoop Distributed File Systems"
+            priority="normal"
+            providesSaveables="false">
+         <triggerPoints>
+            <or>
+               <instanceof
+                     value="org.apache.hadoop.eclipse.dfs.DFSPath">
+               </instanceof>
+               <adapt
+                     type="org.eclipse.core.resources.IResource">
+                  <test
+                        forcePluginActivation="true"
+                        property="mapreduce.deployable">
+                  </test>
+               </adapt>
+            </or>
+         </triggerPoints>
+         <actionProvider
+               class="org.apache.hadoop.eclipse.dfs.ActionProvider">
+         </actionProvider>
+         <possibleChildren>
+            <or>
+               <instanceof
+                     value="org.eclipse.wst.server.core.IServer">
+               </instanceof>
+               <instanceof
+                     value="org.apache.hdt.dfs.core.DFSLocationsRoot">
+               </instanceof>
+               <instanceof
+                     value="org.apache.hdt.dfs.core.DFSLocation">
+               </instanceof>
+               <instanceof
+                     value="org.apache.hdt.dfs.core.DFSPath">
+               </instanceof>
+            </or>
+         </possibleChildren>
+      </navigatorContent>
+   </extension>
+</plugin>

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/8b28cfa4/org.apache.hdt.dfs.ui/resources/elephantblue16x16.gif
----------------------------------------------------------------------
diff --git a/org.apache.hdt.dfs.ui/resources/elephantblue16x16.gif b/org.apache.hdt.dfs.ui/resources/elephantblue16x16.gif
new file mode 100644
index 0000000..0927b13
Binary files /dev/null and b/org.apache.hdt.dfs.ui/resources/elephantblue16x16.gif differ

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/8b28cfa4/org.apache.hdt.dfs.ui/src/org/apache/hdt/dfs/ui/Activator.java
----------------------------------------------------------------------
diff --git a/org.apache.hdt.dfs.ui/src/org/apache/hdt/dfs/ui/Activator.java b/org.apache.hdt.dfs.ui/src/org/apache/hdt/dfs/ui/Activator.java
new file mode 100644
index 0000000..4501d3e
--- /dev/null
+++ b/org.apache.hdt.dfs.ui/src/org/apache/hdt/dfs/ui/Activator.java
@@ -0,0 +1,50 @@
+package org.apache.hdt.dfs.ui;
+
+import org.eclipse.ui.plugin.AbstractUIPlugin;
+import org.osgi.framework.BundleContext;
+
+/**
+ * The activator class controls the plug-in life cycle
+ */
+public class Activator extends AbstractUIPlugin {
+
+	// The plug-in ID
+	public static final String PLUGIN_ID = "org.apache.hdt.dfs.ui"; //$NON-NLS-1$
+
+	// The shared instance
+	private static Activator plugin;
+	
+	/**
+	 * The constructor
+	 */
+	public Activator() {
+	}
+
+	/*
+	 * (non-Javadoc)
+	 * @see org.eclipse.ui.plugin.AbstractUIPlugin#start(org.osgi.framework.BundleContext)
+	 */
+	public void start(BundleContext context) throws Exception {
+		super.start(context);
+		plugin = this;
+	}
+
+	/*
+	 * (non-Javadoc)
+	 * @see org.eclipse.ui.plugin.AbstractUIPlugin#stop(org.osgi.framework.BundleContext)
+	 */
+	public void stop(BundleContext context) throws Exception {
+		plugin = null;
+		super.stop(context);
+	}
+
+	/**
+	 * Returns the shared instance
+	 *
+	 * @return the shared instance
+	 */
+	public static Activator getDefault() {
+		return plugin;
+	}
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/8b28cfa4/org.apache.hdt.ui/META-INF/MANIFEST.MF
----------------------------------------------------------------------
diff --git a/org.apache.hdt.ui/META-INF/MANIFEST.MF b/org.apache.hdt.ui/META-INF/MANIFEST.MF
index 4195c59..be1cd74 100644
--- a/org.apache.hdt.ui/META-INF/MANIFEST.MF
+++ b/org.apache.hdt.ui/META-INF/MANIFEST.MF
@@ -16,3 +16,4 @@ Require-Bundle: org.eclipse.ui,
  org.eclipse.ui.console
 Bundle-RequiredExecutionEnvironment: JavaSE-1.6
 Bundle-ActivationPolicy: lazy
+Export-Package: org.apache.hdt.ui

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/8b28cfa4/org.apache.hdt.ui/src/org/apache/hdt/ui/cluster/ServerRegistry.java
----------------------------------------------------------------------
diff --git a/org.apache.hdt.ui/src/org/apache/hdt/ui/cluster/ServerRegistry.java b/org.apache.hdt.ui/src/org/apache/hdt/ui/cluster/ServerRegistry.java
deleted file mode 100644
index b1e172b..0000000
--- a/org.apache.hdt.ui/src/org/apache/hdt/ui/cluster/ServerRegistry.java
+++ /dev/null
@@ -1,204 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hdt.ui.cluster;
-
-import java.io.File;
-import java.io.FilenameFilter;
-import java.io.IOException;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Set;
-import java.util.TreeMap;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hdt.core.cluster.HadoopCluster;
-import org.apache.hdt.core.cluster.IHadoopClusterListener;
-import org.apache.hdt.ui.Activator;
-import org.eclipse.jface.dialogs.MessageDialog;
-
-/**
- * Register of Hadoop locations.
- * 
- * Each location corresponds to a Hadoop {@link Configuration} stored as an
- * XML file in the workspace plug-in configuration directory:
- * <p>
- * <tt>
- * &lt;workspace-dir&gt;/.metadata/.plugins/org.apache.hadoop.eclipse/locations/*.xml
- * </tt>
- * 
- */
-public class ServerRegistry {
-
-  private static final ServerRegistry INSTANCE = new ServerRegistry();
-
-  public static final int SERVER_ADDED = 0;
-
-  public static final int SERVER_REMOVED = 1;
-
-  public static final int SERVER_STATE_CHANGED = 2;
-
-  private final File baseDir =
-      Activator.getDefault().getStateLocation().toFile();
-
-  private final File saveDir = new File(baseDir, "locations");
-
-  private ServerRegistry() {
-    if (saveDir.exists() && !saveDir.isDirectory())
-      saveDir.delete();
-    if (!saveDir.exists())
-      saveDir.mkdirs();
-
-    load();
-  }
-
-  private Map<String, HadoopCluster> servers;
-
-  private Set<IHadoopClusterListener> listeners =
-      new HashSet<IHadoopClusterListener>();
-
-  public static ServerRegistry getInstance() {
-    return INSTANCE;
-  }
-
-  public synchronized Collection<HadoopCluster> getServers() {
-    return Collections.unmodifiableCollection(servers.values());
-  }
-
-  /**
-   * Load all available locations from the workspace configuration directory.
-   */
-  private synchronized void load() {
-    Map<String, HadoopCluster> map = new TreeMap<String, HadoopCluster>();
-    for (File file : saveDir.listFiles()) {
-      try {
-        HadoopCluster server = new HadoopCluster(file);
-        map.put(server.getLocationName(), server);
-
-      } catch (Exception exn) {
-        System.err.println(exn);
-      }
-    }
-    this.servers = map;
-  }
-
-  private synchronized void store() {
-    try {
-      File dir = File.createTempFile("locations", "new", baseDir);
-      dir.delete();
-      dir.mkdirs();
-
-      for (HadoopCluster server : servers.values()) {
-        server.storeSettingsToFile(new File(dir, server.getLocationName()
-            + ".xml"));
-      }
-
-      FilenameFilter XMLFilter = new FilenameFilter() {
-        public boolean accept(File dir, String name) {
-          String lower = name.toLowerCase();
-          return lower.endsWith(".xml");
-        }
-      };
-
-      File backup = new File(baseDir, "locations.backup");
-      if (backup.exists()) {
-        for (File file : backup.listFiles(XMLFilter))
-          if (!file.delete())
-            throw new IOException("Unable to delete backup location file: "
-                + file);
-        if (!backup.delete())
-          throw new IOException(
-              "Unable to delete backup location directory: " + backup);
-      }
-
-      saveDir.renameTo(backup);
-      dir.renameTo(saveDir);
-
-    } catch (IOException ioe) {
-      ioe.printStackTrace();
-      MessageDialog.openError(null,
-          "Saving configuration of Hadoop locations failed", ioe.toString());
-    }
-  }
-
-  public void dispose() {
-    for (HadoopCluster server : getServers()) {
-      server.dispose();
-    }
-  }
-
-  public synchronized HadoopCluster getServer(String location) {
-    return servers.get(location);
-  }
-
-  /*
-   * HadoopServer map listeners
-   */
-
-  public void addListener(IHadoopClusterListener l) {
-    synchronized (listeners) {
-      listeners.add(l);
-    }
-  }
-
-  public void removeListener(IHadoopClusterListener l) {
-    synchronized (listeners) {
-      listeners.remove(l);
-    }
-  }
-
-  private void fireListeners(HadoopCluster location, int kind) {
-    synchronized (listeners) {
-      for (IHadoopClusterListener listener : listeners) {
-        listener.serverChanged(location, kind);
-      }
-    }
-  }
-
-  public synchronized void removeServer(HadoopCluster server) {
-    this.servers.remove(server.getLocationName());
-    store();
-    fireListeners(server, SERVER_REMOVED);
-  }
-
-  public synchronized void addServer(HadoopCluster server) {
-    this.servers.put(server.getLocationName(), server);
-    store();
-    fireListeners(server, SERVER_ADDED);
-  }
-
-  /**
-   * Update one Hadoop location
-   * 
-   * @param originalName the original location name (might have changed)
-   * @param server the location
-   */
-  public synchronized void updateServer(String originalName,
-      HadoopCluster server) {
-
-    // Update the map if the location name has changed
-    if (!server.getLocationName().equals(originalName)) {
-      servers.remove(originalName);
-      servers.put(server.getLocationName(), server);
-    }
-    store();
-    fireListeners(server, SERVER_STATE_CHANGED);
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/8b28cfa4/org.apache.hdt.ui/src/org/apache/hdt/ui/views/ClusterView.java
----------------------------------------------------------------------
diff --git a/org.apache.hdt.ui/src/org/apache/hdt/ui/views/ClusterView.java b/org.apache.hdt.ui/src/org/apache/hdt/ui/views/ClusterView.java
index 711ccf3..40c43f7 100644
--- a/org.apache.hdt.ui/src/org/apache/hdt/ui/views/ClusterView.java
+++ b/org.apache.hdt.ui/src/org/apache/hdt/ui/views/ClusterView.java
@@ -24,11 +24,11 @@ import org.apache.hdt.core.cluster.HadoopCluster;
 import org.apache.hdt.core.cluster.HadoopJob;
 import org.apache.hdt.core.cluster.IHadoopClusterListener;
 import org.apache.hdt.core.cluster.IJobListener;
+import org.apache.hdt.core.cluster.ServerRegistry;
 import org.apache.hdt.core.cluster.utils.JarModule;
 import org.apache.hdt.ui.ImageLibrary;
 import org.apache.hdt.ui.actions.EditLocationAction;
 import org.apache.hdt.ui.actions.NewLocationAction;
-import org.apache.hdt.ui.cluster.ServerRegistry;
 import org.eclipse.jface.action.Action;
 import org.eclipse.jface.action.IAction;
 import org.eclipse.jface.action.IMenuListener;

http://git-wip-us.apache.org/repos/asf/incubator-hdt/blob/8b28cfa4/org.apache.hdt.ui/src/org/apache/hdt/ui/wizards/HadoopLocationWizard.java
----------------------------------------------------------------------
diff --git a/org.apache.hdt.ui/src/org/apache/hdt/ui/wizards/HadoopLocationWizard.java b/org.apache.hdt.ui/src/org/apache/hdt/ui/wizards/HadoopLocationWizard.java
index 8fe9d19..335d32c 100644
--- a/org.apache.hdt.ui/src/org/apache/hdt/ui/wizards/HadoopLocationWizard.java
+++ b/org.apache.hdt.ui/src/org/apache/hdt/ui/wizards/HadoopLocationWizard.java
@@ -31,7 +31,7 @@ import java.util.Map.Entry;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hdt.core.cluster.ConfProp;
 import org.apache.hdt.core.cluster.HadoopCluster;
-import org.apache.hdt.ui.cluster.ServerRegistry;
+import org.apache.hdt.core.cluster.ServerRegistry;
 import org.eclipse.jface.dialogs.IMessageProvider;
 import org.eclipse.jface.wizard.WizardPage;
 import org.eclipse.swt.SWT;