Posted to commits@hbase.apache.org by ap...@apache.org on 2009/07/03 04:14:47 UTC

svn commit: r790778 [1/2] - in /hadoop/hbase/trunk_on_hadoop-0.18.3: ./ src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/ src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/client/ src/contrib/stargate/src/java/org/apache/hado...

Author: apurtell
Date: Fri Jul  3 02:14:46 2009
New Revision: 790778

URL: http://svn.apache.org/viewvc?rev=790778&view=rev
Log:
HBASE-1608, HBASE-1599

Added:
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/filter/FilterList.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/filter/TestFilterList.java
Removed:
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/filter/FilterSet.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/filter/TestFilterSet.java
Modified:
    hadoop/hbase/trunk_on_hadoop-0.18.3/CHANGES.txt
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/Constants.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/Main.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/RESTServlet.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/RowSpec.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/VersionResource.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/client/Client.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/client/Cluster.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/client/Response.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/CellModel.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/CellSetModel.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/ColumnSchemaModel.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/IProtobufWrapper.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/RowModel.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/ScannerModel.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/StorageClusterStatusModel.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/StorageClusterVersionModel.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/TableInfoModel.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/TableListModel.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/TableModel.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/TableRegionModel.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/TableSchemaModel.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/VersionModel.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/provider/JAXBContextResolver.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/provider/consumer/ProtobufMessageBodyConsumer.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/provider/producer/PlainTextMessageBodyProducer.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/provider/producer/ProtobufMessageBodyProducer.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/test/org/apache/hadoop/hbase/stargate/TestRowResource.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/test/org/apache/hadoop/hbase/stargate/TestVersionResource.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/filter/PageFilter.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/master/HMaster.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/filter/TestPageFilter.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/test/org/apache/hadoop/hbase/io/hfile/TestCachedBlockQueue.java

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/CHANGES.txt?rev=790778&r1=790777&r2=790778&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/CHANGES.txt (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/CHANGES.txt Fri Jul  3 02:14:46 2009
@@ -25,6 +25,7 @@
                handled (holstad, jgray, rawson, stack)
    HBASE-1582  Translate ColumnValueFilter and RowFilterSet to the new
                Filter interface (Clint Morgan and Stack)
+   HBASE-1599  Fix TestFilterSet, broken up on hudson (Jon Gray via Stack)
 
   BUG FIXES
    HBASE-1140  "ant clean test" fails (Nitay Joffe via Stack)
@@ -240,6 +241,7 @@
    HBASE-1594  Fix scan addcolumns after hbase-1385 commit (broken hudson build)
    HBASE-1595  hadoop-default.xml and zoo.cfg in hbase jar
    HBASE-1602  HRegionServer won't go down since we added in new LruBlockCache
+   HBASE-1608  TestCachedBlockQueue failing on some jvms (Jon Gray via Stack)
 
   IMPROVEMENTS
    HBASE-1089  Add count of regions on filesystem to master UI; add percentage

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/Constants.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/Constants.java?rev=790778&r1=790777&r2=790778&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/Constants.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/Constants.java Fri Jul  3 02:14:46 2009
@@ -20,6 +20,9 @@
 
 package org.apache.hadoop.hbase.stargate;
 
+/**
+ * Common constants for org.apache.hadoop.hbase.stargate
+ */
 public interface Constants {
   public static final String MIMETYPE_TEXT = "text/plain";
   public static final String MIMETYPE_XML = "text/xml";

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/Main.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/Main.java?rev=790778&r1=790777&r2=790778&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/Main.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/Main.java Fri Jul  3 02:14:46 2009
@@ -24,7 +24,6 @@
 import org.apache.commons.cli.CommandLineParser;
 import org.apache.commons.cli.Options;
 import org.apache.commons.cli.PosixParser;
-import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.mortbay.jetty.Connector;
 import org.mortbay.jetty.Handler;
 import org.mortbay.jetty.NCSARequestLog;
@@ -37,6 +36,15 @@
 import org.mortbay.jetty.nio.SelectChannelConnector;
 import org.mortbay.thread.QueuedThreadPool;
 
+/**
+ * Main class for launching Stargate as a servlet hosted by an embedded Jetty
+ * servlet container.
+ * <p> 
+ * The following options are supported:
+ * <ul>
+ * <li>-p: service port</li>
+ * </ul>
+ */
 public class Main {
 
   public static void main(String[] args) throws Exception {
@@ -50,17 +58,6 @@
       port = Integer.valueOf(cmd.getOptionValue("p"));
     }
 
-    HBaseConfiguration conf = new HBaseConfiguration();
-    if (cmd.hasOption("m")) {
-      conf.set("hbase.master", cmd.getOptionValue("m"));
-    }
-
-    /*
-     * RuntimeMXBean runtime = ManagementFactory.getRuntimeMXBean(); if (runtime
-     * != null) { LOG.info("vmName=" + runtime.getVmName() + ", vmVendor=" +
-     * runtime.getVmVendor() + ", vmVersion=" + runtime.getVmVersion());
-     * LOG.info("vmInputArguments=" + runtime.getInputArguments()); }
-     */
     /*
      * poached from:
      * http://jetty.mortbay.org/xref/org/mortbay/jetty/example/LikeJettyXml.html
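
For illustration, a minimal sketch of launching the gateway from Java using the
entry point above; the choice of port 8080 (and that it is free) is an
assumption, not something this commit establishes:

  import org.apache.hadoop.hbase.stargate.Main;

  public class LaunchStargateSketch {
    public static void main(String[] args) throws Exception {
      // Start Stargate's embedded Jetty servlet container on an assumed free
      // port; equivalent to: java org.apache.hadoop.hbase.stargate.Main -p 8080
      Main.main(new String[] { "-p", "8080" });
    }
  }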

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/RESTServlet.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/RESTServlet.java?rev=790778&r1=790777&r2=790778&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/RESTServlet.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/RESTServlet.java Fri Jul  3 02:14:46 2009
@@ -32,8 +32,10 @@
 import org.apache.hadoop.hbase.util.Bytes;
 
 import com.sun.jersey.server.impl.container.servlet.ServletAdaptor;
-import com.sun.jersey.spi.container.servlet.ServletContainer;
 
+/**
+ * Singleton class encapsulating global REST servlet state and functions.
+ */
 public class RESTServlet extends ServletAdaptor {
   
   private static final long serialVersionUID = 1L;  
@@ -46,6 +48,10 @@
   protected Map<String,Integer> maxAgeMap = 
     Collections.synchronizedMap(new HashMap<String,Integer>());
 
+  /**
+   * @return the RESTServlet singleton instance
+   * @throws IOException
+   */
   public synchronized static RESTServlet getInstance() throws IOException {
     if (instance == null) {
       instance = new RESTServlet();
@@ -53,21 +59,33 @@
     return instance;
   }
 
+  /**
+   * Constructor
+   * @throws IOException
+   */
   public RESTServlet() throws IOException {
     this.conf = new HBaseConfiguration();
   }
 
 
+  /**
+   * Get or create a table pool for the given table. 
+   * @param name the table name
+   * @return the table pool
+   */
   protected HTablePool getTablePool(String name) {
     return HTablePool.getPool(conf, Bytes.toBytes(name));
   }
 
+  /**
+   * @return the servlet's global HBase configuration
+   */
   protected HBaseConfiguration getConfiguration() {
     return conf;
   }
 
   /**
-   * @param tableName
+   * @param tableName the table name
    * @return the maximum cache age suitable for use with this table, in
    *  seconds 
    * @throws IOException
@@ -97,30 +115,12 @@
     return DEFAULT_MAX_AGE;
   }
 
+  /**
+   * Signal that a previously calculated maximum cache age has been
+   * invalidated by a schema change.
+   * @param tableName the table name
+   */
   public void invalidateMaxAge(String tableName) {
     maxAgeMap.remove(tableName);
   }
-
-  public static final String getVersion() {
-    StringBuilder version = new StringBuilder();
-    version.append("Stargate ");
-    version.append(VERSION_STRING);
-    version.append(" [JVM: ");
-    version.append(System.getProperty("java.vm.vendor"));
-    version.append(' ');
-    version.append(System.getProperty("java.version"));
-    version.append('-');
-    version.append(System.getProperty("java.vm.version"));
-    version.append("] [OS: ");
-    version.append(System.getProperty("os.name"));
-    version.append(' ');
-    version.append(System.getProperty("os.version"));
-    version.append(' ');
-    version.append(System.getProperty("os.arch"));
-    version.append("] [Jersey: ");
-    version.append(ServletContainer.class.getPackage()
-      .getImplementationVersion());
-    version.append(']');
-    return version.toString();
-  }  
 }
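
A minimal usage sketch of the singleton and table pool accessors documented
above; the sketch class and the table name are hypothetical, and the class sits
in the stargate package only because getTablePool is protected:

  package org.apache.hadoop.hbase.stargate;

  import java.io.IOException;

  public class ServletStateSketch {
    public static void main(String[] args) throws IOException {
      // Obtain the global servlet state (created lazily on first access).
      RESTServlet servlet = RESTServlet.getInstance();

      // Get (or create) the shared HTable pool for a hypothetical table.
      servlet.getTablePool("example_table");

      // After a schema change, drop any cached max-age value for the table.
      servlet.invalidateMaxAge("example_table");
    }
  }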

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/RowSpec.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/RowSpec.java?rev=790778&r1=790777&r2=790778&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/RowSpec.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/RowSpec.java Fri Jul  3 02:14:46 2009
@@ -27,6 +27,12 @@
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.util.Bytes;
 
+/**
+ * Parses a path-based row/column/timestamp specification into its component
+ * elements.
+ */
 public class RowSpec {
   public static final long DEFAULT_START_TIMESTAMP = 0;
   public static final long DEFAULT_END_TIMESTAMP = Long.MAX_VALUE;

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/VersionResource.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/VersionResource.java?rev=790778&r1=790777&r2=790778&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/VersionResource.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/VersionResource.java Fri Jul  3 02:14:46 2009
@@ -34,6 +34,13 @@
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.stargate.model.VersionModel;
 
+/**
+ * Implements Stargate software version reporting via
+ * <p>
+ * <tt>/version/stargate</tt>
+ * <p>
+ * <tt>/version</tt> (alias for <tt>/version/stargate</tt>)
+ */
 @Path(Constants.PATH_VERSION)
 public class VersionResource implements Constants {
   private static final Log LOG = LogFactory.getLog(VersionResource.class);
@@ -46,6 +53,12 @@
     cacheControl.setNoTransform(false);
   }
 
+  /**
+   * Build a response for a version request.
+   * @param context servlet context
+   * @param uriInfo (JAX-RS context variable) request URL
+   * @return a response for a version request 
+   */
   @GET
   @Produces({MIMETYPE_TEXT, MIMETYPE_XML, MIMETYPE_JSON, MIMETYPE_JAVASCRIPT,
     MIMETYPE_PROTOBUF})
@@ -57,7 +70,10 @@
     response.cacheControl(cacheControl);
     return response.build();
   }
-  
+
+  /**
+   * Dispatch <tt>/version/stargate</tt> to self.
+   */
   // "/version/stargate" is an alias for "/version"
   @Path("stargate")
   public VersionResource getVersionResource() {

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/client/Client.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/client/Client.java?rev=790778&r1=790777&r2=790778&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/client/Client.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/client/Client.java Fri Jul  3 02:14:46 2009
@@ -39,6 +39,10 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
+/**
+ * A wrapper around HttpClient which provides some useful functions and
+ * semantics for interacting with the Stargate REST gateway.
+ */
 public class Client {
   public static final Header[] EMPTY_HEADER_ARRAY = new Header[0];
 
@@ -47,10 +51,17 @@
   private HttpClient httpClient;
   private Cluster cluster;
 
+  /**
+   * Default Constructor
+   */
   public Client() {
     this(null);
   }
 
+  /**
+   * Constructor
+   * @param cluster the cluster definition
+   */
   public Client(Cluster cluster) {
     this.cluster = cluster;
     httpClient = new HttpClient(new MultiThreadedHttpConnectionManager());
@@ -61,37 +72,61 @@
     clientParams.setVersion(HttpVersion.HTTP_1_1);
   }
 
+  /**
+   * Shut down the client. Close any open persistent connections. 
+   */
   public void shutdown() {
     MultiThreadedHttpConnectionManager manager = 
       (MultiThreadedHttpConnectionManager) httpClient.getHttpConnectionManager();
     manager.shutdown();
   }
 
+  /**
+   * Execute a transaction method given only the path. Will select at random
+   * one of the members of the supplied cluster definition and iterate through
+   * the list until a transaction can be successfully completed. The
+   * definition of success here is a complete HTTP transaction, irrespective
+   * of result code.  
+   * @param cluster the cluster definition
+   * @param method the transaction method
+   * @param headers HTTP header values to send
+   * @param path the path
+   * @return the HTTP response code
+   * @throws IOException
+   */
   @SuppressWarnings("deprecation")
-  public int executePathOnly(Cluster c, HttpMethod method, Header[] headers,
-      String path) throws IOException {
+  public int executePathOnly(Cluster cluster, HttpMethod method,
+      Header[] headers, String path) throws IOException {
     IOException lastException;
-    if (c.nodes.size() < 1) {
+    if (cluster.nodes.size() < 1) {
       throw new IOException("Cluster is empty");
     }
-    int start = (int)Math.round((c.nodes.size() - 1) * Math.random());
+    int start = (int)Math.round((cluster.nodes.size() - 1) * Math.random());
     int i = start;
     do {
-      c.lastHost = c.nodes.get(i);
+      cluster.lastHost = cluster.nodes.get(i);
       try {
         StringBuffer sb = new StringBuffer();
         sb.append("http://");
-        sb.append(c.lastHost);
+        sb.append(cluster.lastHost);
         sb.append(path);
         URI uri = new URI(sb.toString());
         return executeURI(method, headers, uri.toString());
       } catch (IOException e) {
         lastException = e;
       }
-    } while (++i != start && i < c.nodes.size());
+    } while (++i != start && i < cluster.nodes.size());
     throw lastException;
   }
 
+  /**
+   * Execute a transaction method given a complete URI.
+   * @param method the transaction method
+   * @param headers HTTP header values to send
+   * @param uri the URI
+   * @return the HTTP response code
+   * @throws IOException
+   */
   @SuppressWarnings("deprecation")
   public int executeURI(HttpMethod method, Header[] headers, String uri)
       throws IOException {
@@ -111,57 +146,133 @@
     return code;
   }
 
-  public int execute(Cluster c, HttpMethod method, Header[] headers,
+  /**
+   * Execute a transaction method. Calls either <tt>executePathOnly</tt> or
+   * <tt>executeURI</tt> depending on whether 'path' contains only a path or
+   * a complete URI, respectively.
+   * @param cluster the cluster definition
+   * @param method the HTTP method
+   * @param headers HTTP header values to send
+   * @param path the path or URI
+   * @return the HTTP response code
+   * @throws IOException
+   */
+  public int execute(Cluster cluster, HttpMethod method, Header[] headers,
       String path) throws IOException {
     if (path.startsWith("/")) {
-      return executePathOnly(c, method, headers, path);
+      return executePathOnly(cluster, method, headers, path);
     }
     return executeURI(method, headers, path);
   }
 
+  /**
+   * @return the cluster definition
+   */
   public Cluster getCluster() {
     return cluster;
   }
 
+  /**
+   * @param cluster the cluster definition
+   */
   public void setCluster(Cluster cluster) {
     this.cluster = cluster;
   }
 
+  /**
+   * Send a HEAD request 
+   * @param path the path or URI
+   * @return a Response object with response detail
+   * @throws IOException
+   */
   public Response head(String path) throws IOException {
-    return head(cluster, path);
+    return head(cluster, path, null);
   }
 
-  public Response head(Cluster c, String path) throws IOException {
+  /**
+   * Send a HEAD request 
+   * @param cluster the cluster definition
+   * @param path the path or URI
+   * @param headers the HTTP headers to include in the request
+   * @return a Response object with response detail
+   * @throws IOException
+   */
+  public Response head(Cluster cluster, String path, Header[] headers) 
+      throws IOException {
     HeadMethod method = new HeadMethod();
-    int code = execute(c, method, null, path);
-    Header[] headers = method.getResponseHeaders();
+    int code = execute(cluster, method, headers, path);
+    headers = method.getResponseHeaders();
     method.releaseConnection();
     return new Response(code, headers, null);
   }
 
+  /**
+   * Send a GET request 
+   * @param path the path or URI
+   * @return a Response object with response detail
+   * @throws IOException
+   */
   public Response get(String path) throws IOException {
     return get(cluster, path);
   }
 
-  public Response get(Cluster c, String path) throws IOException {
-    return get(c, path, EMPTY_HEADER_ARRAY);
-  }
-
+  /**
+   * Send a GET request 
+   * @param cluster the cluster definition
+   * @param path the path or URI
+   * @return a Response object with response detail
+   * @throws IOException
+   */
+  public Response get(Cluster cluster, String path) throws IOException {
+    return get(cluster, path, EMPTY_HEADER_ARRAY);
+  }
+
+  /**
+   * Send a GET request 
+   * @param path the path or URI
+   * @param accept Accept header value
+   * @return a Response object with response detail
+   * @throws IOException
+   */
   public Response get(String path, String accept) throws IOException {
     return get(cluster, path, accept);
   }
 
-  public Response get(Cluster c, String path, String accept)
+  /**
+   * Send a GET request 
+   * @param cluster the cluster definition
+   * @param path the path or URI
+   * @param accept Accept header value
+   * @return a Response object with response detail
+   * @throws IOException
+   */
+  public Response get(Cluster cluster, String path, String accept)
       throws IOException {
     Header[] headers = new Header[1];
     headers[0] = new Header("Accept", accept);
-    return get(c, path, headers);
+    return get(cluster, path, headers);
   }
 
+  /**
+   * Send a GET request
+   * @param path the path or URI
+   * @param headers the HTTP headers to include in the request, 
+   * <tt>Accept</tt> must be supplied
+   * @return a Response object with response detail
+   * @throws IOException
+   */
   public Response get(String path, Header[] headers) throws IOException {
     return get(cluster, path, headers);
   }
 
+  /**
+   * Send a GET request
+   * @param cluster the cluster definition
+   * @param path the path or URI
+   * @param headers the HTTP headers to include in the request
+   * @return a Response object with response detail
+   * @throws IOException
+   */
   public Response get(Cluster c, String path, Header[] headers) 
       throws IOException {
     GetMethod method = new GetMethod();
@@ -172,69 +283,154 @@
     return new Response(code, headers, body);
   }
 
+  /**
+   * Send a PUT request
+   * @param path the path or URI
+   * @param contentType the content MIME type
+   * @param content the content bytes
+   * @return a Response object with response detail
+   * @throws IOException
+   */
   public Response put(String path, String contentType, byte[] content)
       throws IOException {
     return put(cluster, path, contentType, content);
   }
 
-  public Response put(Cluster c, String path, String contentType, 
+  /**
+   * Send a PUT request
+   * @param cluster the cluster definition
+   * @param path the path or URI
+   * @param contentType the content MIME type
+   * @param content the content bytes
+   * @return a Response object with response detail
+   * @throws IOException
+   */
+  public Response put(Cluster cluster, String path, String contentType, 
       byte[] content) throws IOException {
     Header[] headers = new Header[1];
     headers[0] = new Header("Content-Type", contentType);
-    return put(c, path, headers, content);
+    return put(cluster, path, headers, content);
   }
 
-  public Response put(String path, Header[] headers, byte[] body) 
+  /**
+   * Send a PUT request
+   * @param path the path or URI
+   * @param headers the HTTP headers to include, <tt>Content-Type</tt> must be
+   * supplied
+   * @param content the content bytes
+   * @return a Response object with response detail
+   * @throws IOException
+   */
+  public Response put(String path, Header[] headers, byte[] content) 
       throws IOException {
-    return put(cluster, path, headers, body);
+    return put(cluster, path, headers, content);
   }
 
-  public Response put(Cluster c, String path, Header[] headers, 
-      byte[] body) throws IOException {
+  /**
+   * Send a PUT request
+   * @param cluster the cluster definition
+   * @param path the path or URI
+   * @param headers the HTTP headers to include, <tt>Content-Type</tt> must be
+   * supplied
+   * @param content the content bytes
+   * @return a Response object with response detail
+   * @throws IOException
+   */
+  public Response put(Cluster cluster, String path, Header[] headers, 
+      byte[] content) throws IOException {
     PutMethod method = new PutMethod();
-    method.setRequestEntity(new ByteArrayRequestEntity(body));
-    int code = execute(c, method, headers, path);
+    method.setRequestEntity(new ByteArrayRequestEntity(content));
+    int code = execute(cluster, method, headers, path);
     headers = method.getResponseHeaders();
-    body = method.getResponseBody();
+    content = method.getResponseBody();
     method.releaseConnection();
-    return new Response(code, headers, body);
+    return new Response(code, headers, content);
   }
 
+  /**
+   * Send a POST request
+   * @param path the path or URI
+   * @param contentType the content MIME type
+   * @param content the content bytes
+   * @return a Response object with response detail
+   * @throws IOException
+   */
   public Response post(String path, String contentType, byte[] content)
       throws IOException {
     return post(cluster, path, contentType, content);
   }
 
-  public Response post(Cluster c, String path, String contentType, 
+  /**
+   * Send a POST request
+   * @param cluster the cluster definition
+   * @param path the path or URI
+   * @param contentType the content MIME type
+   * @param content the content bytes
+   * @return a Response object with response detail
+   * @throws IOException
+   */
+  public Response post(Cluster cluster, String path, String contentType, 
       byte[] content) throws IOException {
     Header[] headers = new Header[1];
     headers[0] = new Header("Content-Type", contentType);
-    return post(c, path, headers, content);
+    return post(cluster, path, headers, content);
   }
 
+  /**
+   * Send a POST request
+   * @param path the path or URI
+   * @param headers the HTTP headers to include, <tt>Content-Type</tt> must be
+   * supplied
+   * @param content the content bytes
+   * @return a Response object with response detail
+   * @throws IOException
+   */
   public Response post(String path, Header[] headers, byte[] content) 
       throws IOException {
     return post(cluster, path, headers, content);
   }
 
-  public Response post(Cluster c, String path, Header[] headers, 
+  /**
+   * Send a POST request
+   * @param cluster the cluster definition
+   * @param path the path or URI
+   * @param headers the HTTP headers to include, <tt>Content-Type</tt> must be
+   * supplied
+   * @param content the content bytes
+   * @return a Response object with response detail
+   * @throws IOException
+   */
+  public Response post(Cluster cluster, String path, Header[] headers, 
       byte[] content) throws IOException {
     PostMethod method = new PostMethod();
     method.setRequestEntity(new ByteArrayRequestEntity(content));
-    int code = execute(c, method, headers, path);
+    int code = execute(cluster, method, headers, path);
     headers = method.getResponseHeaders();
     content = method.getResponseBody();
     method.releaseConnection();
     return new Response(code, headers, content);
   }
 
+  /**
+   * Send a DELETE request
+   * @param path the path or URI
+   * @return a Response object with response detail
+   * @throws IOException
+   */
   public Response delete(String path) throws IOException {
     return delete(cluster, path);
   }
 
-  public Response delete(Cluster c, String path) throws IOException {
+  /**
+   * Send a DELETE request
+   * @param cluster the cluster definition
+   * @param path the path or URI
+   * @return a Response object with response detail
+   * @throws IOException
+   */
+  public Response delete(Cluster cluster, String path) throws IOException {
     DeleteMethod method = new DeleteMethod();
-    int code = execute(c, method, null, path);
+    int code = execute(cluster, method, null, path);
     Header[] headers = method.getResponseHeaders();
     method.releaseConnection();
     return new Response(code, headers);
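
A usage sketch of the client methods renamed and documented above. The gateway
addresses, resource path, and content type are hypothetical, and the Cluster
add() method and the Response getCode()/getBody() accessors are assumed; they
are not shown in this hunk:

  import java.io.IOException;

  import org.apache.hadoop.hbase.stargate.client.Client;
  import org.apache.hadoop.hbase.stargate.client.Cluster;
  import org.apache.hadoop.hbase.stargate.client.Response;

  public class ClientSketch {
    public static void main(String[] args) throws IOException {
      // Two hypothetical gateway nodes; execute() picks one at random and
      // moves on to the next if the HTTP transaction cannot be completed.
      Cluster cluster = new Cluster();
      cluster.add("gateway1.example.com:8080");  // add(String) is assumed
      cluster.add("gateway2.example.com:8080");

      Client client = new Client(cluster);
      try {
        // GET the Stargate software version as plain text.
        Response version = client.get("/version", "text/plain");
        System.out.println(version.getCode());          // getCode() assumed

        // PUT some bytes to a hypothetical resource path and read them back.
        byte[] body = "value".getBytes();
        client.put("/example_table/row1/cf:qual", "application/octet-stream",
            body);
        Response get = client.get("/example_table/row1/cf:qual",
            "application/octet-stream");
        System.out.println(get.getBody().length);       // getBody() assumed

        // Remove the hypothetical row again.
        client.delete("/example_table/row1");
      } finally {
        client.shutdown();
      }
    }
  }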

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/client/Cluster.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/client/Cluster.java?rev=790778&r1=790777&r2=790778&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/client/Cluster.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/client/Cluster.java Fri Jul  3 02:14:46 2009
@@ -24,6 +24,10 @@
 import java.util.Collections;
 import java.util.List;
 
+/**
+ * A list of 'host:port' addresses of HTTP servers operating as a single
+ * entity, for example multiple redundant web service gateways.
+ */
 public class Cluster {
   protected List<String> nodes = 
     Collections.synchronizedList(new ArrayList<String>());

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/client/Response.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/client/Response.java?rev=790778&r1=790777&r2=790778&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/client/Response.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/client/Response.java Fri Jul  3 02:14:46 2009
@@ -22,6 +22,9 @@
 
 import org.apache.commons.httpclient.Header;
 
+/**
+ * The HTTP result code, response headers, and body of an HTTP response.
+ */
 public class Response {
   private int code;
   private Header[] headers;

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/CellModel.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/CellModel.java?rev=790778&r1=790777&r2=790778&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/CellModel.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/CellModel.java Fri Jul  3 02:14:46 2009
@@ -33,34 +33,41 @@
 
 import com.google.protobuf.ByteString;
 
+/**
+ * Representation of a cell. A cell is a single value associated with a column
+ * and optional qualifier, and either the timestamp when it was stored or the
+ * user-provided timestamp if one was explicitly supplied.
+ */
 @XmlRootElement(name="Cell")
 @XmlType(propOrder={"column","timestamp"})
 public class CellModel implements IProtobufWrapper, Serializable {
   private static final long serialVersionUID = 1L;
   
-  private long   timestamp = HConstants.LATEST_TIMESTAMP;
+  private long timestamp;
   private byte[] column;
   private byte[] value;
 
+  /**
+   * Default constructor
+   */
   public CellModel() {}
 
   /**
+   * Constructor
    * @param column
    * @param value
    */
   public CellModel(byte[] column, byte[] value) {
-    super();
-    this.column = column;
-    this.value = value;
+    this(column, HConstants.LATEST_TIMESTAMP, value);
   }
 
   /**
+   * Constructor
    * @param column
    * @param timestamp
    * @param value
    */
   public CellModel(byte[] column, long timestamp, byte[] value) {
-    super();
     this.column = column;
     this.timestamp = timestamp;
     this.value = value;

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/CellSetModel.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/CellSetModel.java?rev=790778&r1=790777&r2=790778&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/CellSetModel.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/CellSetModel.java Fri Jul  3 02:14:46 2009
@@ -34,22 +34,27 @@
 
 import com.google.protobuf.ByteString;
 
+/**
+ * Representation of a grouping of cells. May contain cells from more than
+ * one row. Encapsulates RowModel and CellModel models.
+ */
 @XmlRootElement(name="CellSet")
 public class CellSetModel implements Serializable, IProtobufWrapper {
 
   private static final long serialVersionUID = 1L;
   
   private List<RowModel> rows;
-  
-  
+
+  /**  
+   * Constructor
+   */
   public CellSetModel() {
     this.rows = new ArrayList<RowModel>();
   }
   
   /**
-   * @param rows
+   * @param rows the rows
    */
-  
   public CellSetModel(List<RowModel> rows) {
     super();
     this.rows = rows;
@@ -57,8 +62,7 @@
   
   /**
    * Add a row to this cell set
-   * 
-   * @param row
+   * @param row the row
    */
   public void addRow(RowModel row) {
     rows.add(row);

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/ColumnSchemaModel.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/ColumnSchemaModel.java?rev=790778&r1=790777&r2=790778&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/ColumnSchemaModel.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/ColumnSchemaModel.java Fri Jul  3 02:14:46 2009
@@ -33,6 +33,9 @@
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
 
+/**
+ * Representation of a column family schema.
+ */
 @XmlRootElement(name="ColumnSchema")
 @XmlType(propOrder = {"name"})
 public class ColumnSchemaModel implements Serializable {
@@ -48,14 +51,27 @@
   private String name;
   private Map<QName,Object> attrs = new HashMap<QName,Object>();
 
+  /**
+   * Default constructor
+   */
   public ColumnSchemaModel() {}
 
+  /**
+   * Add an attribute to the column family schema
+   * @param name the attribute name
+   * @param value the attribute value
+   */
   public void addAttribute(String name, Object value) {
     attrs.put(new QName(name), value);
   }
 
+  /**
+   * @param name the attribute name
+   * @return the attribute value
+   */
   public String getAttribute(String name) {
-    return attrs.get(new QName(name)).toString();
+    Object o = attrs.get(new QName(name));
+    return o != null ? o.toString(): null;
   }
 
   /**
@@ -106,51 +122,78 @@
   // cannot be standard bean type getters and setters, otherwise this would
   // confuse JAXB
 
+  /**
+   * @return true if the BLOCKCACHE attribute is present and true
+   */
   public boolean __getBlockcache() {
     Object o = attrs.get(BLOCKCACHE);
     return o != null ? 
       Boolean.valueOf(o.toString()) : HColumnDescriptor.DEFAULT_BLOCKCACHE;
   }
 
+  /**
+   * @return the value of the BLOCKSIZE attribute or its default if it is unset
+   */
   public int __getBlocksize() {
     Object o = attrs.get(BLOCKSIZE);
     return o != null ? 
       Integer.valueOf(o.toString()) : HColumnDescriptor.DEFAULT_BLOCKSIZE;
   }
 
+  /**
+   * @return true if the BLOOMFILTER attribute is present and true
+   */
   public boolean __getBloomfilter() {
     Object o = attrs.get(BLOOMFILTER);
     return o != null ? 
       Boolean.valueOf(o.toString()) : HColumnDescriptor.DEFAULT_BLOOMFILTER;
   }
 
+  /**
+   * @return the value of the COMPRESSION attribute or its default if it is unset
+   */
   public String __getCompression() {
     Object o = attrs.get(COMPRESSION);
     return o != null ? o.toString() : HColumnDescriptor.DEFAULT_COMPRESSION;
   }
 
+  /**
+   * @return true if the IN_MEMORY attribute is present and true
+   */
   public boolean __getInMemory() {
     Object o = attrs.get(IN_MEMORY);
     return o != null ? 
       Boolean.valueOf(o.toString()) : HColumnDescriptor.DEFAULT_IN_MEMORY;
   }
 
+  /**
+   * @return the value of the TTL attribute or its default if it is unset
+   */
   public int __getTTL() {
     Object o = attrs.get(TTL);
     return o != null ? 
       Integer.valueOf(o.toString()) : HColumnDescriptor.DEFAULT_TTL;
   }
 
+  /**
+   * @return the value of the VERSIONS attribute or its default if it is unset
+   */
   public int __getVersions() {
     Object o = attrs.get(VERSIONS);
     return o != null ? 
       Integer.valueOf(o.toString()) : HColumnDescriptor.DEFAULT_VERSIONS;
   }
 
+  /**
+   * @param value the desired value of the BLOCKSIZE attribute
+   */
   public void __setBlocksize(int value) {
     attrs.put(BLOCKSIZE, Integer.toString(value));
   }
 
+  /**
+   * @param value the desired value of the BLOCKCACHE attribute
+   */
   public void __setBlockcache(boolean value) {
     attrs.put(BLOCKCACHE, Boolean.toString(value));
   }
@@ -159,18 +202,30 @@
     attrs.put(BLOOMFILTER, Boolean.toString(value));
   }
 
+  /**
+   * @param value the desired value of the COMPRESSION attribute
+   */
   public void __setCompression(String value) {
     attrs.put(COMPRESSION, value); 
   }
 
+  /**
+   * @param value the desired value of the IN_MEMORY attribute
+   */
   public void __setInMemory(boolean value) {
     attrs.put(IN_MEMORY, Boolean.toString(value));
   }
 
+  /**
+   * @param value the desired value of the TTL attribute
+   */
   public void __setTTL(int value) {
     attrs.put(TTL, Integer.toString(value));
   }
 
+  /**
+   * @param value the desired value of the VERSIONS attribute
+   */
   public void __setVersions(int value) {
     attrs.put(VERSIONS, Integer.toString(value));
   }
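
A short sketch of the attribute accessors documented above, including the now
null-safe getAttribute; the custom attribute key is hypothetical:

  import org.apache.hadoop.hbase.stargate.model.ColumnSchemaModel;

  public class ColumnSchemaSketch {
    public static void main(String[] args) {
      ColumnSchemaModel family = new ColumnSchemaModel();

      // The __set helpers store typed values as strings under well-known keys.
      family.__setBlocksize(65536);
      family.__setInMemory(true);
      family.__setVersions(1);

      // Arbitrary attributes can also be attached directly.
      family.addAttribute("CUSTOM_ATTRIBUTE", "example");  // hypothetical key

      // getAttribute now returns null instead of throwing when a key is unset.
      System.out.println(family.getAttribute("COMPRESSION"));  // null if unset
      System.out.println(family.__getBlocksize());             // 65536
    }
  }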

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/IProtobufWrapper.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/IProtobufWrapper.java?rev=790778&r1=790777&r2=790778&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/IProtobufWrapper.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/IProtobufWrapper.java Fri Jul  3 02:14:46 2009
@@ -22,8 +22,23 @@
 
 import java.io.IOException;
 
+/**
+ * Common interface for models capable of supporting protobuf marshalling
+ * and unmarshalling. Hooks up to the ProtobufMessageBodyConsumer and
+ * ProtobufMessageBodyProducer adapters. 
+ */
 public abstract interface IProtobufWrapper {
+  /**
+   * @return the protobuf representation of the model
+   */
   public byte[] createProtobufOutput();
+
+  /**
+   * Initialize the model from a protobuf representation.
+   * @param message the raw bytes of the protobuf message
+   * @return reference to self for convenience
+   * @throws IOException
+   */
   public IProtobufWrapper getObjectFromMessage(byte[] message)
     throws IOException;
 }
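
A sketch of the marshalling round trip this interface describes, using
CellSetModel (shown earlier) as the concrete model; the set is left empty here
purely for brevity:

  import java.io.IOException;

  import org.apache.hadoop.hbase.stargate.model.CellSetModel;
  import org.apache.hadoop.hbase.stargate.model.IProtobufWrapper;

  public class ProtobufRoundTripSketch {
    public static void main(String[] args) throws IOException {
      // Any model implementing IProtobufWrapper can be marshalled to raw bytes.
      IProtobufWrapper model = new CellSetModel();
      byte[] wire = model.createProtobufOutput();
      System.out.println("serialized " + wire.length + " bytes");

      // A fresh instance can then be initialized from those bytes;
      // getObjectFromMessage returns a reference to the populated model.
      CellSetModel restored =
          (CellSetModel) new CellSetModel().getObjectFromMessage(wire);
    }
  }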

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/RowModel.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/RowModel.java?rev=790778&r1=790777&r2=790778&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/RowModel.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/RowModel.java Fri Jul  3 02:14:46 2009
@@ -29,60 +29,69 @@
 import javax.xml.bind.annotation.XmlElement;
 import javax.xml.bind.annotation.XmlRootElement;
 
+/**
+ * Representation of a row. A row is a related set of cells, grouped by common
+ * row key. RowModels do not appear in results by themselves. They are always
+ * encapsulated within CellSetModels.
+ */
 @XmlRootElement(name="Row")
 public class RowModel implements IProtobufWrapper, Serializable {
   private static final long serialVersionUID = 1L;
 
-  private byte[]            key;
-  private List<CellModel>   cells = new ArrayList<CellModel>();
+  private byte[] key;
+  private List<CellModel> cells = new ArrayList<CellModel>();
 
+  /**
+   * Default constructor
+   */
   public RowModel() { }
 
   /**
-   * @param key
+   * Constructor
+   * @param key the row key
    */
   public RowModel(final String key) {
     this(key.getBytes());
   }
   
   /**
-   * @param key
+   * Constructor
+   * @param key the row key
    */
   public RowModel(final byte[] key) {
-    super();
     this.key = key;
     cells = new ArrayList<CellModel>();
   }
 
   /**
-   * @param key
-   * @param cells
+   * Constructor
+   * @param key the row key
+   * @param cells the cells
    */
   public RowModel(final String key, final List<CellModel> cells) {
     this(key.getBytes(), cells);
   }
   
   /**
-   * @param key
-   * @param cells
+   * Constructor
+   * @param key the row key
+   * @param cells the cells
    */
   public RowModel(final byte[] key, final List<CellModel> cells) {
-    super();
     this.key = key;
     this.cells = cells;
   }
   
   /**
    * Adds a cell to the list of cells for this row
-   * 
-   * @param cell
+   * @param cell the cell
    */
   public void addCell(CellModel cell) {
     cells.add(cell);
   }
 
   /**
-   * @return the key
+   * @return the row key
    */
   @XmlAttribute
   public byte[] getKey() {
@@ -90,7 +99,7 @@
   }
 
   /**
-   * @param key the key to set
+   * @param key the row key
    */
   public void setKey(byte[] key) {
     this.key = key;
@@ -118,5 +127,4 @@
     throw new UnsupportedOperationException(
         "no protobuf equivalent to RowModel");
   }
-
 }
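
Putting the cell, row, and cell set models together, a construction sketch; the
row key, column, timestamp, and values are hypothetical:

  import org.apache.hadoop.hbase.stargate.model.CellModel;
  import org.apache.hadoop.hbase.stargate.model.CellSetModel;
  import org.apache.hadoop.hbase.stargate.model.RowModel;

  public class CellSetSketch {
    public static void main(String[] args) {
      // Without an explicit timestamp the cell now defaults to
      // HConstants.LATEST_TIMESTAMP via the chained constructor.
      CellModel latest = new CellModel("cf:qual".getBytes(), "v2".getBytes());

      // A cell carrying an explicit, user-provided timestamp.
      CellModel older =
          new CellModel("cf:qual".getBytes(), 1246586400000L, "v1".getBytes());

      // A row groups cells under a common row key...
      RowModel row = new RowModel("row1");
      row.addCell(latest);
      row.addCell(older);

      // ...and a cell set groups rows, possibly spanning many row keys.
      CellSetModel cellSet = new CellSetModel();
      cellSet.addRow(row);
    }
  }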

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/ScannerModel.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/ScannerModel.java?rev=790778&r1=790777&r2=790778&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/ScannerModel.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/ScannerModel.java Fri Jul  3 02:14:46 2009
@@ -34,6 +34,9 @@
 
 import com.google.protobuf.ByteString;
 
+/**
+ * A representation of Scanner parameters.
+ */
 @XmlRootElement(name="Scanner")
 public class ScannerModel implements IProtobufWrapper, Serializable {
   private static final long serialVersionUID = 1L;
@@ -45,8 +48,20 @@
   private long startTime = 0;
   private long endTime = Long.MAX_VALUE;
 
+  /**
+   * Default constructor
+   */
   public ScannerModel() {}
 
+  /**
+   * Constructor
+   * @param startRow the start key of the row-range
+   * @param endRow the end key of the row-range
+   * @param columns the columns to scan
+   * @param batch the number of values to return per batch
+   * @param endTime the upper bound on timestamps of values of interest
+   * (values with timestamps later than this are excluded)
+   */
   public ScannerModel(byte[] startRow, byte[] endRow, List<byte[]> columns,
       int batch, long endTime) {
     super();
@@ -57,6 +72,17 @@
     this.endTime = endTime;
   }
 
+  /**
+   * Constructor 
+   * @param startRow the start key of the row-range
+   * @param endRow the end key of the row-range
+   * @param columns the columns to scan
+   * @param batch the number of values to return per batch
+   * @param startTime the lower bound on timestamps of values of interest
+   * (values with timestamps earlier than this are excluded)
+   * @param endTime the upper bound on timestamps of values of interest
+   * (values with timestamps later than this are excluded)
+   */
   public ScannerModel(byte[] startRow, byte[] endRow, List<byte[]> columns,
       int batch, long startTime, long endTime) {
     super();
@@ -68,6 +94,10 @@
     this.endTime = endTime;
   }
 
+  /**
+   * Add a column to the column set
+   * @param column the column name, as &lt;column&gt;(:&lt;qualifier&gt;)?
+   */
   public void addColumn(byte[] column) {
     columns.add(column);
   }
@@ -156,14 +186,14 @@
   }
 
   /**
-   * @param startTime the lower bound on timestamps of items of interest
+   * @param startTime the lower bound on timestamps of values of interest
    */
   public void setStartTime(long startTime) {
     this.startTime = startTime;
   }
 
   /**
-   * @param endTime the upper bound on timestamps of items of interest
+   * @param endTime the upper bound on timestamps of values of interest
    */
   public void setEndTime(long endTime) {
     this.endTime = endTime;

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/StorageClusterStatusModel.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/StorageClusterStatusModel.java?rev=790778&r1=790777&r2=790778&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/StorageClusterStatusModel.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/StorageClusterStatusModel.java Fri Jul  3 02:14:46 2009
@@ -35,27 +35,58 @@
 
 import com.google.protobuf.ByteString;
 
+/**
+ * Representation of the status of a storage cluster:
+ * <p>
+ * <ul>
+ * <li>regions: the total number of regions served by the cluster</li>
+ * <li>requests: the total number of requests per second handled by the
+ * cluster in the last reporting interval</li>
+ * <li>averageLoad: the average load of the region servers in the cluster</li>
+ * <li>liveNodes: detailed status of the live region servers</li>
+ * <li>deadNodes: the names of region servers declared dead</li>
+ * </ul>
+ */
 @XmlRootElement(name="ClusterStatus")
 public class StorageClusterStatusModel 
     implements Serializable, IProtobufWrapper {
 	private static final long serialVersionUID = 1L;
 
+	/**
+	 * Represents a region server.
+	 */
 	public static class Node {
 	  
+	  /**
+	   * Represents a region hosted on a region server.
+	   */
 	  public static class Region {
 	    private byte[] name;
 
+	    /**
+	     * Default constructor
+	     */
 	    public Region() {}
 
+	    /**
+	     * Constructor
+	     * @param name the region name
+	     */
 	    public Region(byte[] name) {
 	      this.name = name;
 	    }
 
+	    /**
+	     * @return the region name
+	     */
 	    @XmlAttribute
 	    public byte[] getName() {
 	      return name;
 	    }
 
+	    /**
+	     * @param name the region name
+	     */
 	    public void setName(byte[] name) {
 	      this.name = name;
 	    }
@@ -66,53 +97,94 @@
     private int requests;
     private List<Region> regions = new ArrayList<Region>();
 
+    /**
+     * Add a region name to the list
+     * @param name the region name
+     */
     public void addRegion(byte[] name) {
       regions.add(new Region(name));
     }
 
-    public Region getRegion(int i) {
-      return regions.get(i);
+    /**
+     * @param index the index
+     * @return the region
+     */
+    public Region getRegion(int index) {
+      return regions.get(index);
     }
 
+    /**
+     * Default constructor
+     */
     public Node() {}
 
+    /**
+     * Constructor
+     * @param name the region server name
+     * @param startCode the region server's start code
+     */
     public Node(String name, long startCode) {
       this.name = name;
       this.startCode = startCode;
     }
 
+    /**
+     * @return the region server's name
+     */
     @XmlAttribute
     public String getName() {
       return name;
     }
 
+    /**
+     * @return the region server's start code
+     */
     @XmlAttribute
     public long getStartCode() {
       return startCode;
     }
 
+    /**
+     * @return the list of regions served by the region server
+     */
     @XmlElement(name="Region")
     public List<Region> getRegions() {
       return regions;
     }
 
+    /**
+     * @return the number of requests per second processed by the region server
+     */
     @XmlAttribute
     public int getRequests() {
       return requests;
     }
 
+    /**
+     * @param name the region server's hostname
+     */
     public void setName(String name) {
       this.name = name;
     }
 
+    /**
+     * @param startCode the region server's start code
+     */
     public void setStartCode(long startCode) {
       this.startCode = startCode;
     }
 
+    /**
+     * @param regions a list of regions served by the region server
+     */
     public void setRegions(List<Region> regions) {
       this.regions = regions;
     }
 
+    /**
+     * @param requests the number of requests per second processed by the
+     * region server
+     */
     public void setRequests(int requests) {
       this.requests = requests;
     }
@@ -124,69 +196,121 @@
 	private int requests;
 	private double averageLoad;
 
+	/**
+	 * Add a live node to the cluster representation.
+	 * @param name the region server name
+	 * @param startCode the region server's start code
+	 */
 	public Node addLiveNode(String name, long startCode) {
 	  Node node = new Node(name, startCode);
 	  liveNodes.add(node);
 	  return node;
 	}
 
-	public Node getLiveNode(int i) {
-	  return liveNodes.get(i);
+	/**
+	 * @param index the index
+	 * @return the region server model
+	 */
+	public Node getLiveNode(int index) {
+	  return liveNodes.get(index);
 	}
 
+	/**
+	 * Add a dead node to the cluster representation.
+	 * @param node the dead region server's name
+	 */
 	public void addDeadNode(String node) {
 	  deadNodes.add(node);
 	}
 	
-	public String getDeadNode(int i) {
-	  return deadNodes.get(i);
+	/**
+	 * @param index the index
+	 * @return the dead region server's name
+	 */
+	public String getDeadNode(int index) {
+	  return deadNodes.get(index);
 	}
 
+	/**
+	 * Default constructor
+	 */
 	public StorageClusterStatusModel() {}
 
+	/**
+	 * @return the list of live nodes
+	 */
 	@XmlElement(name="Node")
 	@XmlElementWrapper(name="LiveNodes")
 	public List<Node> getLiveNodes() {
 	  return liveNodes;
 	}
 
+	/**
+	 * @return the list of dead nodes
+	 */
   @XmlElement(name="Node")
   @XmlElementWrapper(name="DeadNodes")
   public List<String> getDeadNodes() {
     return deadNodes;
   }
 
+  /**
+   * @return the total number of regions served by the cluster
+   */
   @XmlAttribute
   public int getRegions() {
     return regions;
   }
-  
+
+  /**
+   * @return the total number of requests per second handled by the cluster in
+   * the last reporting interval
+   */
   @XmlAttribute
   public int getRequests() {
     return requests;
   }
 
+  /**
+   * @return the average load of the region servers in the cluster
+   */
   @XmlAttribute
   public double getAverageLoad() {
     return averageLoad;
   }
 
+  /**
+   * @param nodes the list of live node models
+   */
   public void setLiveNodes(List<Node> nodes) {
     this.liveNodes = nodes;
   }
 
+  /**
+   * @param nodes the list of dead node names
+   */
   public void setDeadNodes(List<String> nodes) {
     this.deadNodes = nodes;
   }
 
+  /**
+   * @param regions the total number of regions served by the cluster
+   */
   public void setRegions(int regions) {
     this.regions = regions;
   }
-  
+
+  /**
+   * @param requests the total number of requests per second handled by the
+   * cluster
+   */
   public void setRequests(int requests) {
     this.requests = requests;
   }
 
+  /**
+   * @param averageLoad the average load of region servers in the cluster
+   */
   public void setAverageLoad(double averageLoad) {
     this.averageLoad = averageLoad;
   }
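
A sketch of assembling the cluster status representation documented above;
server names, start codes, the region name, and the counts are hypothetical:

  import org.apache.hadoop.hbase.stargate.model.StorageClusterStatusModel;

  public class ClusterStatusSketch {
    public static void main(String[] args) {
      StorageClusterStatusModel status = new StorageClusterStatusModel();

      // addLiveNode returns the new Node so per-server detail can be attached.
      StorageClusterStatusModel.Node node =
          status.addLiveNode("rs1.example.com:60030", 1246586400000L);
      node.setRequests(42);
      node.addRegion("example_table,,1246586400000".getBytes()); // hypothetical

      // Dead region servers are tracked by name only.
      status.addDeadNode("rs2.example.com:60030");

      // Cluster-wide aggregates.
      status.setRegions(1);
      status.setRequests(42);
      status.setAverageLoad(1.0);
    }
  }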

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/StorageClusterVersionModel.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/StorageClusterVersionModel.java?rev=790778&r1=790777&r2=790778&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/StorageClusterVersionModel.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/StorageClusterVersionModel.java Fri Jul  3 02:14:46 2009
@@ -25,17 +25,26 @@
 import javax.xml.bind.annotation.XmlRootElement;
 import javax.xml.bind.annotation.XmlValue;
 
+/**
+ * Simple representation of the version of the storage cluster (HBase)
+ */
 @XmlRootElement(name="ClusterVersion")
 public class StorageClusterVersionModel implements Serializable {
 	private static final long serialVersionUID = 1L;
 
 	private String version;
 
+	/**
+	 * @return the storage cluster version
+	 */
 	@XmlValue
 	public String getVersion() {
 	  return version;
 	}
-	
+
+	/**
+	 * @param version the storage cluster version
+	 */
 	public void setVersion(String version) {
 	  this.version = version;
 	}

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/TableInfoModel.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/TableInfoModel.java?rev=790778&r1=790777&r2=790778&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/TableInfoModel.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/TableInfoModel.java Fri Jul  3 02:14:46 2009
@@ -34,6 +34,9 @@
 
 import com.google.protobuf.ByteString;
 
+/**
+ * Representation of a list of table regions. 
+ */
 @XmlRootElement(name="TableInfo")
 @XmlType(propOrder = {"name","regions"})
 public class TableInfoModel implements Serializable, IProtobufWrapper {
@@ -42,16 +45,31 @@
   private String name;
   private List<TableRegionModel> regions = new ArrayList<TableRegionModel>();
 
+  /**
+   * Default constructor
+   */
   public TableInfoModel() {}
 
+  /**
+   * Constructor
+   * @param name
+   */
   public TableInfoModel(String name) {
     this.name = name;
   }
 
-  public void add(TableRegionModel object) {
-    regions.add(object);
+  /**
+   * Add a region model to the list
+   * @param region the region
+   */
+  public void add(TableRegionModel region) {
+    regions.add(region);
   }
-  
+
+  /**
+   * @param index the index
+   * @return the region model
+   */
   public TableRegionModel get(int index) {
     return regions.get(index);
   }

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/TableListModel.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/TableListModel.java?rev=790778&r1=790777&r2=790778&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/TableListModel.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/TableListModel.java Fri Jul  3 02:14:46 2009
@@ -30,6 +30,9 @@
 
 import org.apache.hadoop.hbase.stargate.protobuf.generated.TableListMessage.TableList;
 
+/**
+ * Simple representation of a list of table names.
+ */
 @XmlRootElement(name="TableList")
 public class TableListModel implements Serializable, IProtobufWrapper {
 
@@ -37,12 +40,23 @@
 
 	private List<TableModel> tables = new ArrayList<TableModel>();
 
+	/**
+	 * Default constructor
+	 */
 	public TableListModel() {}
-	
-	public void add(TableModel object) {
-		tables.add(object);
+
+	/**
+	 * Add the table name model to the list
+	 * @param table the table model
+	 */
+	public void add(TableModel table) {
+		tables.add(table);
 	}
 	
+	/**
+	 * @param index the index
+	 * @return the table model
+	 */
 	public TableModel get(int index) {
 		return tables.get(index);
 	}
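
A small usage sketch of the add/get pair above (TableInfoModel follows the same pattern for regions); it assumes the TableModel(String) constructor shown later in this commit, and the table names are arbitrary examples.

    import org.apache.hadoop.hbase.stargate.model.TableListModel;
    import org.apache.hadoop.hbase.stargate.model.TableModel;

    public class TableListSketch {
      public static void main(String[] args) {
        TableListModel list = new TableListModel();
        // Table names are arbitrary examples.
        list.add(new TableModel("content"));
        list.add(new TableModel("urls"));

        TableModel first = list.get(0);
        System.out.println(first != null);   // true
      }
    }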

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/TableModel.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/TableModel.java?rev=790778&r1=790777&r2=790778&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/TableModel.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/TableModel.java Fri Jul  3 02:14:46 2009
@@ -25,6 +25,9 @@
 import javax.xml.bind.annotation.XmlAttribute;
 import javax.xml.bind.annotation.XmlRootElement;
 
+/**
+ * Simple representation of a table name.
+ */
 @XmlRootElement(name="table")
 public class TableModel implements Serializable {
 
@@ -32,9 +35,13 @@
 	
 	private String name;
 	
+	/**
+	 * Default constructor
+	 */
 	public TableModel() {}
 
 	/**
+	 * Constructor
 	 * @param name
 	 */
 	public TableModel(String name) {

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/TableRegionModel.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/TableRegionModel.java?rev=790778&r1=790777&r2=790778&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/TableRegionModel.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/TableRegionModel.java Fri Jul  3 02:14:46 2009
@@ -28,6 +28,10 @@
 
 import org.apache.hadoop.hbase.util.Bytes;
 
+/**
+ * Representation of a region of a table and its current location on the
+ * storage cluster.
+ */
 @XmlRootElement(name="Region")
 @XmlType(propOrder = {"name","id","startKey","endKey","location"})
 public class TableRegionModel implements Serializable {

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/TableSchemaModel.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/TableSchemaModel.java?rev=790778&r1=790777&r2=790778&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/TableSchemaModel.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/TableSchemaModel.java Fri Jul  3 02:14:46 2009
@@ -41,6 +41,9 @@
 import org.apache.hadoop.hbase.stargate.protobuf.generated.ColumnSchemaMessage.ColumnSchema;
 import org.apache.hadoop.hbase.stargate.protobuf.generated.TableSchemaMessage.TableSchema;
 
+/**
+ * A representation of HBase table descriptors.
+ */
 @XmlRootElement(name="TableSchema")
 @XmlType(propOrder = {"name","columns"})
 public class TableSchemaModel implements Serializable, IProtobufWrapper {
@@ -58,20 +61,44 @@
   private Map<QName,Object> attrs = new HashMap<QName,Object>();
   private List<ColumnSchemaModel> columns = new ArrayList<ColumnSchemaModel>();
   
+  /**
+   * Default constructor.
+   */
   public TableSchemaModel() {}
-  
+
+  /**
+   * Add an attribute to the table descriptor
+   * @param name attribute name
+   * @param value attribute value
+   */
   public void addAttribute(String name, Object value) {
     attrs.put(new QName(name), value);
   }
 
+  /**
+   * Return a table descriptor value as a string. Calls toString() on the
+   * object stored in the descriptor value map.
+   * @param name the attribute name
+   * @return the attribute value, or null if the attribute is not set
+   */
   public String getAttribute(String name) {
-    return attrs.get(new QName(name)).toString();
+    Object o = attrs.get(new QName(name));
+    return o != null ? o.toString() : null;
   }
 
-  public void addColumnFamily(ColumnSchemaModel object) {
-    columns.add(object);
+  /**
+   * Add a column family to the table descriptor
+   * @param family the column family model
+   */
+  public void addColumnFamily(ColumnSchemaModel family) {
+    columns.add(family);
   }
-  
+
+  /**
+   * Retrieve the column family at the given index from the table descriptor
+   * @param index the index
+   * @return the column family model
+   */
   public ColumnSchemaModel getColumnFamily(int index) {
     return columns.get(index);
   }
@@ -149,40 +176,64 @@
   // cannot be standard bean type getters and setters, otherwise this would
   // confuse JAXB
 
+  /**
+   * @return true if IN_MEMORY attribute exists and is true
+   */
   public boolean __getInMemory() {
     Object o = attrs.get(IN_MEMORY);
     return o != null ? 
       Boolean.valueOf(o.toString()) : HTableDescriptor.DEFAULT_IN_MEMORY;
   }
 
+  /**
+   * @return true if IS_META attribute exists and is true
+   */
   public boolean __getIsMeta() {
     Object o = attrs.get(IS_META);
     return o != null ? Boolean.valueOf(o.toString()) : false;
   }
 
+  /**
+   * @return true if IS_ROOT attribute exists and is true
+   */
   public boolean __getIsRoot() {
     Object o = attrs.get(IS_ROOT);
     return o != null ? Boolean.valueOf(o.toString()) : false;
   }
 
+  /**
+   * @return true if READONLY attribute exists and is true
+   */
   public boolean __getReadOnly() {
     Object o = attrs.get(READONLY);
     return o != null ? 
       Boolean.valueOf(o.toString()) : HTableDescriptor.DEFAULT_READONLY;
   }
 
+  /**
+   * @param value desired value of IN_MEMORY attribute
+   */
   public void __setInMemory(boolean value) {
     attrs.put(IN_MEMORY, Boolean.toString(value));
   }
 
+  /**
+   * @param value desired value of IS_META attribute
+   */
   public void __setIsMeta(boolean value) {
     attrs.put(IS_META, Boolean.toString(value));
   }
 
+  /**
+   * @param value desired value of IS_ROOT attribute
+   */
   public void __setIsRoot(boolean value) {
     attrs.put(IS_ROOT, Boolean.toString(value));
   }
 
+  /**
+   * @param value desired value of READONLY attribute
+   */
   public void __setReadOnly(boolean value) {
     attrs.put(READONLY, Boolean.toString(value));
   }
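
A minimal sketch of the attribute handling above: arbitrary attributes go through addAttribute/getAttribute (which now returns null for a missing key), while the double-underscore helpers wrap the well-known HTableDescriptor attributes without confusing JAXB. The attribute name below is an arbitrary example and ColumnSchemaModel's no-arg constructor is assumed.

    import org.apache.hadoop.hbase.stargate.model.ColumnSchemaModel;
    import org.apache.hadoop.hbase.stargate.model.TableSchemaModel;

    public class TableSchemaSketch {
      public static void main(String[] args) {
        TableSchemaModel schema = new TableSchemaModel();

        // Arbitrary attribute, stored under a QName key and read back via toString().
        schema.addAttribute("MAX_FILESIZE", 268435456L);
        System.out.println(schema.getAttribute("MAX_FILESIZE"));   // 268435456
        System.out.println(schema.getAttribute("NO_SUCH_ATTR"));   // null after this patch

        // Well-known attributes via the __ helpers.
        schema.__setReadOnly(true);
        System.out.println(schema.__getReadOnly());                // true

        // Column families are kept in an ordered list.
        schema.addColumnFamily(new ColumnSchemaModel());
        System.out.println(schema.getColumnFamily(0) != null);     // true
      }
    }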

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/VersionModel.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/VersionModel.java?rev=790778&r1=790777&r2=790778&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/VersionModel.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/model/VersionModel.java Fri Jul  3 02:14:46 2009
@@ -32,6 +32,17 @@
 
 import com.sun.jersey.spi.container.servlet.ServletContainer;
 
+/**
+ * A representation of the collection of versions of the Stargate software
+ * components.
+ * <ul>
+ * <li>stargateVersion: Stargate revision</li>
+ * <li>jvmVersion: the JVM vendor and version information</li>
+ * <li>osVersion: the OS type, version, and hardware architecture</li>
+ * <li>serverVersion: the name and version of the servlet container</li>
+ * <li>jerseyVersion: the version of the embedded Jersey framework</li>
+ * </ul>
+ */
 @XmlRootElement(name="Version")
 public class VersionModel implements Serializable, IProtobufWrapper {
 
@@ -43,8 +54,15 @@
   private String serverVersion;
   private String jerseyVersion;
 
+  /**
+   * Default constructor. Do not use.
+   */
   public VersionModel() {}
   
+  /**
+   * Constructor
+   * @param context the servlet context
+   */
 	public VersionModel(ServletContext context) {
 	  stargateVersion = RESTServlet.VERSION_STRING;
 	  jvmVersion = System.getProperty("java.vm.vendor") + ' ' +
@@ -58,47 +76,77 @@
       .getImplementationVersion();
 	}
 
+	/**
+	 * @return the Stargate version
+	 */
 	@XmlAttribute(name="Stargate")
 	public String getStargateVersion() {
     return stargateVersion;
   }
 
+	/**
+	 * @return the JVM vendor and version
+	 */
   @XmlAttribute(name="JVM")
   public String getJvmVersion() {
     return jvmVersion;
   }
 
+  /**
+   * @return the OS name, version, and hardware architecture
+   */
   @XmlAttribute(name="OS")
   public String getOsVersion() {
     return osVersion;
   }
 
+  /**
+   * @return the servlet container version
+   */
   @XmlAttribute(name="Server")
   public String getServerVersion() {
     return serverVersion;
   }
 
+  /**
+   * @return the version of the embedded Jersey framework
+   */
   @XmlAttribute(name="Jersey")
   public String getJerseyVersion() {
     return jerseyVersion;
   }
 
+  /**
+   * @param version the Stargate version string
+   */
   public void setStargateVersion(String version) {
     this.stargateVersion = version;
   }
 
+  /**
+   * @param version the OS version string
+   */
   public void setOsVersion(String version) {
     this.osVersion = version;
   }
 
+  /**
+   * @param version the JVM version string
+   */
   public void setJvmVersion(String version) {
     this.jvmVersion = version;
   }
 
+  /**
+   * @param version the servlet container version string
+   */
   public void setServerVersion(String version) {
     this.serverVersion = version;
   }
 
+  /**
+   * @param version the Jersey framework version string
+   */
   public void setJerseyVersion(String version) {
     this.jerseyVersion = version;
   }

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/provider/JAXBContextResolver.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/provider/JAXBContextResolver.java?rev=790778&r1=790777&r2=790778&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/provider/JAXBContextResolver.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/provider/JAXBContextResolver.java Fri Jul  3 02:14:46 2009
@@ -45,6 +45,12 @@
 import com.sun.jersey.api.json.JSONConfiguration;
 import com.sun.jersey.api.json.JSONJAXBContext;
 
+/**
+ * Plumbing for hooking up Jersey's JSON entity body encoding and decoding
+ * support to JAXB. Modify how the context is created (by using e.g. a 
+ * different configuration builder) to control how JSON is processed and
+ * created.
+ */
 @Provider
 public class JAXBContextResolver implements ContextResolver<JAXBContext> {
 
@@ -72,7 +78,7 @@
   public JAXBContextResolver() throws Exception {
 		this.types = new HashSet(Arrays.asList(cTypes));
 		this.context = new JSONJAXBContext(JSONConfiguration.natural().build(),
-				cTypes);
+		  cTypes);
 	}
 
 	@Override
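
The class comment above invites swapping the configuration builder. Below is a hedged, self-contained sketch of a resolver wired to Jersey's mapped JSON convention instead of the natural one; JSONConfiguration.mapped() is assumed to be available in the bundled Jersey version, and the two model classes registered are just examples.

    import java.util.Arrays;
    import java.util.HashSet;
    import java.util.Set;

    import javax.ws.rs.ext.ContextResolver;
    import javax.ws.rs.ext.Provider;
    import javax.xml.bind.JAXBContext;

    import org.apache.hadoop.hbase.stargate.model.TableListModel;
    import org.apache.hadoop.hbase.stargate.model.TableModel;

    import com.sun.jersey.api.json.JSONConfiguration;
    import com.sun.jersey.api.json.JSONJAXBContext;

    @Provider
    public class MappedJsonContextResolver implements ContextResolver<JAXBContext> {

      private final Set<Class<?>> types;
      private final JAXBContext context;

      public MappedJsonContextResolver() throws Exception {
        Class<?>[] cTypes = { TableListModel.class, TableModel.class };
        this.types = new HashSet<Class<?>>(Arrays.asList(cTypes));
        // Mapped JSON convention instead of the natural one used by the patch.
        this.context = new JSONJAXBContext(JSONConfiguration.mapped().build(), cTypes);
      }

      @Override
      public JAXBContext getContext(Class<?> type) {
        return types.contains(type) ? context : null;
      }
    }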

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/provider/consumer/ProtobufMessageBodyConsumer.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/provider/consumer/ProtobufMessageBodyConsumer.java?rev=790778&r1=790777&r2=790778&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/provider/consumer/ProtobufMessageBodyConsumer.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/provider/consumer/ProtobufMessageBodyConsumer.java Fri Jul  3 02:14:46 2009
@@ -38,9 +38,14 @@
 import org.apache.hadoop.hbase.stargate.Constants;
 import org.apache.hadoop.hbase.stargate.model.IProtobufWrapper;
 
+/**
+ * Adapter for hooking up Jersey content processing dispatch to handlers
+ * implementing the IProtobufWrapper interface, for decoding protobuf input.
+ */
 @Provider
 @Consumes(Constants.MIMETYPE_PROTOBUF)
-public class ProtobufMessageBodyConsumer implements MessageBodyReader<IProtobufWrapper> {
+public class ProtobufMessageBodyConsumer 
+    implements MessageBodyReader<IProtobufWrapper> {
   private static final Log LOG =
     LogFactory.getLog(ProtobufMessageBodyConsumer.class);
 
@@ -79,5 +84,4 @@
     }
     return obj;
   }
-
 }

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/provider/producer/PlainTextMessageBodyProducer.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/provider/producer/PlainTextMessageBodyProducer.java?rev=790778&r1=790777&r2=790778&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/provider/producer/PlainTextMessageBodyProducer.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/provider/producer/PlainTextMessageBodyProducer.java Fri Jul  3 02:14:46 2009
@@ -24,6 +24,8 @@
 import java.io.OutputStream;
 import java.lang.annotation.Annotation;
 import java.lang.reflect.Type;
+import java.util.Map;
+import java.util.WeakHashMap;
 
 import javax.ws.rs.Produces;
 import javax.ws.rs.WebApplicationException;
@@ -34,22 +36,31 @@
 
 import org.apache.hadoop.hbase.stargate.Constants;
 
+/**
+ * An adapter between Jersey and Object.toString(). Hooks up plain text output
+ * to the Jersey content handling framework. 
+ * Jersey will first call getSize() to learn the number of bytes that will be
+ * sent, then writeTo to perform the actual I/O.
+ */
 @Provider
 @Produces(Constants.MIMETYPE_TEXT)
-public class PlainTextMessageBodyProducer implements MessageBodyWriter<Object>{
+public class PlainTextMessageBodyProducer 
+  implements MessageBodyWriter<Object> {
+
+  private Map<Object, byte[]> buffer = new WeakHashMap<Object, byte[]>();
+
+  @Override
+  public boolean isWriteable(Class<?> arg0, Type arg1, Annotation[] arg2,
+      MediaType arg3) {
+    return true;
+  }
 
 	@Override
 	public long getSize(Object object, Class<?> type, Type genericType,
 			Annotation[] annotations, MediaType mediaType) {
-		//TODO This is cheating, this needs to either be cashed or I need 
-		//	   to figure out a better way to calculate this information
-		return object.toString().getBytes().length;
-	}
-
-	@Override
-	public boolean isWriteable(Class<?> arg0, Type arg1, Annotation[] arg2,
-			MediaType arg3) {
-		return true;
+	  byte[] bytes = object.toString().getBytes(); 
+	  buffer.put(object, bytes);
+		return bytes.length;
 	}
 
 	@Override
@@ -57,7 +68,6 @@
 			Annotation[] annotations, MediaType mediaType,
 			MultivaluedMap<String, Object> httpHeaders, OutputStream outStream)
 			throws IOException, WebApplicationException {
-		outStream.write(object.toString().getBytes());
-	}
-	
+		outStream.write(buffer.remove(object));
+	}	
 }

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/provider/producer/ProtobufMessageBodyProducer.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/provider/producer/ProtobufMessageBodyProducer.java?rev=790778&r1=790777&r2=790778&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/provider/producer/ProtobufMessageBodyProducer.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/java/org/apache/hadoop/hbase/stargate/provider/producer/ProtobufMessageBodyProducer.java Fri Jul  3 02:14:46 2009
@@ -38,35 +38,43 @@
 import org.apache.hadoop.hbase.stargate.Constants;
 import org.apache.hadoop.hbase.stargate.model.IProtobufWrapper;
 
+/**
+ * An adapter between Jersey and IProtobufWrapper implementors. Hooks up
+ * protobuf output producing methods to the Jersey content handling framework.
+ * Jersey will first call getSize() to learn the number of bytes that will be
+ * sent, then writeTo to perform the actual I/O.
+ */
 @Provider
 @Produces(Constants.MIMETYPE_PROTOBUF)
-public class ProtobufMessageBodyProducer implements MessageBodyWriter<IProtobufWrapper> {
+public class ProtobufMessageBodyProducer
+  implements MessageBodyWriter<IProtobufWrapper> {
 
-	@Override
-	public boolean isWriteable(Class<?> type, Type genericType, Annotation[] annotations, MediaType mediaType) {
-        return IProtobufWrapper.class.isAssignableFrom(type);
-    }
-
-    private Map<Object, byte[]> buffer = new WeakHashMap<Object, byte[]>();
-
-    @Override
-    public long getSize(IProtobufWrapper m, Class<?> type, Type genericType, Annotation[] annotations, MediaType mediaType) {
-        ByteArrayOutputStream baos = new ByteArrayOutputStream();
-        try {
-        	baos.write(m.createProtobufOutput());
-        } catch (IOException e) {
-            return -1;
-        }
-        byte[] bytes = baos.toByteArray();
-        buffer.put(m, bytes);
-        return bytes.length;
-    }
-
-    public void writeTo(IProtobufWrapper m, Class<?> type, Type genericType, Annotation[] annotations, 
-                MediaType mediaType, MultivaluedMap<String, Object> httpHeaders,
-                OutputStream entityStream) throws IOException, WebApplicationException {
-        entityStream.write(buffer.remove(m));
-    }
+  private Map<Object, byte[]> buffer = new WeakHashMap<Object, byte[]>();
 
+	@Override
+	public boolean isWriteable(Class<?> type, Type genericType, 
+	  Annotation[] annotations, MediaType mediaType) {
+      return IProtobufWrapper.class.isAssignableFrom(type);
+  }
 
+	@Override
+	public long getSize(IProtobufWrapper m, Class<?> type, Type genericType,
+	    Annotation[] annotations, MediaType mediaType) {
+	  ByteArrayOutputStream baos = new ByteArrayOutputStream();
+	  try {
+	    baos.write(m.createProtobufOutput());
+	  } catch (IOException e) {
+	    return -1;
+	  }
+	  byte[] bytes = baos.toByteArray();
+	  buffer.put(m, bytes);
+	  return bytes.length;
+	}
+
+	public void writeTo(IProtobufWrapper m, Class<?> type, Type genericType,
+	    Annotation[] annotations, MediaType mediaType, 
+	    MultivaluedMap<String, Object> httpHeaders, OutputStream entityStream) 
+	    throws IOException, WebApplicationException {
+	  entityStream.write(buffer.remove(m));
+	}
 }

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/test/org/apache/hadoop/hbase/stargate/TestRowResource.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/test/org/apache/hadoop/hbase/stargate/TestRowResource.java?rev=790778&r1=790777&r2=790778&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/test/org/apache/hadoop/hbase/stargate/TestRowResource.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/test/org/apache/hadoop/hbase/stargate/TestRowResource.java Fri Jul  3 02:14:46 2009
@@ -317,7 +317,7 @@
     Thread.yield();
 
     // make sure the fake row was not actually created
-    response = client.get(path);
+    response = client.get(path, MIMETYPE_XML);
     assertEquals(response.getCode(), 404);
 
     // check that all of the values were created
@@ -349,7 +349,7 @@
     Thread.yield();
 
     // make sure the fake row was not actually created
-    response = client.get(path);
+    response = client.get(path, MIMETYPE_PROTOBUF);
     assertEquals(response.getCode(), 404);
 
     // check that all of the values were created

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/test/org/apache/hadoop/hbase/stargate/TestVersionResource.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/test/org/apache/hadoop/hbase/stargate/TestVersionResource.java?rev=790778&r1=790777&r2=790778&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/test/org/apache/hadoop/hbase/stargate/TestVersionResource.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/contrib/stargate/src/test/org/apache/hadoop/hbase/stargate/TestVersionResource.java Fri Jul  3 02:14:46 2009
@@ -33,6 +33,9 @@
 import org.apache.hadoop.hbase.stargate.client.Response;
 import org.apache.hadoop.hbase.stargate.model.StorageClusterVersionModel;
 import org.apache.hadoop.hbase.stargate.model.VersionModel;
+import org.apache.hadoop.hbase.util.Bytes;
+
+import com.sun.jersey.spi.container.servlet.ServletContainer;
 
 public class TestVersionResource extends MiniClusterTestCase {
   private static final Log LOG =
@@ -63,15 +66,38 @@
   private static void validate(VersionModel model) {
     assertNotNull(model);
     assertNotNull(model.getStargateVersion());
-    assertNotNull(model.getOsVersion());
-    assertNotNull(model.getJvmVersion());
+    assertEquals(model.getStargateVersion(), RESTServlet.VERSION_STRING);
+    String osVersion = model.getOsVersion(); 
+    assertNotNull(osVersion);
+    assertTrue(osVersion.contains(System.getProperty("os.name")));
+    assertTrue(osVersion.contains(System.getProperty("os.version")));
+    assertTrue(osVersion.contains(System.getProperty("os.arch")));
+    String jvmVersion = model.getJvmVersion();
+    assertNotNull(jvmVersion);
+    assertTrue(jvmVersion.contains(System.getProperty("java.vm.vendor")));
+    assertTrue(jvmVersion.contains(System.getProperty("java.version")));
+    assertTrue(jvmVersion.contains(System.getProperty("java.vm.version")));
     assertNotNull(model.getServerVersion());
-    assertNotNull(model.getJerseyVersion());
+    String jerseyVersion = model.getJerseyVersion();
+    assertNotNull(jerseyVersion);
+    assertEquals(jerseyVersion, ServletContainer.class.getPackage()
+      .getImplementationVersion());
   }
 
   public void testGetStargateVersionText() throws IOException {
     Response response = client.get(Constants.PATH_VERSION, MIMETYPE_PLAIN);
     assertTrue(response.getCode() == 200);
+    String body = Bytes.toString(response.getBody());
+    assertTrue(body.length() > 0);
+    assertTrue(body.contains(RESTServlet.VERSION_STRING));
+    assertTrue(body.contains(System.getProperty("java.vm.vendor")));
+    assertTrue(body.contains(System.getProperty("java.version")));
+    assertTrue(body.contains(System.getProperty("java.vm.version")));
+    assertTrue(body.contains(System.getProperty("os.name")));
+    assertTrue(body.contains(System.getProperty("os.version")));
+    assertTrue(body.contains(System.getProperty("os.arch")));
+    assertTrue(body.contains(ServletContainer.class.getPackage()
+      .getImplementationVersion()));
   }
 
   public void testGetStargateVersionXML() throws IOException, JAXBException {

Added: hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/filter/FilterList.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/filter/FilterList.java?rev=790778&view=auto
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/filter/FilterList.java (added)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/filter/FilterList.java Fri Jul  3 02:14:46 2009
@@ -0,0 +1,217 @@
+/**
+ * Copyright 2009 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.filter;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.io.HbaseObjectWritable;
+
+/**
+ * Implementation of {@link Filter} that represents an ordered List of Filters
+ * which will be evaluated with a specified boolean operator MUST_PASS_ALL 
+ * (!AND) or MUST_PASS_ONE (!OR).  Since you can use Filter Lists as children
+ * of Filter Lists, you can create a hierarchy of filters to be evaluated.
+ * <p>TODO: Fix creation of Configuration on serialization and deserialization. 
+ */
+public class FilterList implements Filter {
+
+  /** set operator */
+  public static enum Operator {
+    /** !AND */
+    MUST_PASS_ALL,
+    /** !OR */
+    MUST_PASS_ONE
+  }
+
+  private Operator operator = Operator.MUST_PASS_ALL;
+  private List<Filter> filters = new ArrayList<Filter>();
+
+  /**
+   * Default constructor, filters nothing. Required though for RPC
+   * deserialization.
+   */
+  public FilterList() {
+    super();
+  }
+
+  /**
+   * Constructor that takes a set of {@link Filter}s. The default operator
+   * MUST_PASS_ALL is assumed.
+   * 
+   * @param rowFilters
+   */
+  public FilterList(final List<Filter> rowFilters) {
+    this.filters = rowFilters;
+  }
+
+  /**
+   * Constructor that takes a set of {@link Filter}s and an operator.
+   * 
+   * @param operator Operator to process filter set with.
+   * @param rowFilters Set of row filters.
+   */
+  public FilterList(final Operator operator, final List<Filter> rowFilters) {
+    this.filters = rowFilters;
+    this.operator = operator;
+  }
+
+  /**
+   * Get the operator.
+   * 
+   * @return operator
+   */
+  public Operator getOperator() {
+    return operator;
+  }
+
+  /**
+   * Get the filters.
+   * 
+   * @return filters
+   */
+  public List<Filter> getFilters() {
+    return filters;
+  }
+
+  /**
+   * Add a filter.
+   * 
+   * @param filter
+   */
+  public void addFilter(Filter filter) {
+    this.filters.add(filter);
+  }
+
+  public void reset() {
+    for (Filter filter : filters) {
+      filter.reset();
+    }
+  }
+
+  public boolean filterRowKey(byte[] rowKey, int offset, int length) {
+    for (Filter filter : filters) {
+      if (operator == Operator.MUST_PASS_ALL) {
+        if (filter.filterAllRemaining()
+            || filter.filterRowKey(rowKey, offset, length)) {
+          return true;
+        }
+      } else if (operator == Operator.MUST_PASS_ONE) {
+        if (!filter.filterAllRemaining()
+            && !filter.filterRowKey(rowKey, offset, length)) {
+          return false;
+        }
+      }
+    }
+    return  operator == Operator.MUST_PASS_ONE;
+  }
+
+  public boolean filterAllRemaining() {
+    for (Filter filter : filters) {
+      if (filter.filterAllRemaining()) {
+        if (operator == Operator.MUST_PASS_ALL) {
+          return true;
+        }
+      } else {
+        if (operator == Operator.MUST_PASS_ONE) {
+          return false;
+        }
+      }
+    }
+    return operator == Operator.MUST_PASS_ONE;
+  }
+
+  public ReturnCode filterKeyValue(KeyValue v) {
+    for (Filter filter : filters) {
+      if (operator == Operator.MUST_PASS_ALL) {
+        if (filter.filterAllRemaining()) {
+          return ReturnCode.NEXT_ROW;
+        }
+        switch (filter.filterKeyValue(v)) {
+        case INCLUDE:
+          continue;
+        case NEXT_ROW:
+        case SKIP:
+          return ReturnCode.SKIP;
+        }
+      } else if (operator == Operator.MUST_PASS_ONE) {
+        if (filter.filterAllRemaining()) {
+          continue;
+        }
+
+        switch (filter.filterKeyValue(v)) {
+        case INCLUDE:
+          return ReturnCode.INCLUDE;
+        case NEXT_ROW:
+        case SKIP:
+          continue;
+        }
+      }
+    }
+    return operator == Operator.MUST_PASS_ONE?
+      ReturnCode.SKIP: ReturnCode.INCLUDE;
+  }
+
+  public boolean filterRow() {
+    for (Filter filter : filters) {
+      if (operator == Operator.MUST_PASS_ALL) {
+        if (filter.filterAllRemaining()
+            || filter.filterRow()) {
+          return true;
+        }
+      } else if (operator == Operator.MUST_PASS_ONE) {
+        if (!filter.filterAllRemaining()
+            && !filter.filterRow()) {
+          return false;
+        }
+      }
+    }
+    return  operator == Operator.MUST_PASS_ONE;
+  }
+
+  public void readFields(final DataInput in) throws IOException {
+    Configuration conf = new HBaseConfiguration();
+    byte opByte = in.readByte();
+    operator = Operator.values()[opByte];
+    int size = in.readInt();
+    if (size > 0) {
+      filters = new ArrayList<Filter>(size);
+      for (int i = 0; i < size; i++) {
+        Filter filter = (Filter)HbaseObjectWritable.readObject(in, conf);
+        filters.add(filter);
+      }
+    }
+  }
+
+  public void write(final DataOutput out) throws IOException {
+    Configuration conf = new HBaseConfiguration();
+    out.writeByte(operator.ordinal());
+    out.writeInt(filters.size());
+    for (Filter filter : filters) {
+      HbaseObjectWritable.writeObject(out, filter, filter.getClass(), conf);
+    }
+  }
+}
\ No newline at end of file
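
A minimal sketch of composing filters the way the class comment describes, including nesting one list inside another; the two leaf Filter variables are hypothetical placeholders standing in for any concrete Filter implementations.

    import java.util.ArrayList;
    import java.util.List;

    import org.apache.hadoop.hbase.filter.Filter;
    import org.apache.hadoop.hbase.filter.FilterList;

    public class FilterListSketch {
      public static void main(String[] args) {
        // Hypothetical placeholders for any two concrete Filter implementations.
        Filter rowKeyFilter = null;   // e.g. something matching a row key pattern
        Filter valueFilter = null;    // e.g. something matching a cell value

        // A row must pass both children (logical AND of the passes).
        List<Filter> children = new ArrayList<Filter>();
        children.add(rowKeyFilter);
        children.add(valueFilter);
        FilterList mustPassAll = new FilterList(FilterList.Operator.MUST_PASS_ALL, children);

        // FilterList is itself a Filter, so lists nest into a hierarchy:
        // pass the AND group above, or pass the row key filter alone.
        FilterList mustPassOne = new FilterList(FilterList.Operator.MUST_PASS_ONE,
            new ArrayList<Filter>());
        mustPassOne.addFilter(mustPassAll);
        mustPassOne.addFilter(rowKeyFilter);
      }
    }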