Posted to commits@hbase.apache.org by st...@apache.org on 2008/02/04 22:48:17 UTC

svn commit: r618453 - in /hadoop/hbase/trunk: ./ conf/ lib/ lib/jetty-ext/ src/java/org/apache/hadoop/hbase/ src/java/org/apache/hadoop/hbase/hql/ src/test/org/apache/hadoop/hbase/

Author: stack
Date: Mon Feb  4 13:48:13 2008
New Revision: 618453

URL: http://svn.apache.org/viewvc?rev=618453&view=rev
Log:
HBASE-403  Fix build after move of hbase in svn
Part 1: The jar and test targets work, as do javacc and clean.
TODO: The package target needs cleanup.

Added:
    hadoop/hbase/trunk/lib/commons-cli-2.0-SNAPSHOT.jar   (with props)
    hadoop/hbase/trunk/lib/commons-logging-1.0.4.jar   (with props)
    hadoop/hbase/trunk/lib/commons-logging-api-1.0.4.jar   (with props)
    hadoop/hbase/trunk/lib/hadoop-0.16.0-core.jar   (with props)
    hadoop/hbase/trunk/lib/hadoop-0.16.0-test.jar   (with props)
    hadoop/hbase/trunk/lib/jetty-5.1.4.jar   (with props)
    hadoop/hbase/trunk/lib/jetty-ext/
    hadoop/hbase/trunk/lib/jetty-ext/commons-el.jar   (with props)
    hadoop/hbase/trunk/lib/jetty-ext/jasper-compiler.jar   (with props)
    hadoop/hbase/trunk/lib/jetty-ext/jasper-runtime.jar   (with props)
    hadoop/hbase/trunk/lib/jetty-ext/jsp-api.jar   (with props)
    hadoop/hbase/trunk/lib/junit-3.8.1.jar   (with props)
    hadoop/hbase/trunk/lib/log4j-1.2.13.jar   (with props)
    hadoop/hbase/trunk/lib/servlet-api.jar   (with props)
    hadoop/hbase/trunk/lib/xmlenc-0.52.jar   (with props)
Modified:
    hadoop/hbase/trunk/build-webapps.xml
    hadoop/hbase/trunk/build.xml
    hadoop/hbase/trunk/conf/hbase-default.xml
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HColumnDescriptor.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HStore.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HStoreFile.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HTableDescriptor.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/hql/AlterCommand.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/hql/SchemaModificationCommand.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/HBaseTestCase.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/TestBloomFilters.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/TestTimestamp.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/TestToString.java

Modified: hadoop/hbase/trunk/build-webapps.xml
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/build-webapps.xml?rev=618453&r1=618452&r2=618453&view=diff
==============================================================================
--- hadoop/hbase/trunk/build-webapps.xml (original)
+++ hadoop/hbase/trunk/build-webapps.xml Mon Feb  4 13:48:13 2008
@@ -41,20 +41,16 @@
 -->
 <project name="build.hbase.jsp" default="jspc">
   <property name="lib.dir" value="${basedir}/lib" />
-  <property name="hadoop.root" location="${basedir}/../../../"/>
   <property name="src.webapps" value="${basedir}/src/webapps" />
   <property name="generated.webapps.src"
     value="${basedir}/src/java"/>
    
   <target name="jspc" >
     <path id="jspc.classpath">
-      <fileset dir="${lib.dir}">
-        <include name="commons-el*jar" />
-      </fileset>
-      <fileset dir="${hadoop.root}/lib/jetty-ext/">
+      <fileset dir="${basedir}/lib/jetty-ext/">
         <include name="*jar" />
       </fileset>
-      <fileset dir="${hadoop.root}/lib/">
+      <fileset dir="${basedir}/lib/">
         <include name="servlet-api*jar" />
         <include name="commons-logging*jar" />
       </fileset>

Modified: hadoop/hbase/trunk/build.xml
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/build.xml?rev=618453&r1=618452&r2=618453&view=diff
==============================================================================
--- hadoop/hbase/trunk/build.xml (original)
+++ hadoop/hbase/trunk/build.xml Mon Feb  4 13:48:13 2008
@@ -18,18 +18,60 @@
 -->
 
 <project name="hbase" default="jar">
-  <import file="../build-contrib.xml"/>
-
-  <property name="build.webapps" value="${build.dir}/webapps"/>
-  <property name="build.lib" value="${build.dir}/lib"/>
-  <property name="build.conf" value="${build.dir}/conf"/>
-  <property name="build.bin" value="${build.dir}/bin"/>
-  <property name="src.webapps" value="${basedir}/src/webapps" />
+  <!-- Load all the default properties, and any the user wants    -->
+  <!-- to contribute (without having to type -D or edit this file) -->
+  <property file="${user.home}/${name}.build.properties" />
+  <property file="${basedir}/build.properties" />
+
+  <property name="src.dir"  location="${basedir}/src/java"/>
+  <property name="src.test" location="${basedir}/src/test"/>
+  <property name="src.examples" location="${basedir}/src/examples"/>
+  <property name="src.webapps" location="${basedir}/src/webapps"/>
+
+  <property name="test.output" value="no"/>
+  <property name="test.timeout" value="900000"/>
+
+  <property name="build.dir" location="${basedir}/build"/>
+  <property name="build.bin" location="${build.dir}/bin"/>
+  <property name="build.conf" location="${build.dir}/conf"/>
+  <property name="build.webapps" location="${build.dir}/webpps"/>
+  <property name="build.lib" location="${build.dir}/lib"/>
+  <property name="build.classes" location="${build.dir}/classes"/>
+  <property name="build.test" location="${build.dir}/test"/>
+  <property name="build.examples" location="${build.dir}/examples"/>
+
+  <property name="test.build.dir" value="${build.dir}/test"/>
+  <property name="test.log.dir" value="${test.build.dir}/logs"/>
+  <property name="test.junit.output.format" value="plain"/>
+
+  <!-- javac options -->
+  <property name="javac.deprecation" value="off"/>
+  <property name="javac.debug" value="on"/>
+
+  <property name="javadoc.link"
+    value="http://java.sun.com/j2se/1.5.0/docs/api/"/>
+
+  <property name="build.encoding" value="ISO-8859-1"/>
+
+  <!-- the normal classpath -->
+  <fileset id="lib.jars" dir="${basedir}" includes="lib/*.jar"/>
+
+  <path id="classpath">
+    <pathelement location="${build.classes}"/>
+    <fileset refid="lib.jars"/>
+    <fileset dir="${basedir}/lib/jetty-ext/">
+      <include name="*jar" />
+    </fileset>
+  </path>
 
   <target name="init">
-    <antcall target="hadoopbuildcontrib.init"/>
+    <mkdir dir="${build.dir}"/>
+    <mkdir dir="${build.classes}"/>
+    <mkdir dir="${build.test}"/>
+    <mkdir dir="${build.examples}"/>
+
     <!--Version is set only if called from hadoop build.xml. Set a default-->
-    <condition property="version" value="0.15.0-dev">
+    <condition property="version" value="0.1.0-dev">
       <not>
         <isset property="version" />
       </not>
@@ -63,8 +105,8 @@
   </target>
 
   <target name="javacc" if="javacc.home">
-  <echo message="javacc.home: ${javacc.home}"/>
-  <property name="hql.src.dir" 
+    <echo message="javacc.home: ${javacc.home}"/>
+    <property name="hql.src.dir" 
        value="${src.dir}/org/apache/hadoop/hbase/hql" /> 
      <mkdir dir="${hql.src.dir}/generated" />
      <javacc
@@ -75,7 +117,6 @@
   </target>
 
   <target name="compile" depends="init,javacc">
-   <echo message="contrib: ${name}"/>
   <!--Compile what's under src and the java classes generated from jsp-->
    <javac
     encoding="${build.encoding}"
@@ -91,9 +132,9 @@
 	
   <!-- Override jar target to specify main class -->
   <target name="jar" depends="compile">
-    <jar jarfile="${build.dir}/hadoop-${version}-${name}.jar"
+    <jar jarfile="${build.dir}/hbase-${version}.jar"
         basedir="${build.classes}" >
-      <fileset file="${root}/conf/hbase-default.xml"/>
+      <fileset file="${basedir}/conf/hbase-default.xml"/>
       <zipfileset dir="${build.webapps}" prefix="webapps"/>
     </jar>
   </target>
@@ -101,6 +142,7 @@
   <!--Manage our own packaging... install our dependencies,
   bin, etc.-->
   <target name="package" depends="jar" unless="skip.contrib"> 
+  <!--TODO!!!-->
     <condition property="dist.dir" value="distribution">
       <not>
         <isset property="dist.dir" />
@@ -110,7 +152,7 @@
     <mkdir dir="${hbase.dist.dir}"/>
     <copy todir="${hbase.dist.dir}" includeEmptyDirs="false" flatten="true">
       <fileset dir="${build.dir}">
-        <include name="hadoop-${version}-${name}.jar" />
+        <include name="hbase-${version}.jar" />
       </fileset>
     </copy>
     <mkdir dir="${hbase.dist.dir}/webapps"/>
@@ -138,8 +180,7 @@
  <!-- Override the compile-test target so we can generate an hbase 
       test jar that has both test and hbase classes. 
   --> 
-  <target name="compile-test" depends="compile" if="test.available"> 
-    <echo message="contrib: ${name}"/> 
+  <target name="compile-test" depends="compile" > 
     <javac 
      encoding="${build.encoding}" 
      srcdir="${src.test}" 
@@ -165,12 +206,54 @@
   <path id="test.classpath">
     <pathelement location="${build.test}" />
     <pathelement location="${src.test}"/>
-    <pathelement location="${hadoop.root}/build/test/classes"/>
-    <pathelement location="${hadoop.root}/src/contrib/test"/>
     <pathelement location="${conf.dir}"/>
-    <pathelement location="${hadoop.root}/build"/>
-    <pathelement location="${root}/conf"/>
     <pathelement location="${build.dir}"/>
     <path refid="classpath"/>
   </path>
+
+  <!-- ================================================================== -->
+  <!-- Run unit tests                                                     -->
+  <!-- ================================================================== -->
+  <target name="test" depends="compile-test, compile" >
+    <delete dir="${test.log.dir}"/>
+    <mkdir dir="${test.log.dir}"/>
+    <junit
+      printsummary="yes" showoutput="${test.output}" 
+      haltonfailure="no" fork="yes" maxmemory="256m"
+      errorProperty="tests.failed" failureProperty="tests.failed"
+      timeout="${test.timeout}">
+      
+      <sysproperty key="test.build.data" value="${build.test}/data"/>
+      <sysproperty key="build.test" value="${build.test}"/>
+      <sysproperty key="contrib.name" value="${name}"/>
+      
+      <!-- requires fork=yes for: 
+        relative File paths to use the specified user.dir 
+        classpath to use build/contrib/*.jar
+      -->
+      <sysproperty key="user.dir" value="${build.test}/data"/>
+      
+      <sysproperty key="fs.default.name" value="${fs.default.name}"/>
+      <sysproperty key="hadoop.test.localoutputfile" value="${hadoop.test.localoutputfile}"/>
+      <sysproperty key="test.log.dir" value="${hadoop.log.dir}"/> 
+      <classpath refid="test.classpath"/>
+      <formatter type="${test.junit.output.format}" />
+      <batchtest todir="${build.test}" unless="testcase">
+        <fileset dir="${src.test}"
+                 includes="**/Test*.java" excludes="**/${test.exclude}.java" />
+      </batchtest>
+      <batchtest todir="${build.test}" if="testcase">
+        <fileset dir="${src.test}" includes="**/${testcase}.java"/>
+      </batchtest>
+    </junit>
+    <fail if="tests.failed">Tests failed!</fail>
+
+  </target>
+
+  <!-- ================================================================== -->
+  <!-- Clean.  Delete the build files, and their directories              -->
+  <!-- ================================================================== -->
+  <target name="clean">
+    <delete dir="${build.dir}"/>
+  </target>
 </project>
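
For reference, any JUnit 3 test under src/test whose name matches the new
target's "**/Test*.java" include is picked up by the forked <junit> task.
A minimal sketch of such a test (TestBuildSanity is hypothetical, not part
of this commit):

    package org.apache.hadoop.hbase;

    import junit.framework.TestCase;

    /** Hypothetical JUnit 3 test the new "test" target would run. */
    public class TestBuildSanity extends TestCase {
      public void testDataDirPropertyIsSet() {
        // The <junit> task forks with test.build.data pointing at
        // ${build.test}/data, so it should be visible to the test.
        assertNotNull(System.getProperty("test.build.data"));
      }
    }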

Modified: hadoop/hbase/trunk/conf/hbase-default.xml
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/conf/hbase-default.xml?rev=618453&r1=618452&r2=618453&view=diff
==============================================================================
--- hadoop/hbase/trunk/conf/hbase-default.xml (original)
+++ hadoop/hbase/trunk/conf/hbase-default.xml Mon Feb  4 13:48:13 2008
@@ -221,12 +221,6 @@
     such as hlog.
     </description>
   </property>
-  <property>
-    <name>hbase.hstore.blockCache.blockSize</name>
-    <value>65536</value>
-    <description>The size of each block in any block caches.
-    </description>
-  </property>
 
   <!-- HbaseShell Configurations -->
   <property>

Added: hadoop/hbase/trunk/lib/commons-cli-2.0-SNAPSHOT.jar
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/lib/commons-cli-2.0-SNAPSHOT.jar?rev=618453&view=auto
==============================================================================
Binary file - no diff available.

Propchange: hadoop/hbase/trunk/lib/commons-cli-2.0-SNAPSHOT.jar
------------------------------------------------------------------------------
    svn:mime-type = application/octet-stream

Added: hadoop/hbase/trunk/lib/commons-logging-1.0.4.jar
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/lib/commons-logging-1.0.4.jar?rev=618453&view=auto
==============================================================================
Binary file - no diff available.

Propchange: hadoop/hbase/trunk/lib/commons-logging-1.0.4.jar
------------------------------------------------------------------------------
    svn:mime-type = application/octet-stream

Added: hadoop/hbase/trunk/lib/commons-logging-api-1.0.4.jar
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/lib/commons-logging-api-1.0.4.jar?rev=618453&view=auto
==============================================================================
Binary file - no diff available.

Propchange: hadoop/hbase/trunk/lib/commons-logging-api-1.0.4.jar
------------------------------------------------------------------------------
    svn:mime-type = application/octet-stream

Added: hadoop/hbase/trunk/lib/hadoop-0.16.0-core.jar
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/lib/hadoop-0.16.0-core.jar?rev=618453&view=auto
==============================================================================
Binary file - no diff available.

Propchange: hadoop/hbase/trunk/lib/hadoop-0.16.0-core.jar
------------------------------------------------------------------------------
    svn:mime-type = application/octet-stream

Added: hadoop/hbase/trunk/lib/hadoop-0.16.0-test.jar
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/lib/hadoop-0.16.0-test.jar?rev=618453&view=auto
==============================================================================
Binary file - no diff available.

Propchange: hadoop/hbase/trunk/lib/hadoop-0.16.0-test.jar
------------------------------------------------------------------------------
    svn:mime-type = application/octet-stream

Added: hadoop/hbase/trunk/lib/jetty-5.1.4.jar
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/lib/jetty-5.1.4.jar?rev=618453&view=auto
==============================================================================
Binary file - no diff available.

Propchange: hadoop/hbase/trunk/lib/jetty-5.1.4.jar
------------------------------------------------------------------------------
    svn:mime-type = application/octet-stream

Added: hadoop/hbase/trunk/lib/jetty-ext/commons-el.jar
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/lib/jetty-ext/commons-el.jar?rev=618453&view=auto
==============================================================================
Binary file - no diff available.

Propchange: hadoop/hbase/trunk/lib/jetty-ext/commons-el.jar
------------------------------------------------------------------------------
    svn:mime-type = application/octet-stream

Added: hadoop/hbase/trunk/lib/jetty-ext/jasper-compiler.jar
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/lib/jetty-ext/jasper-compiler.jar?rev=618453&view=auto
==============================================================================
Binary file - no diff available.

Propchange: hadoop/hbase/trunk/lib/jetty-ext/jasper-compiler.jar
------------------------------------------------------------------------------
    svn:mime-type = application/octet-stream

Added: hadoop/hbase/trunk/lib/jetty-ext/jasper-runtime.jar
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/lib/jetty-ext/jasper-runtime.jar?rev=618453&view=auto
==============================================================================
Binary file - no diff available.

Propchange: hadoop/hbase/trunk/lib/jetty-ext/jasper-runtime.jar
------------------------------------------------------------------------------
    svn:mime-type = application/octet-stream

Added: hadoop/hbase/trunk/lib/jetty-ext/jsp-api.jar
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/lib/jetty-ext/jsp-api.jar?rev=618453&view=auto
==============================================================================
Binary file - no diff available.

Propchange: hadoop/hbase/trunk/lib/jetty-ext/jsp-api.jar
------------------------------------------------------------------------------
    svn:mime-type = application/octet-stream

Added: hadoop/hbase/trunk/lib/junit-3.8.1.jar
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/lib/junit-3.8.1.jar?rev=618453&view=auto
==============================================================================
Binary file - no diff available.

Propchange: hadoop/hbase/trunk/lib/junit-3.8.1.jar
------------------------------------------------------------------------------
    svn:mime-type = application/octet-stream

Added: hadoop/hbase/trunk/lib/log4j-1.2.13.jar
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/lib/log4j-1.2.13.jar?rev=618453&view=auto
==============================================================================
Binary file - no diff available.

Propchange: hadoop/hbase/trunk/lib/log4j-1.2.13.jar
------------------------------------------------------------------------------
    svn:mime-type = application/octet-stream

Added: hadoop/hbase/trunk/lib/servlet-api.jar
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/lib/servlet-api.jar?rev=618453&view=auto
==============================================================================
Binary file - no diff available.

Propchange: hadoop/hbase/trunk/lib/servlet-api.jar
------------------------------------------------------------------------------
    svn:mime-type = application/octet-stream

Added: hadoop/hbase/trunk/lib/xmlenc-0.52.jar
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/lib/xmlenc-0.52.jar?rev=618453&view=auto
==============================================================================
Binary file - no diff available.

Propchange: hadoop/hbase/trunk/lib/xmlenc-0.52.jar
------------------------------------------------------------------------------
    svn:mime-type = application/octet-stream

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HColumnDescriptor.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HColumnDescriptor.java?rev=618453&r1=618452&r2=618453&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HColumnDescriptor.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HColumnDescriptor.java Mon Feb  4 13:48:13 2008
@@ -42,7 +42,7 @@
 public class HColumnDescriptor implements WritableComparable {
   
   // For future backward compatibility
-  private static final byte COLUMN_DESCRIPTOR_VERSION = (byte)2;
+  private static final byte COLUMN_DESCRIPTOR_VERSION = (byte)1;
   
   /** Legal family names can only contain 'word characters' and end in a colon. */
   public static final Pattern LEGAL_FAMILY_NAME = Pattern.compile("\\w+:");
@@ -77,11 +77,6 @@
   public static final boolean DEFAULT_IN_MEMORY = false;
   
   /**
-   * Default setting for whether to use a block cache or not.
-   */
-  public static final boolean DEFAULT_BLOCK_CACHE_ENABLED = false;
-  
-  /**
    * Default maximum length of cell contents.
    */
   public static final int DEFAULT_MAX_VALUE_LENGTH = Integer.MAX_VALUE;
@@ -100,8 +95,6 @@
   private CompressionType compressionType;
   // Serve reads from in-memory cache
   private boolean inMemory;
-  // Serve reads from in-memory block cache
-  private boolean blockCacheEnabled;
   // Maximum value size
   private int maxValueLength;
   // True if bloom filter was specified
@@ -130,7 +123,6 @@
     this(columnName == null || columnName.length() <= 0?
       new Text(): new Text(columnName),
       DEFAULT_N_VERSIONS, DEFAULT_COMPRESSION_TYPE, DEFAULT_IN_MEMORY,
-      DEFAULT_BLOCK_CACHE_ENABLED, 
       Integer.MAX_VALUE, DEFAULT_BLOOM_FILTER_DESCRIPTOR);
   }
   
@@ -142,7 +134,6 @@
    * @param compression Compression type
    * @param inMemory If true, column data should be kept in an HRegionServer's
    * cache
-   * @param blockCacheEnabled If true, MapFile blocks should be cached
    * @param maxValueLength Restrict values to &lt;= this value
    * @param bloomFilter Enable the specified bloom filter for this column
    * 
@@ -153,7 +144,6 @@
    */
   public HColumnDescriptor(final Text name, final int maxVersions,
       final CompressionType compression, final boolean inMemory,
-      final boolean blockCacheEnabled,
       final int maxValueLength, final BloomFilterDescriptor bloomFilter) {
     String familyStr = name.toString();
     // Test name if not null (It can be null when deserializing after
@@ -175,7 +165,6 @@
     }
     this.maxVersions = maxVersions;
     this.inMemory = inMemory;
-    this.blockCacheEnabled = blockCacheEnabled;
     this.maxValueLength = maxValueLength;
     this.bloomFilter = bloomFilter;
     this.bloomFilterSpecified = this.bloomFilter == null ? false : true;
@@ -224,13 +213,6 @@
   }
   
   /**
-   * @return True if MapFile blocks should be cached.
-   */
-  public boolean isBlockCacheEnabled() {
-    return blockCacheEnabled;
-  }
-
-  /**
    * @return Maximum value length.
    */
   public int getMaxValueLength() {
@@ -252,7 +234,6 @@
     return "{name: " + tmp.substring(0, tmp.length() - 1) +
       ", max versions: " + maxVersions +
       ", compression: " + this.compressionType + ", in memory: " + inMemory +
-      ", block cache enabled: " + blockCacheEnabled +
       ", max length: " + maxValueLength + ", bloom filter: " +
       (bloomFilterSpecified ? bloomFilter.toString() : "none") + "}";
   }
@@ -270,7 +251,6 @@
     result ^= Integer.valueOf(this.maxVersions).hashCode();
     result ^= this.compressionType.hashCode();
     result ^= Boolean.valueOf(this.inMemory).hashCode();
-    result ^= Boolean.valueOf(this.blockCacheEnabled).hashCode();
     result ^= Integer.valueOf(this.maxValueLength).hashCode();
     result ^= Boolean.valueOf(this.bloomFilterSpecified).hashCode();
     result ^= Byte.valueOf(this.versionNumber).hashCode();
@@ -297,10 +277,6 @@
       bloomFilter = new BloomFilterDescriptor();
       bloomFilter.readFields(in);
     }
-    
-    if (this.versionNumber > 1) {
-      this.blockCacheEnabled = in.readBoolean();
-    }
   }
 
   /** {@inheritDoc} */
@@ -316,8 +292,6 @@
     if(bloomFilterSpecified) {
       bloomFilter.write(out);
     }
-
-    out.writeBoolean(this.blockCacheEnabled);
   }
 
   // Comparable
@@ -346,18 +320,6 @@
         result = 0;
         
       } else if(this.inMemory) {
-        result = -1;
-        
-      } else {
-        result = 1;
-      }
-    }
-    
-    if(result == 0) {
-      if(this.blockCacheEnabled == other.blockCacheEnabled) {
-        result = 0;
-        
-      } else if(this.blockCacheEnabled) {
         result = -1;
         
       } else {
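
With the block-cache flag gone, callers construct column descriptors through
the narrowed six-argument constructor. A minimal sketch of an updated caller,
mirroring the rootTableDesc change in HTableDescriptor.java below
(ColumnDescriptorExample is illustrative only):

    import org.apache.hadoop.hbase.HColumnDescriptor;
    import org.apache.hadoop.io.Text;

    class ColumnDescriptorExample {
      static HColumnDescriptor infoFamily() {
        return new HColumnDescriptor(
            new Text("info:"),                      // family name, colon-terminated
            1,                                      // max versions
            HColumnDescriptor.CompressionType.NONE, // no compression
            false,                                  // not served from in-memory cache
            Integer.MAX_VALUE,                      // max value length
            null);                                  // no bloom filter
      }
    }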

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HStore.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HStore.java?rev=618453&r1=618452&r2=618453&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HStore.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HStore.java Mon Feb  4 13:48:13 2008
@@ -741,19 +741,9 @@
     
     // Finally, start up all the map readers! (There could be more than one
     // since we haven't compacted yet.)
-    boolean first = true;
     for(Map.Entry<Long, HStoreFile> e: this.storefiles.entrySet()) {
-      if (first) {
-        // Use a block cache (if configured) for the first reader only
-        // so as to control memory usage.
-        this.readers.put(e.getKey(),
-            e.getValue().getReader(this.fs, this.bloomFilter,
-                family.isBlockCacheEnabled()));
-        first = false;
-      } else {
-        this.readers.put(e.getKey(),
-          e.getValue().getReader(this.fs, this.bloomFilter));
-      }
+      this.readers.put(e.getKey(),
+        e.getValue().getReader(this.fs, this.bloomFilter));
     }
   }
   
@@ -1570,10 +1560,7 @@
           // 6. Loading the new TreeMap.
           Long orderVal = Long.valueOf(finalCompactedFile.loadInfo(fs));
           this.readers.put(orderVal,
-            // Use a block cache (if configured) for this reader since
-            // it is the only one.
-            finalCompactedFile.getReader(this.fs, this.bloomFilter,
-                family.isBlockCacheEnabled()));
+            finalCompactedFile.getReader(this.fs, this.bloomFilter));
           this.storefiles.put(orderVal, finalCompactedFile);
         } catch (IOException e) {
           e = RemoteExceptionHandler.checkIOException(e);

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HStoreFile.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HStoreFile.java?rev=618453&r1=618452&r2=618453&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HStoreFile.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HStoreFile.java Mon Feb  4 13:48:13 2008
@@ -31,11 +31,9 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.io.BlockFSInputStream;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.util.Writables;
 import org.apache.hadoop.io.MapFile;
@@ -415,37 +413,17 @@
    * @return MapFile.Reader
    * @throws IOException
    */
-  public MapFile.Reader getReader(final FileSystem fs,
-    final Filter bloomFilter)
-  throws IOException {
-    return isReference()?
-      new HStoreFile.HalfMapFileReader(fs, getMapFilePath(reference).toString(),
-        conf, reference.getFileRegion(), reference.getMidkey(), bloomFilter):
-      new BloomFilterMapFile.Reader(fs, getMapFilePath().toString(),
-        conf, bloomFilter);
-  }
-  
-  /**
-   * Get reader for the store file map file.
-   * Client is responsible for closing file when done.
-   * @param fs
-   * @param bloomFilter If null, no filtering is done.
-   * @param blockCacheEnabled If true, MapFile blocks should be cached.
-   * @return MapFile.Reader
-   * @throws IOException
-   */
   public synchronized MapFile.Reader getReader(final FileSystem fs,
-      final Filter bloomFilter, final boolean blockCacheEnabled)
+      final Filter bloomFilter)
   throws IOException {
     
     if (isReference()) {
       return new HStoreFile.HalfMapFileReader(fs,
           getMapFilePath(reference).toString(), conf, 
-          reference.getFileRegion(), reference.getMidkey(), bloomFilter,
-          blockCacheEnabled);
+          reference.getFileRegion(), reference.getMidkey(), bloomFilter);
     }
     return new BloomFilterMapFile.Reader(fs, getMapFilePath().toString(),
-        conf, bloomFilter, blockCacheEnabled);
+        conf, bloomFilter);
   }
 
   /**
@@ -606,13 +584,8 @@
    */
   static class HbaseMapFile extends MapFile {
 
-    /**
-     * A reader capable of reading and caching blocks of the data file.
-     */
     static class HbaseReader extends MapFile.Reader {
       
-      private final boolean blockCacheEnabled;
-      
       /**
        * @param fs
        * @param dirName
@@ -621,23 +594,7 @@
        */
       public HbaseReader(FileSystem fs, String dirName, Configuration conf)
       throws IOException {
-        this(fs, dirName, conf, false);
-      }
-      
-      /**
-       * @param fs
-       * @param dirName
-       * @param conf
-       * @param blockCacheEnabled
-       * @throws IOException
-       */
-      public HbaseReader(FileSystem fs, String dirName, Configuration conf,
-          boolean blockCacheEnabled)
-      throws IOException {
-        super(fs, dirName, null, conf, false); // defer opening streams
-        this.blockCacheEnabled = blockCacheEnabled;
-        open(fs, dirName, null, conf);
-        
+        super(fs, dirName, conf);
         // Force reading of the mapfile index by calling midKey.
         // Reading the index will bring the index into memory over
         // here on the client and then close the index file freeing
@@ -648,28 +605,6 @@
         // using up datanode resources.  See HADOOP-2341.
         midKey();
       }
-
-      @Override
-      protected org.apache.hadoop.io.SequenceFile.Reader createDataFileReader(
-          FileSystem fs, Path dataFile, Configuration conf)
-      throws IOException {
-        if (!blockCacheEnabled) {
-          return super.createDataFileReader(fs, dataFile, conf);
-        }
-        LOG.info("Block Cache enabled");
-        final int blockSize = conf.getInt("hbase.hstore.blockCache.blockSize",
-            64 * 1024);
-        return new SequenceFile.Reader(fs, dataFile,  conf) {
-          @Override
-          protected FSDataInputStream openFile(FileSystem fs, Path file,
-              int bufferSize, long length) throws IOException {
-            
-            return new FSDataInputStream(new BlockFSInputStream(
-                    super.openFile(fs, file, bufferSize, length), length,
-                    blockSize));
-          }
-        };
-      }
     }
     
     static class HbaseWriter extends MapFile.Writer {
@@ -718,13 +653,6 @@
         bloomFilter = filter;
       }
 
-      public Reader(FileSystem fs, String dirName, Configuration conf,
-          final Filter filter, final boolean blockCacheEnabled)
-      throws IOException {
-        super(fs, dirName, conf, blockCacheEnabled);
-        bloomFilter = filter;
-      }
-      
       /** {@inheritDoc} */
       @Override
       public Writable get(WritableComparable key, Writable val)
@@ -817,7 +745,7 @@
         final Configuration conf, final Range r,
         final WritableComparable midKey)
     throws IOException {
-      this(fs, dirName, conf, r, midKey, null, false);
+      this(fs, dirName, conf, r, midKey, null);
     }
     
     HalfMapFileReader(final FileSystem fs, final String dirName, 
@@ -825,16 +753,6 @@
         final WritableComparable midKey, final Filter filter)
     throws IOException {
       super(fs, dirName, conf, filter);
-      top = isTopFileRegion(r);
-      midkey = midKey;
-    }
-    
-    HalfMapFileReader(final FileSystem fs, final String dirName, 
-        final Configuration conf, final Range r,
-        final WritableComparable midKey, final Filter filter,
-        final boolean blockCacheEnabled)
-    throws IOException {
-      super(fs, dirName, conf, filter, blockCacheEnabled);
       top = isTopFileRegion(r);
       midkey = midKey;
     }
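
As the javadoc above notes, the client is responsible for closing the reader
when done. A minimal sketch of a caller of the surviving two-argument
getReader (ReaderUsageExample is hypothetical, and the org.onelab.filter.Filter
import is an assumption about where HBase's bloom filter classes lived at the
time):

    package org.apache.hadoop.hbase;

    import java.io.IOException;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.io.MapFile;
    import org.onelab.filter.Filter;

    class ReaderUsageExample {
      static void withReader(HStoreFile storefile, FileSystem fs,
          Filter bloomFilter) throws IOException {
        // A null bloomFilter means no filtering is done.
        MapFile.Reader reader = storefile.getReader(fs, bloomFilter);
        try {
          // ... iterate with reader.next(key, value) ...
        } finally {
          reader.close(); // client closes the file when done
        }
      }
    }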

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HTableDescriptor.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HTableDescriptor.java?rev=618453&r1=618452&r2=618453&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HTableDescriptor.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HTableDescriptor.java Mon Feb  4 13:48:13 2008
@@ -43,15 +43,15 @@
   public static final HTableDescriptor rootTableDesc =
     new HTableDescriptor(HConstants.ROOT_TABLE_NAME,
         new HColumnDescriptor(HConstants.COLUMN_FAMILY, 1,
-            HColumnDescriptor.CompressionType.NONE, false, false,
-            Integer.MAX_VALUE, null));
+            HColumnDescriptor.CompressionType.NONE, false, Integer.MAX_VALUE,
+            null));
   
   /** table descriptor for meta table */
   public static final HTableDescriptor metaTableDesc =
     new HTableDescriptor(HConstants.META_TABLE_NAME,
         new HColumnDescriptor(HConstants.COLUMN_FAMILY, 1,
-            HColumnDescriptor.CompressionType.NONE, false, false,
-            Integer.MAX_VALUE, null));
+            HColumnDescriptor.CompressionType.NONE, false, Integer.MAX_VALUE,
+            null));
   
   private boolean rootregion;
   private boolean metaregion;
@@ -256,4 +256,4 @@
   public static Path getTableDir(Path rootdir, Text tableName) {
     return new Path(rootdir, tableName.toString());
   }
-}
\ No newline at end of file
+}

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/hql/AlterCommand.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/hql/AlterCommand.java?rev=618453&r1=618452&r2=618453&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/hql/AlterCommand.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/hql/AlterCommand.java Mon Feb  4 13:48:13 2008
@@ -202,8 +202,6 @@
             .get(spec)).toUpperCase());
       } else if (spec.equals("IN_MEMORY")) {
         inMemory = (Boolean) columnSpec.get(spec);
-      } else if (spec.equals("BLOCK_CACHE_ENABLED")) {
-        blockCacheEnabled = (Boolean) columnSpec.get(spec);
       } else if (spec.equals("BLOOMFILTER")) {
         bloomFilterType = BloomFilterType.valueOf(((String) columnSpec.get(spec))
             .toUpperCase());
@@ -231,8 +229,7 @@
     column = appendDelimiter(column);
 
     HColumnDescriptor columnDesc = new HColumnDescriptor(new Text(column),
-        maxVersions, compression, inMemory, blockCacheEnabled,
-        maxLength, bloomFilterDesc);
+        maxVersions, compression, inMemory, maxLength, bloomFilterDesc);
 
     return columnDesc;
   }
@@ -246,7 +243,6 @@
     maxLength = original.getMaxValueLength();
     compression = original.getCompression();
     inMemory = original.isInMemory();
-    blockCacheEnabled = original.isBlockCacheEnabled();
     bloomFilterDesc = original.getBloomFilter();
   }
 }

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/hql/SchemaModificationCommand.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/hql/SchemaModificationCommand.java?rev=618453&r1=618452&r2=618453&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/hql/SchemaModificationCommand.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/hql/SchemaModificationCommand.java Mon Feb  4 13:48:13 2008
@@ -37,7 +37,6 @@
   protected int maxLength;
   protected HColumnDescriptor.CompressionType compression;
   protected boolean inMemory;
-  protected boolean blockCacheEnabled;
   protected BloomFilterDescriptor bloomFilterDesc;
   protected BloomFilterType bloomFilterType;
   protected int vectorSize;
@@ -53,7 +52,6 @@
     maxLength = HColumnDescriptor.DEFAULT_MAX_VALUE_LENGTH;
     compression = HColumnDescriptor.DEFAULT_COMPRESSION_TYPE;
     inMemory = HColumnDescriptor.DEFAULT_IN_MEMORY;
-    blockCacheEnabled = HColumnDescriptor.DEFAULT_BLOCK_CACHE_ENABLED;
     bloomFilterDesc = HColumnDescriptor.DEFAULT_BLOOM_FILTER_DESCRIPTOR;
   }
 
@@ -78,8 +76,6 @@
             .valueOf(((String) columnSpec.get(spec)).toUpperCase());
       } else if (spec.equals("IN_MEMORY")) {
         inMemory = (Boolean) columnSpec.get(spec);
-      } else if (spec.equals("BLOCK_CACHE_ENABLED")) {
-        blockCacheEnabled = (Boolean) columnSpec.get(spec);
       } else if (spec.equals("BLOOMFILTER")) {
         bloomFilterType = BloomFilterType.valueOf(((String) columnSpec.get(spec))
             .toUpperCase());
@@ -107,8 +103,7 @@
     column = appendDelimiter(column);
 
     HColumnDescriptor columnDesc = new HColumnDescriptor(new Text(column),
-        maxVersions, compression, inMemory, blockCacheEnabled,
-        maxLength, bloomFilterDesc);
+        maxVersions, compression, inMemory, maxLength, bloomFilterDesc);
 
     return columnDesc;
   }

Modified: hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/HBaseTestCase.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/HBaseTestCase.java?rev=618453&r1=618452&r2=618453&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/HBaseTestCase.java (original)
+++ hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/HBaseTestCase.java Mon Feb  4 13:48:13 2008
@@ -184,11 +184,11 @@
       final int versions) {
     HTableDescriptor htd = new HTableDescriptor(name);
     htd.addFamily(new HColumnDescriptor(new Text(COLFAMILY_NAME1), versions,
-      CompressionType.NONE, false, false, Integer.MAX_VALUE, null));
+      CompressionType.NONE, false, Integer.MAX_VALUE, null));
     htd.addFamily(new HColumnDescriptor(new Text(COLFAMILY_NAME2), versions,
-      CompressionType.NONE, false, false, Integer.MAX_VALUE, null));
+      CompressionType.NONE, false, Integer.MAX_VALUE, null));
     htd.addFamily(new HColumnDescriptor(new Text(COLFAMILY_NAME3), versions,
-      CompressionType.NONE, false, false, Integer.MAX_VALUE, null));
+      CompressionType.NONE, false, Integer.MAX_VALUE, null));
     return htd;
   }
   
@@ -565,4 +565,4 @@
       return this.table.get(row, column, ts, versions);
     }
   }
-}
\ No newline at end of file
+}

Modified: hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/TestBloomFilters.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/TestBloomFilters.java?rev=618453&r1=618452&r2=618453&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/TestBloomFilters.java (original)
+++ hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/TestBloomFilters.java Mon Feb  4 13:48:13 2008
@@ -169,7 +169,6 @@
             1,                                        // Max versions
             HColumnDescriptor.CompressionType.NONE,   // no compression
             HColumnDescriptor.DEFAULT_IN_MEMORY,      // not in memory
-            HColumnDescriptor.DEFAULT_BLOCK_CACHE_ENABLED,
             HColumnDescriptor.DEFAULT_MAX_VALUE_LENGTH,
             bloomFilter
         )
@@ -235,7 +234,6 @@
             1,                                        // Max versions
             HColumnDescriptor.CompressionType.NONE,   // no compression
             HColumnDescriptor.DEFAULT_IN_MEMORY,      // not in memory
-            HColumnDescriptor.DEFAULT_BLOCK_CACHE_ENABLED,
             HColumnDescriptor.DEFAULT_MAX_VALUE_LENGTH,
             bloomFilter
         )

Modified: hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/TestTimestamp.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/TestTimestamp.java?rev=618453&r1=618452&r2=618453&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/TestTimestamp.java (original)
+++ hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/TestTimestamp.java Mon Feb  4 13:48:13 2008
@@ -337,7 +337,7 @@
   private HRegion createRegion() throws IOException {
     HTableDescriptor htd = createTableDescriptor(getName());
     htd.addFamily(new HColumnDescriptor(COLUMN, VERSIONS,
-      CompressionType.NONE, false, false, Integer.MAX_VALUE, null));
+      CompressionType.NONE, false, Integer.MAX_VALUE, null));
     return createNewHRegion(htd, null, null);
   }
-}
\ No newline at end of file
+}

Modified: hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/TestToString.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/TestToString.java?rev=618453&r1=618452&r2=618453&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/TestToString.java (original)
+++ hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/TestToString.java Mon Feb  4 13:48:13 2008
@@ -44,9 +44,8 @@
     HTableDescriptor htd = HTableDescriptor.rootTableDesc;
     System.out.println(htd.toString());
     assertEquals("Table descriptor", "name: -ROOT-, families: {info:={name: " +
-        "info, max versions: 1, compression: NONE, in memory: false, " +
-        "block cache enabled: false, max length: 2147483647, " +
-        "bloom filter: none}}", htd.toString());
+      "info, max versions: 1, compression: NONE, in memory: false, max " +
+      "length: 2147483647, bloom filter: none}}", htd.toString());
   }
   
   /**