Posted to commits@hama.apache.org by ed...@apache.org on 2009/10/07 14:04:05 UTC

svn commit: r822682 [1/2] - in /incubator/hama/trunk: ./ src/java/org/apache/hama/ src/java/org/apache/hama/matrix/ src/test/ src/test/org/apache/hama/ src/test/org/apache/hama/examples/ src/test/org/apache/hama/graph/ src/test/org/apache/hama/mapred/ ...

Author: edwardyoon
Date: Wed Oct  7 12:04:03 2009
New Revision: 822682

URL: http://svn.apache.org/viewvc?rev=822682&view=rev
Log:
Upgrade dependencies in lib folder

Added:
    incubator/hama/trunk/src/test/hbase-site.xml
    incubator/hama/trunk/src/test/log4j.properties
    incubator/hama/trunk/src/test/org/apache/hama/HamaCluster.java
    incubator/hama/trunk/src/test/org/apache/hama/TestHamaAdmin.java
    incubator/hama/trunk/src/test/org/apache/hama/TestHbaseClient.java
    incubator/hama/trunk/src/test/org/apache/hama/matrix/TestAbstractMatrix.java
    incubator/hama/trunk/src/test/org/apache/hama/matrix/TestJacobiEigenValue.java
    incubator/hama/trunk/src/test/org/apache/hama/matrix/TestSubMatirx.java
Removed:
    incubator/hama/trunk/src/test/org/apache/hama/examples/TestFileMatrixBlockMult.java
    incubator/hama/trunk/src/test/org/apache/hama/matrix/HCluster.java
    incubator/hama/trunk/src/test/org/apache/hama/shell/parser/expression/TestHamaExpressionParser.java
Modified:
    incubator/hama/trunk/CHANGES.txt
    incubator/hama/trunk/build.xml
    incubator/hama/trunk/src/java/org/apache/hama/HamaAdminImpl.java
    incubator/hama/trunk/src/java/org/apache/hama/matrix/AbstractMatrix.java
    incubator/hama/trunk/src/java/org/apache/hama/matrix/DenseMatrix.java
    incubator/hama/trunk/src/java/org/apache/hama/matrix/SparseVector.java
    incubator/hama/trunk/src/test/org/apache/hama/graph/TestGraph.java
    incubator/hama/trunk/src/test/org/apache/hama/mapred/TestBlockMatrixMapReduce.java
    incubator/hama/trunk/src/test/org/apache/hama/mapred/TestRandomMatrixMapReduce.java
    incubator/hama/trunk/src/test/org/apache/hama/matrix/MatrixTestCommon.java
    incubator/hama/trunk/src/test/org/apache/hama/matrix/TestDenseMatrix.java
    incubator/hama/trunk/src/test/org/apache/hama/matrix/TestDenseVector.java
    incubator/hama/trunk/src/test/org/apache/hama/matrix/TestMatrixVectorMult.java
    incubator/hama/trunk/src/test/org/apache/hama/matrix/TestSingularValueDecomposition.java
    incubator/hama/trunk/src/test/org/apache/hama/matrix/TestSparseMatrix.java
    incubator/hama/trunk/src/test/org/apache/hama/matrix/TestSparseVector.java

Modified: incubator/hama/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/incubator/hama/trunk/CHANGES.txt?rev=822682&r1=822681&r2=822682&view=diff
==============================================================================
--- incubator/hama/trunk/CHANGES.txt (original)
+++ incubator/hama/trunk/CHANGES.txt Wed Oct  7 12:04:03 2009
@@ -40,6 +40,7 @@
 
   IMPROVEMENTS
 
+    HAMA-188: Upgrade dependencies for hadoop/hbase 2.0 (edwardyoon)
     HAMA-192: Refactor top-level package (edwardyoon)
     HAMA-189: Update website (edwardyoon)
     HAMA-187: Add matrix subtraction test case (edwardyoon)

Modified: incubator/hama/trunk/build.xml
URL: http://svn.apache.org/viewvc/incubator/hama/trunk/build.xml?rev=822682&r1=822681&r2=822682&view=diff
==============================================================================
--- incubator/hama/trunk/build.xml (original)
+++ incubator/hama/trunk/build.xml Wed Oct  7 12:04:03 2009
@@ -1,327 +1,324 @@
-<?xml version="1.0"?>
-
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-
-<project name="hama" default="jar">
-    <property name="version" value="0.1.0-dev" />
-    <property name="Name" value="Hama" />
-    <property name="final.name" value="hama-${version}" />
-    <property name="year" value="2008" />
-
-    <!-- Load all the default properties, and any the user wants    -->
-    <!-- to contribute (without having to type -D or edit this file -->
-    <property file="${user.home}/${name}.build.properties" />
-    <property file="${basedir}/build.properties" />
-
-    <property name="src.dir" location="${basedir}/src/java" />
-    <property name="src.gen.dir" location="${basedir}/src-gen" />
-    <property name="src.test" location="${basedir}/src/test" />
-    <property name="src.examples" location="${basedir}/src/examples" />
-
-    <property name="lib.dir" value="${basedir}/lib" />
-    <property name="conf.dir" value="${basedir}/conf" />
-    <property name="docs.dir" value="${basedir}/docs" />
-    <property name="docs.src" value="${basedir}/src/docs" />
-
-	<!-- javacc properties -->
-    <property name="javacc.home" value="${basedir}/lib" />
-
-    <property name="src.gen.parser.expression.dir" value="${src.gen.dir}/org/apache/hama/shell/parser/expression" />
-    <property name="src.gen.parser.script.dir" value="${src.gen.dir}/org/apache/hama/shell/parser/script" />
-
-	<!-- build properties -->
-	
-    <property name="test.output" value="no" />
-    <property name="test.timeout" value="600000" />
-
-    <property name="build.dir" location="${basedir}/build" />
-    <property name="build.lib" location="${build.dir}/lib" />
-    <property name="build.classes" location="${build.dir}/classes" />
-    <property name="build.test" location="${build.dir}/test" />
-    <property name="build.examples" location="${build.dir}/examples" />
-    <property name="build.docs" value="${build.dir}/docs/site" />
-    <property name="build.javadoc" value="${build.docs}/api" />
-    <property name="build.encoding" value="ISO-8859-1" />
-    <property name="build.src" value="${build.dir}/src" />
-    
-    <property name="build.report" value="${build.dir}/reports" />
-    <property name="build.report.findbugs" value="${build.report}/findbugs" />
-    <property name="build.report.tests" value="${build.report}/tests" />
-    
-    <property name="test.build.dir" value="${build.dir}/test" />
-    <property name="test.junit.output.format" value="plain" />
-
-    <property name="dist.dir" value="${build.dir}/${final.name}" />
-
-    <property name="javac.deprecation" value="off" />
-    <property name="javac.debug" value="on" />
-
-    <property name="javadoc.link.java" 
-                 value="http://java.sun.com/javase/6/docs/api/" />
-    <property name="javadoc.packages" value="org.apache.hama.*" />
-
-    <fileset id="lib.jars" dir="${basedir}" includes="lib/*.jar" />
-    <path id="classpath">
-        <fileset refid="lib.jars" />
-        <fileset dir="${lib.dir}/jetty-ext/">
-            <include name="*jar" />
-        </fileset>
-        <fileset dir="${lib.dir}/findbugs/">
-            <include name="*jar" />
-        </fileset>
-        <pathelement location="${build.classes}" />
-        <pathelement location="${conf.dir}" />
-    </path>
-
-    <taskdef name="findbugs" classpathref="classpath" 
-    	classname="edu.umd.cs.findbugs.anttask.FindBugsTask" />
-
-    <target name="init">
-        <mkdir dir="${src.gen.dir}" />
-        <mkdir dir="${src.gen.parser.script.dir}" />
-        <mkdir dir="${src.gen.parser.expression.dir}" />
-        <mkdir dir="${build.dir}" />
-        <mkdir dir="${build.classes}" />
-        <mkdir dir="${build.test}" />
-        <mkdir dir="${build.examples}" />
-        <mkdir dir="${build.report.findbugs}" />
-        <mkdir dir="${build.report.tests}" />
-        <!--Copy bin, lib, and conf. too-->
-        <mkdir dir="${build.lib}" />
-        <copy todir="${build.lib}">
-            <fileset dir="${lib.dir}" />
-        </copy>
-    </target>
-	
-	<!-- ================================================================== -->
-    <!-- Java Compiler Compiler, generate Parsers                           -->
-    <!-- ================================================================== -->
-    <target name="cc-compile" depends="init" description="Create and Compile Parser">
-        <jjtree target="${src.dir}/org/apache/hama/shell/parser/expression/HamaExpressionParser.jjt" outputdirectory="${src.gen.parser.expression.dir}" javacchome="${javacc.home}" />
-        <javacc target="${src.gen.parser.expression.dir}/HamaExpressionParser.jj" outputdirectory="${src.gen.parser.expression.dir}" javacchome="${javacc.home}" />
-        <javacc target="${src.dir}/org/apache/hama/shell/parser/script/HamaScriptParser.jj" outputdirectory="${src.gen.parser.script.dir}" javacchome="${javacc.home}" />
-    </target>
-
-    <target name="compile" depends="init, cc-compile">
-        <!--Compile whats under src and generated java classes made from jsp-->
-        <mkdir dir="${build.src}" />
-        <javac encoding="${build.encoding}" 
-                srcdir="${src.dir};${src.gen.dir};${build.src}" includes="**/*.java" 
-        	destdir="${build.classes}" debug="${javac.debug}" 
-                deprecation="${javac.deprecation}">
-            <classpath refid="classpath" />
-        </javac>
-    </target>
-
-    <target name="compile-examples" depends="compile">
-        <javac encoding="${build.encoding}" srcdir="${src.examples}" 
-                includes="**/*.java" 
-        	destdir="${build.examples}" debug="${javac.debug}" 
-                deprecation="${javac.deprecation}">
-            <classpath refid="classpath" />
-        </javac>
-    </target>
-
-    <!-- Override jar target to specify main class -->
-    <target name="jar" depends="compile">
-        <jar jarfile="${build.dir}/${final.name}.jar" 
-              basedir="${build.classes}">
-            <manifest>
-                <attribute name="Main-Class" 
-                    value="org/apache/hama/shell/HamaShell" />
-            </manifest>
-        </jar>
-    </target>
-
-    <target name="examples" depends="jar, compile-examples" 
-    	description="Make the hama examples jar.">
-        <jar jarfile="${build.dir}/${final.name}-examples.jar" 
-                basedir="${build.examples}">
-            <manifest>
-                <attribute name="Main-Class" 
-                    value="org/apache/hama/examples/ExampleDriver" />
-            </manifest>
-        </jar>
-    </target>
-
-    <target name="package" depends="jar,javadoc,compile-test, examples" 
-    	description="Build distribution">
-        <mkdir dir="${dist.dir}" />
-        <copy todir="${dist.dir}" includeEmptyDirs="false" flatten="true">
-            <fileset dir="${build.dir}">
-                <include name="${final.name}.jar" />
-                <include name="${final.name}-test.jar" />
-            </fileset>
-        </copy>
-        <mkdir dir="${dist.dir}/lib" />
-        <copy todir="${dist.dir}/lib">
-            <fileset dir="${build.lib}" />
-        </copy>
-        <copy todir="${dist.dir}">
-            <fileset dir=".">
-                <include name="*.txt" />
-            </fileset>
-        </copy>
-        <mkdir dir="${dist.dir}/src" />
-        <copy todir="${dist.dir}/src" includeEmptyDirs="true">
-            <fileset dir="src" excludes="**/*.template **/docs/build/**/*" />
-        </copy>
-    </target>
-
-    <!-- ================================================================== -->
-    <!-- Make release tarball                                               -->
-    <!-- ================================================================== -->
-    <macrodef name="macro_tar" description="Worker Macro for tar">
-        <attribute name="param.destfile" />
-        <element name="param.listofitems" />
-        <sequential>
-            <tar compression="gzip" longfile="gnu" destfile="@{param.destfile}">
-                <param.listofitems />
-            </tar>
-        </sequential>
-    </macrodef>
-    <target name="tar" depends="package" description="Make release tarball">
-        <macro_tar param.destfile="${build.dir}/${final.name}.tar.gz">
-            <param.listofitems>
-                <tarfileset dir="${build.dir}" mode="664">
-                    <exclude name="${final.name}/bin/*" />
-                    <include name="${final.name}/**" />
-                </tarfileset>
-                <tarfileset dir="${build.dir}" mode="755">
-                    <include name="${final.name}/bin/*" />
-                </tarfileset>
-            </param.listofitems>
-        </macro_tar>
-    </target>
-
-    <target name="binary" depends="package" 
-    	description="Make tarball without source and documentation">
-        <macro_tar param.destfile="${build.dir}/${final.name}-bin.tar.gz">
-            <param.listofitems>
-                <tarfileset dir="${build.dir}" mode="664">
-                    <exclude name="${final.name}/bin/*" />
-                    <exclude name="${final.name}/src/**" />
-                    <exclude name="${final.name}/docs/**" />
-                    <include name="${final.name}/**" />
-                </tarfileset>
-                <tarfileset dir="${build.dir}" mode="755">
-                    <include name="${final.name}/bin/*" />
-                </tarfileset>
-            </param.listofitems>
-        </macro_tar>
-    </target>
-
-    <!-- ================================================================== -->
-    <!-- Doc                                                                -->
-    <!-- ================================================================== -->
-    <target name="docs" depends="forrest.check" description="Generate forrest-based documentation. To use, specify -Dforrest.home=&lt;base of Apache Forrest installation&gt; on the command line." if="forrest.home">
-        <exec dir="${docs.src}" executable="${forrest.home}/bin/forrest" 
-               failonerror="true">
-        	<env key="JAVA_HOME" value="${java5.home}"/>
-        </exec>
-        <copy todir="${build.docs}">
-          <fileset dir="${docs.src}/build/site/" />
-        </copy>
-    	<delete dir="${docs.src}/build/"/>
-    </target>
-
-    <target name="forrest.check" unless="forrest.home">
-        <fail message="'forrest.home' is not defined. Please pass -Dforrest.home=&lt;base of Apache Forrest installation&gt; to Ant on the command-line." />
-    </target>
-
-    <!-- Javadoc -->
-    <target name="javadoc" depends="cc-compile" description="Generate javadoc">
-        <mkdir dir="${build.javadoc}" />
-        <javadoc overview="${src.dir}/overview.html" packagenames="org.apache.hama.*" 
-        	    destdir="${build.javadoc}" author="true" version="true" use="true" 
-        	    windowtitle="${Name} ${version} API" doctitle="${Name} ${version} API" 
-        	    bottom="Copyright &amp;copy; ${year} The Apache Software Foundation">
-            <packageset dir="${src.dir}">
-                <include name="org/apache/**" />
-            </packageset>
-            <packageset dir="${src.gen.dir}">
-                <include name="org/apache/**" />
-            </packageset>
-            <link href="${javadoc.link.java}" />
-            <classpath>
-                <path refid="classpath" />
-                <pathelement path="${java.class.path}" />
-            </classpath>
-            <group title="${Name}" packages="org.apache.hama.*" />
-        </javadoc>
-    </target>
-
-    <!-- ================================================================== -->
-    <!-- Run unit tests                                                     -->
-    <!-- ================================================================== -->
-    <path id="test.classpath">
-        <pathelement location="${src.test}" />
-        <pathelement location="${build.test}" />
-        <path refid="classpath" />
-        <pathelement location="${build.dir}" />
-    </path>
-
-    <target name="compile-test" depends="compile">
-        <javac encoding="${build.encoding}" srcdir="${src.test}" 
-           includes="**/*.java" destdir="${build.test}" debug="${javac.debug}">
-            <classpath refid="test.classpath" />
-        </javac>
-        <jar jarfile="${build.dir}/${final.name}-test.jar">
-            <fileset dir="${build.test}" includes="org/**" />
-            <fileset dir="${build.classes}" />
-            <fileset dir="${src.test}" includes="**/*.properties" />
-            <manifest>
-              <attribute name="Main-Class" value="org/apache/hama/Benchmarks" />
-            </manifest>
-        </jar>
-    </target>
-
-    <target name="test" depends="compile-test, compile">
-        <junit printsummary="yes" showoutput="${test.output}" haltonfailure="no"
-              fork="yes" maxmemory="512m" errorProperty="tests.failed" 
-        	     failureProperty="tests.failed" timeout="${test.timeout}">
-            <classpath refid="test.classpath" />
-            <formatter type="${test.junit.output.format}" />
-            <batchtest todir="${build.report.tests}">
-                <fileset dir="${src.test}" includes="**/Test*.java" 
-                	 excludes="**/${test.exclude}.java" />
-            </batchtest>
-        </junit>
-        <fail if="tests.failed">Tests failed!</fail>
-    </target>
-
-    <target name="findbugs" depends="init, jar">
-        <findbugs home="${lib.dir}/findbugs" output="xml" 
-        	     outputFile="${build.report.findbugs}/hama-findbugs.xml" 
-               excludeFilter="${conf.dir}/findbugs-exclude-filter.xml"
-        	     auxClasspathRef="classpath">
-            <sourcePath path="${src.dir}" />
-            <class location="${build.dir}/${final.name}.jar" />
-        </findbugs>
-    </target>
-    
-    <target name="report" depends="findbugs,test"></target>
-    
-    <!-- ================================================================== -->
-    <!-- Clean.  Delete the build files, and their directories              -->
-    <!-- ================================================================== -->
-    <target name="clean">
-        <delete dir="${src.gen.dir}" />
-        <delete dir="${build.dir}" />
-    </target>
-</project>
+<?xml version="1.0"?>
+
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<project name="hama" default="jar">
+    <property name="version" value="0.2.0-dev" />
+    <property name="Name" value="Hama" />
+    <property name="final.name" value="hama-${version}" />
+    <property name="year" value="2009" />
+
+    <!-- Load all the default properties, and any the user wants    -->
+    <!-- to contribute (without having to type -D or edit this file -->
+    <property file="${user.home}/${name}.build.properties" />
+    <property file="${basedir}/build.properties" />
+
+    <property name="src.dir" location="${basedir}/src/java" />
+    <property name="src.gen.dir" location="${basedir}/src-gen" />
+    <property name="src.test" location="${basedir}/src/test" />
+    <property name="src.examples" location="${basedir}/src/examples" />
+
+    <property name="lib.dir" value="${basedir}/lib" />
+    <property name="conf.dir" value="${basedir}/conf" />
+    <property name="docs.dir" value="${basedir}/docs" />
+    <property name="docs.src" value="${basedir}/src/docs" />
+
+	<!-- javacc properties -->
+    <property name="javacc.home" value="${basedir}/lib" />
+
+    <property name="src.gen.parser.expression.dir" value="${src.gen.dir}/org/apache/hama/shell/parser/expression" />
+    <property name="src.gen.parser.script.dir" value="${src.gen.dir}/org/apache/hama/shell/parser/script" />
+
+	<!-- build properties -->
+	
+    <property name="test.output" value="no" />
+    <property name="test.timeout" value="600000" />
+
+    <property name="build.dir" location="${basedir}/build" />
+    <property name="build.lib" location="${build.dir}/lib" />
+    <property name="build.classes" location="${build.dir}/classes" />
+    <property name="build.test" location="${build.dir}/test" />
+    <property name="build.examples" location="${build.dir}/examples" />
+    <property name="build.docs" value="${build.dir}/docs/site" />
+    <property name="build.javadoc" value="${build.docs}/api" />
+    <property name="build.encoding" value="ISO-8859-1" />
+    <property name="build.src" value="${build.dir}/src" />
+    
+    <property name="build.report" value="${build.dir}/reports" />
+    <property name="build.report.findbugs" value="${build.report}/findbugs" />
+    <property name="build.report.tests" value="${build.report}/tests" />
+    
+    <property name="test.build.dir" value="${build.dir}/test" />
+    <property name="test.junit.output.format" value="plain" />
+
+    <property name="dist.dir" value="${build.dir}/${final.name}" />
+
+    <property name="javac.deprecation" value="off" />
+    <property name="javac.debug" value="on" />
+
+    <property name="javadoc.link.java" 
+                 value="http://java.sun.com/javase/6/docs/api/" />
+    <property name="javadoc.packages" value="org.apache.hama.*" />
+
+    <fileset id="lib.jars" dir="${basedir}" includes="lib/*.jar" />
+    <path id="classpath">
+        <fileset refid="lib.jars" />
+        <fileset dir="${lib.dir}/findbugs/">
+            <include name="*jar" />
+        </fileset>
+        <pathelement location="${build.classes}" />
+        <pathelement location="${conf.dir}" />
+    </path>
+
+    <taskdef name="findbugs" classpathref="classpath" 
+    	classname="edu.umd.cs.findbugs.anttask.FindBugsTask" />
+
+    <target name="init">
+        <mkdir dir="${src.gen.dir}" />
+        <mkdir dir="${src.gen.parser.script.dir}" />
+        <mkdir dir="${src.gen.parser.expression.dir}" />
+        <mkdir dir="${build.dir}" />
+        <mkdir dir="${build.classes}" />
+        <mkdir dir="${build.test}" />
+        <mkdir dir="${build.examples}" />
+        <mkdir dir="${build.report.findbugs}" />
+        <mkdir dir="${build.report.tests}" />
+        <!--Copy bin, lib, and conf. too-->
+        <mkdir dir="${build.lib}" />
+        <copy todir="${build.lib}">
+            <fileset dir="${lib.dir}" />
+        </copy>
+    </target>
+	
+	<!-- ================================================================== -->
+    <!-- Java Compiler Compiler, generate Parsers                           -->
+    <!-- ================================================================== -->
+    <target name="cc-compile" depends="init" description="Create and Compile Parser">
+        <jjtree target="${src.dir}/org/apache/hama/shell/parser/expression/HamaExpressionParser.jjt" outputdirectory="${src.gen.parser.expression.dir}" javacchome="${javacc.home}" />
+        <javacc target="${src.gen.parser.expression.dir}/HamaExpressionParser.jj" outputdirectory="${src.gen.parser.expression.dir}" javacchome="${javacc.home}" />
+        <javacc target="${src.dir}/org/apache/hama/shell/parser/script/HamaScriptParser.jj" outputdirectory="${src.gen.parser.script.dir}" javacchome="${javacc.home}" />
+    </target>
+
+    <target name="compile" depends="init, cc-compile">
+        <!--Compile whats under src and generated java classes made from jsp-->
+        <mkdir dir="${build.src}" />
+        <javac encoding="${build.encoding}" 
+                srcdir="${src.dir};${src.gen.dir};${build.src}" includes="**/*.java" 
+        	destdir="${build.classes}" debug="${javac.debug}" 
+                deprecation="${javac.deprecation}">
+            <classpath refid="classpath" />
+        </javac>
+    </target>
+
+    <target name="compile-examples" depends="compile">
+        <javac encoding="${build.encoding}" srcdir="${src.examples}" 
+                includes="**/*.java" 
+        	destdir="${build.examples}" debug="${javac.debug}" 
+                deprecation="${javac.deprecation}">
+            <classpath refid="classpath" />
+        </javac>
+    </target>
+
+    <!-- Override jar target to specify main class -->
+    <target name="jar" depends="compile">
+        <jar jarfile="${build.dir}/${final.name}.jar" 
+              basedir="${build.classes}">
+            <manifest>
+                <attribute name="Main-Class" 
+                    value="org/apache/hama/shell/HamaShell" />
+            </manifest>
+        </jar>
+    </target>
+
+    <target name="examples" depends="jar, compile-examples" 
+    	description="Make the hama examples jar.">
+        <jar jarfile="${build.dir}/${final.name}-examples.jar" 
+                basedir="${build.examples}">
+            <manifest>
+                <attribute name="Main-Class" 
+                    value="org/apache/hama/examples/ExampleDriver" />
+            </manifest>
+        </jar>
+    </target>
+
+    <target name="package" depends="jar,javadoc,compile-test, examples" 
+    	description="Build distribution">
+        <mkdir dir="${dist.dir}" />
+        <copy todir="${dist.dir}" includeEmptyDirs="false" flatten="true">
+            <fileset dir="${build.dir}">
+                <include name="${final.name}.jar" />
+                <include name="${final.name}-test.jar" />
+            </fileset>
+        </copy>
+        <mkdir dir="${dist.dir}/lib" />
+        <copy todir="${dist.dir}/lib">
+            <fileset dir="${build.lib}" />
+        </copy>
+        <copy todir="${dist.dir}">
+            <fileset dir=".">
+                <include name="*.txt" />
+            </fileset>
+        </copy>
+        <mkdir dir="${dist.dir}/src" />
+        <copy todir="${dist.dir}/src" includeEmptyDirs="true">
+            <fileset dir="src" excludes="**/*.template **/docs/build/**/*" />
+        </copy>
+    </target>
+
+    <!-- ================================================================== -->
+    <!-- Make release tarball                                               -->
+    <!-- ================================================================== -->
+    <macrodef name="macro_tar" description="Worker Macro for tar">
+        <attribute name="param.destfile" />
+        <element name="param.listofitems" />
+        <sequential>
+            <tar compression="gzip" longfile="gnu" destfile="@{param.destfile}">
+                <param.listofitems />
+            </tar>
+        </sequential>
+    </macrodef>
+    <target name="tar" depends="package" description="Make release tarball">
+        <macro_tar param.destfile="${build.dir}/${final.name}.tar.gz">
+            <param.listofitems>
+                <tarfileset dir="${build.dir}" mode="664">
+                    <exclude name="${final.name}/bin/*" />
+                    <include name="${final.name}/**" />
+                </tarfileset>
+                <tarfileset dir="${build.dir}" mode="755">
+                    <include name="${final.name}/bin/*" />
+                </tarfileset>
+            </param.listofitems>
+        </macro_tar>
+    </target>
+
+    <target name="binary" depends="package" 
+    	description="Make tarball without source and documentation">
+        <macro_tar param.destfile="${build.dir}/${final.name}-bin.tar.gz">
+            <param.listofitems>
+                <tarfileset dir="${build.dir}" mode="664">
+                    <exclude name="${final.name}/bin/*" />
+                    <exclude name="${final.name}/src/**" />
+                    <exclude name="${final.name}/docs/**" />
+                    <include name="${final.name}/**" />
+                </tarfileset>
+                <tarfileset dir="${build.dir}" mode="755">
+                    <include name="${final.name}/bin/*" />
+                </tarfileset>
+            </param.listofitems>
+        </macro_tar>
+    </target>
+
+    <!-- ================================================================== -->
+    <!-- Doc                                                                -->
+    <!-- ================================================================== -->
+    <target name="docs" depends="forrest.check" description="Generate forrest-based documentation. To use, specify -Dforrest.home=&lt;base of Apache Forrest installation&gt; on the command line." if="forrest.home">
+        <exec dir="${docs.src}" executable="${forrest.home}/bin/forrest" 
+               failonerror="true">
+        	<env key="JAVA_HOME" value="${java5.home}"/>
+        </exec>
+        <copy todir="${build.docs}">
+          <fileset dir="${docs.src}/build/site/" />
+        </copy>
+    	<delete dir="${docs.src}/build/"/>
+    </target>
+
+    <target name="forrest.check" unless="forrest.home">
+        <fail message="'forrest.home' is not defined. Please pass -Dforrest.home=&lt;base of Apache Forrest installation&gt; to Ant on the command-line." />
+    </target>
+
+    <!-- Javadoc -->
+    <target name="javadoc" depends="cc-compile" description="Generate javadoc">
+        <mkdir dir="${build.javadoc}" />
+        <javadoc overview="${src.dir}/overview.html" packagenames="org.apache.hama.*" 
+        	    destdir="${build.javadoc}" author="true" version="true" use="true" 
+        	    windowtitle="${Name} ${version} API" doctitle="${Name} ${version} API" 
+        	    bottom="Copyright &amp;copy; ${year} The Apache Software Foundation">
+            <packageset dir="${src.dir}">
+                <include name="org/apache/**" />
+            </packageset>
+            <packageset dir="${src.gen.dir}">
+                <include name="org/apache/**" />
+            </packageset>
+            <link href="${javadoc.link.java}" />
+            <classpath>
+                <path refid="classpath" />
+                <pathelement path="${java.class.path}" />
+            </classpath>
+            <group title="${Name}" packages="org.apache.hama.*" />
+        </javadoc>
+    </target>
+
+    <!-- ================================================================== -->
+    <!-- Run unit tests                                                     -->
+    <!-- ================================================================== -->
+    <path id="test.classpath">
+        <pathelement location="${src.test}" />
+        <pathelement location="${build.test}" />
+        <path refid="classpath" />
+        <pathelement location="${build.dir}" />
+    </path>
+
+    <target name="compile-test" depends="compile">
+        <javac encoding="${build.encoding}" srcdir="${src.test}" 
+           includes="**/*.java" destdir="${build.test}" debug="${javac.debug}">
+            <classpath refid="test.classpath" />
+        </javac>
+        <jar jarfile="${build.dir}/${final.name}-test.jar">
+            <fileset dir="${build.test}" includes="org/**" />
+            <fileset dir="${build.classes}" />
+            <fileset dir="${src.test}" includes="**/*.properties" />
+            <manifest>
+              <attribute name="Main-Class" value="org/apache/hama/Benchmarks" />
+            </manifest>
+        </jar>
+    </target>
+
+    <target name="test" depends="compile-test, compile">
+        <junit printsummary="yes" showoutput="${test.output}" haltonfailure="no"
+              fork="yes" maxmemory="512m" errorProperty="tests.failed" 
+        	     failureProperty="tests.failed" timeout="${test.timeout}">
+            <classpath refid="test.classpath" />
+            <formatter type="${test.junit.output.format}" />
+            <batchtest todir="${build.report.tests}">
+                <fileset dir="${src.test}" includes="**/Test*.java" 
+                	 excludes="**/${test.exclude}.java" />
+            </batchtest>
+        </junit>
+        <fail if="tests.failed">Tests failed!</fail>
+    </target>
+
+    <target name="findbugs" depends="init, jar">
+        <findbugs home="${lib.dir}/findbugs" output="xml" 
+        	     outputFile="${build.report.findbugs}/hama-findbugs.xml" 
+               excludeFilter="${conf.dir}/findbugs-exclude-filter.xml"
+        	     auxClasspathRef="classpath">
+            <sourcePath path="${src.dir}" />
+            <class location="${build.dir}/${final.name}.jar" />
+        </findbugs>
+    </target>
+    
+    <target name="report" depends="findbugs,test"></target>
+    
+    <!-- ================================================================== -->
+    <!-- Clean.  Delete the build files, and their directories              -->
+    <!-- ================================================================== -->
+    <target name="clean">
+        <delete dir="${src.gen.dir}" />
+        <delete dir="${build.dir}" />
+    </target>
+</project>

Modified: incubator/hama/trunk/src/java/org/apache/hama/HamaAdminImpl.java
URL: http://svn.apache.org/viewvc/incubator/hama/trunk/src/java/org/apache/hama/HamaAdminImpl.java?rev=822682&r1=822681&r2=822682&view=diff
==============================================================================
--- incubator/hama/trunk/src/java/org/apache/hama/HamaAdminImpl.java (original)
+++ incubator/hama/trunk/src/java/org/apache/hama/HamaAdminImpl.java Wed Oct  7 12:04:03 2009
@@ -25,8 +25,10 @@
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.MasterNotRunningException;
 import org.apache.hadoop.hbase.RegionException;
+import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.io.BatchUpdate;
 import org.apache.hadoop.hbase.io.Cell;
 import org.apache.hadoop.hbase.util.Bytes;
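
The two new imports (Get, Result) belong to the read path of the HBase 0.20 client API, towards which the older Cell/BatchUpdate based code appears to be migrating. A minimal, self-contained sketch of that read pattern follows; the table name, row key, and family/qualifier are illustrative only and are not taken from HamaAdminImpl.

    import java.io.IOException;

    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.Get;
    import org.apache.hadoop.hbase.client.HTable;
    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.hbase.util.Bytes;

    public class GetExample {
      public static void main(String[] args) throws IOException {
        HBaseConfiguration conf = new HBaseConfiguration();
        // illustrative table name, not the one HamaAdminImpl uses
        HTable table = new HTable(conf, "hama.admin.table");

        // build a Get for one row and one family:qualifier pair
        Get get = new Get(Bytes.toBytes("matrix_aliase_A"));
        get.addColumn(Bytes.toBytes("aliase"), Bytes.toBytes("name"));

        Result result = table.get(get);
        byte[] value = result.getValue(Bytes.toBytes("aliase"), Bytes.toBytes("name"));
        if (value != null) {
          System.out.println(Bytes.toString(value));
        }
      }
    }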

Modified: incubator/hama/trunk/src/java/org/apache/hama/matrix/AbstractMatrix.java
URL: http://svn.apache.org/viewvc/incubator/hama/trunk/src/java/org/apache/hama/matrix/AbstractMatrix.java?rev=822682&r1=822681&r2=822682&view=diff
==============================================================================
--- incubator/hama/trunk/src/java/org/apache/hama/matrix/AbstractMatrix.java (original)
+++ incubator/hama/trunk/src/java/org/apache/hama/matrix/AbstractMatrix.java Wed Oct  7 12:04:03 2009
@@ -143,6 +143,18 @@
   protected void create() throws IOException {
     // It should run only when table doesn't exist.
     if (!admin.tableExists(matrixPath)) {
+      this.tableDesc.addFamily(new HColumnDescriptor(Bytes.toBytes(Constants.COLUMN)));
+      this.tableDesc.addFamily(new HColumnDescriptor(Bytes.toBytes(Constants.ATTRIBUTE)));
+      this.tableDesc.addFamily(new HColumnDescriptor(Bytes.toBytes(Constants.ALIASEFAMILY)));
+      
+      // It's a temporary data.
+      this.tableDesc.addFamily(new HColumnDescriptor(Bytes.toBytes(Constants.BLOCK)));
+     // the following families are used in JacobiEigenValue computation
+      this.tableDesc.addFamily(new HColumnDescriptor(Bytes.toBytes(JacobiEigenValue.EI)));
+      this.tableDesc.addFamily(new HColumnDescriptor(Bytes.toBytes(JacobiEigenValue.EICOL)));
+      this.tableDesc.addFamily(new HColumnDescriptor(Bytes.toBytes(JacobiEigenValue.EIVEC)));
+      
+      /*
       this.tableDesc.addFamily(new HColumnDescriptor(Bytes
           .toBytes(Constants.COLUMN), 3, CompressionType.NONE, false, false,
           Integer.MAX_VALUE, HConstants.FOREVER, false));
@@ -162,7 +174,7 @@
       this.tableDesc.addFamily(new HColumnDescriptor(Bytes
           .toBytes(JacobiEigenValue.EIVEC), 10, CompressionType.NONE, false,
           false, Integer.MAX_VALUE, HConstants.FOREVER, false));
-
+      */
       LOG.info("Initializing the matrix storage.");
       this.admin.createTable(this.tableDesc);
       LOG.info("Create Matrix " + matrixPath);

Modified: incubator/hama/trunk/src/java/org/apache/hama/matrix/DenseMatrix.java
URL: http://svn.apache.org/viewvc/incubator/hama/trunk/src/java/org/apache/hama/matrix/DenseMatrix.java?rev=822682&r1=822681&r2=822682&view=diff
==============================================================================
--- incubator/hama/trunk/src/java/org/apache/hama/matrix/DenseMatrix.java (original)
+++ incubator/hama/trunk/src/java/org/apache/hama/matrix/DenseMatrix.java Wed Oct  7 12:04:03 2009
@@ -59,7 +59,6 @@
 import org.apache.hama.mapred.RandomMatrixMap;
 import org.apache.hama.mapred.RandomMatrixReduce;
 import org.apache.hama.mapred.VectorInputFormat;
-import org.apache.hama.matrix.Matrix.Norm;
 import org.apache.hama.matrix.algebra.BlockMultiplyMap;
 import org.apache.hama.matrix.algebra.BlockMultiplyReduce;
 import org.apache.hama.matrix.algebra.DenseMatrixVectorMultMap;

Modified: incubator/hama/trunk/src/java/org/apache/hama/matrix/SparseVector.java
URL: http://svn.apache.org/viewvc/incubator/hama/trunk/src/java/org/apache/hama/matrix/SparseVector.java?rev=822682&r1=822681&r2=822682&view=diff
==============================================================================
--- incubator/hama/trunk/src/java/org/apache/hama/matrix/SparseVector.java (original)
+++ incubator/hama/trunk/src/java/org/apache/hama/matrix/SparseVector.java Wed Oct  7 12:04:03 2009
@@ -1,186 +1,185 @@
-/**
- * Copyright 2007 The Apache Software Foundation
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hama.matrix;
-
-import java.io.IOException;
-import java.util.Map;
-
-import org.apache.hadoop.hbase.io.RowResult;
-import org.apache.hadoop.io.IntWritable;
-import org.apache.hadoop.io.MapWritable;
-import org.apache.hadoop.io.Writable;
-import org.apache.hama.io.DoubleEntry;
-import org.apache.hama.matrix.Vector.Norm;
-import org.apache.log4j.Logger;
-
-/**
- * This class represents a sparse vector.
- */
-public class SparseVector extends AbstractVector implements Vector {
-  static final Logger LOG = Logger.getLogger(SparseVector.class);
-
-  public SparseVector() {
-    this(new MapWritable());
-  }
-
-  public SparseVector(MapWritable m) {
-    this.entries = m;
-  }
-
-  public SparseVector(RowResult row) {
-    this.initMap(row);
-  }
-
-  @Override
-  public Vector add(double alpha, Vector v) {
-    if (alpha == 0)
-      return this;
-
-    for (Map.Entry<Writable, Writable> e : v.getEntries().entrySet()) {
-      if (this.entries.containsKey(e.getKey())) {
-        // add
-        double value = alpha * ((DoubleEntry) e.getValue()).getValue()
-            + this.get(((IntWritable) e.getKey()).get());
-        this.entries.put(e.getKey(), new DoubleEntry(value));
-      } else {
-        // put
-        double value = alpha * ((DoubleEntry) e.getValue()).getValue();
-        this.entries.put(e.getKey(), new DoubleEntry(value));
-      }
-    }
-
-    return this;
-  }
-
-  /**
-   * x = v + x
-   * 
-   * @param v2
-   * @return x = v + x
-   */
-  public SparseVector add(Vector v2) {
-
-    for (Map.Entry<Writable, Writable> e : v2.getEntries().entrySet()) {
-      int key = ((IntWritable) e.getKey()).get();
-      if (this.entries.containsKey(e.getKey())) {
-        this.add(key, ((DoubleEntry) e.getValue()).getValue());
-      } else {
-        this.set(key, ((DoubleEntry) e.getValue()).getValue());
-      }
-    }
-
-    return this;
-  }
-
-  @Override
-  public double dot(Vector v) {
-    // TODO Auto-generated method stub
-    return 0;
-  }
-
-  @Override
-  public double norm(Norm type) {
-    // TODO Auto-generated method stub
-    return 0;
-  }
-
-  /**
-   * v = alpha*v
-   * 
-   * @param alpha
-   * @return v = alpha*v
-   */
-  public SparseVector scale(double alpha) {
-    for (Map.Entry<Writable, Writable> e : this.entries.entrySet()) {
-      this.entries.put(e.getKey(), new DoubleEntry(((DoubleEntry) e.getValue())
-          .getValue()
-          * alpha));
-    }
-    return this;
-  }
-
-  /**
-   * Gets the value of index
-   * 
-   * @param index
-   * @return the value of v(index)
-   * @throws IOException
-   */
-  public double get(int index) {
-    double value;
-    try {
-      value = ((DoubleEntry) this.entries.get(new IntWritable(index)))
-          .getValue();
-    } catch (NullPointerException e) { // returns zero if there is no value
-      return 0;
-    }
-
-    return value;
-  }
-
-  /**
-   * Sets the value of index
-   * 
-   * @param index
-   * @param value
-   */
-  public void set(int index, double value) {
-    // If entries are null, create new object
-    if (this.entries == null) {
-      this.entries = new MapWritable();
-    }
-
-    if (value != 0) // only stores non-zero element
-      this.entries.put(new IntWritable(index), new DoubleEntry(value));
-  }
-
-  /**
-   * Adds the value to v(index)
-   * 
-   * @param index
-   * @param value
-   */
-  public void add(int index, double value) {
-    set(index, get(index) + value);
-  }
-
-  /**
-   * Sets the vector
-   * 
-   * @param v
-   * @return x = v
-   */
-  public SparseVector set(Vector v) {
-    return new SparseVector(v.getEntries());
-  }
-
-  @Override
-  public Vector subVector(int i0, int i1) {
-    // TODO Auto-generated method stub
-    return null;
-  }
-
-  @Override
-  public Vector set(double alpha, Vector v) {
-    // TODO Auto-generated method stub
-    return null;
-  }
-
-}
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hama.matrix;
+
+import java.io.IOException;
+import java.util.Map;
+
+import org.apache.hadoop.hbase.io.RowResult;
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.MapWritable;
+import org.apache.hadoop.io.Writable;
+import org.apache.hama.io.DoubleEntry;
+import org.apache.log4j.Logger;
+
+/**
+ * This class represents a sparse vector.
+ */
+public class SparseVector extends AbstractVector implements Vector {
+  static final Logger LOG = Logger.getLogger(SparseVector.class);
+
+  public SparseVector() {
+    this(new MapWritable());
+  }
+
+  public SparseVector(MapWritable m) {
+    this.entries = m;
+  }
+
+  public SparseVector(RowResult row) {
+    this.initMap(row);
+  }
+
+  @Override
+  public Vector add(double alpha, Vector v) {
+    if (alpha == 0)
+      return this;
+
+    for (Map.Entry<Writable, Writable> e : v.getEntries().entrySet()) {
+      if (this.entries.containsKey(e.getKey())) {
+        // add
+        double value = alpha * ((DoubleEntry) e.getValue()).getValue()
+            + this.get(((IntWritable) e.getKey()).get());
+        this.entries.put(e.getKey(), new DoubleEntry(value));
+      } else {
+        // put
+        double value = alpha * ((DoubleEntry) e.getValue()).getValue();
+        this.entries.put(e.getKey(), new DoubleEntry(value));
+      }
+    }
+
+    return this;
+  }
+
+  /**
+   * x = v + x
+   * 
+   * @param v2
+   * @return x = v + x
+   */
+  public SparseVector add(Vector v2) {
+
+    for (Map.Entry<Writable, Writable> e : v2.getEntries().entrySet()) {
+      int key = ((IntWritable) e.getKey()).get();
+      if (this.entries.containsKey(e.getKey())) {
+        this.add(key, ((DoubleEntry) e.getValue()).getValue());
+      } else {
+        this.set(key, ((DoubleEntry) e.getValue()).getValue());
+      }
+    }
+
+    return this;
+  }
+
+  @Override
+  public double dot(Vector v) {
+    // TODO Auto-generated method stub
+    return 0;
+  }
+
+  @Override
+  public double norm(Norm type) {
+    // TODO Auto-generated method stub
+    return 0;
+  }
+
+  /**
+   * v = alpha*v
+   * 
+   * @param alpha
+   * @return v = alpha*v
+   */
+  public SparseVector scale(double alpha) {
+    for (Map.Entry<Writable, Writable> e : this.entries.entrySet()) {
+      this.entries.put(e.getKey(), new DoubleEntry(((DoubleEntry) e.getValue())
+          .getValue()
+          * alpha));
+    }
+    return this;
+  }
+
+  /**
+   * Gets the value of index
+   * 
+   * @param index
+   * @return the value of v(index)
+   * @throws IOException
+   */
+  public double get(int index) {
+    double value;
+    try {
+      value = ((DoubleEntry) this.entries.get(new IntWritable(index)))
+          .getValue();
+    } catch (NullPointerException e) { // returns zero if there is no value
+      return 0;
+    }
+
+    return value;
+  }
+
+  /**
+   * Sets the value of index
+   * 
+   * @param index
+   * @param value
+   */
+  public void set(int index, double value) {
+    // If entries are null, create new object
+    if (this.entries == null) {
+      this.entries = new MapWritable();
+    }
+
+    if (value != 0) // only stores non-zero element
+      this.entries.put(new IntWritable(index), new DoubleEntry(value));
+  }
+
+  /**
+   * Adds the value to v(index)
+   * 
+   * @param index
+   * @param value
+   */
+  public void add(int index, double value) {
+    set(index, get(index) + value);
+  }
+
+  /**
+   * Sets the vector
+   * 
+   * @param v
+   * @return x = v
+   */
+  public SparseVector set(Vector v) {
+    return new SparseVector(v.getEntries());
+  }
+
+  @Override
+  public Vector subVector(int i0, int i1) {
+    // TODO Auto-generated method stub
+    return null;
+  }
+
+  @Override
+  public Vector set(double alpha, Vector v) {
+    // TODO Auto-generated method stub
+    return null;
+  }
+
+}
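
For reference, the class keeps only non-zero entries in its MapWritable, so get() on an index that was never set falls through the NullPointerException branch and returns 0. A short usage sketch built only from the methods shown above:

    import org.apache.hama.matrix.SparseVector;

    public class SparseVectorExample {
      public static void main(String[] args) {
        SparseVector v = new SparseVector();
        v.set(3, 2.0);   // stored: non-zero value
        v.set(7, 0.0);   // not stored: zeros are dropped by set()
        v.add(3, 0.5);   // v(3) becomes 2.5
        v.scale(2.0);    // every stored entry is multiplied by 2

        System.out.println(v.get(3)); // 5.0
        System.out.println(v.get(7)); // 0.0, the index was never stored
      }
    }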

Added: incubator/hama/trunk/src/test/hbase-site.xml
URL: http://svn.apache.org/viewvc/incubator/hama/trunk/src/test/hbase-site.xml?rev=822682&view=auto
==============================================================================
--- incubator/hama/trunk/src/test/hbase-site.xml (added)
+++ incubator/hama/trunk/src/test/hbase-site.xml Wed Oct  7 12:04:03 2009
@@ -0,0 +1,137 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+<configuration>
+  <property>
+    <name>hbase.regionserver.msginterval</name>
+    <value>1000</value>
+    <description>Interval between messages from the RegionServer to HMaster
+    in milliseconds.  Default is 15. Set this value low if you want unit
+    tests to be responsive.
+    </description>
+  </property>
+  <property>
+    <name>hbase.client.pause</name>
+    <value>5000</value>
+    <description>General client pause value.  Used mostly as value to wait
+    before running a retry of a failed get, region lookup, etc.</description>
+  </property>
+  <property>
+    <name>hbase.master.meta.thread.rescanfrequency</name>
+    <value>10000</value>
+    <description>How long the HMaster sleeps (in milliseconds) between scans of
+    the root and meta tables.
+    </description>
+  </property>
+  <property>
+    <name>hbase.server.thread.wakefrequency</name>
+    <value>1000</value>
+    <description>Time to sleep in between searches for work (in milliseconds).
+    Used as sleep interval by service threads such as META scanner and log roller.
+    </description>
+  </property>
+  <property>
+    <name>hbase.regionserver.handler.count</name>
+    <value>5</value>
+    <description>Count of RPC Server instances spun up on RegionServers
+    Same property is used by the HMaster for count of master handlers.
+    Default is 10.
+    </description>
+  </property>
+  <property>
+    <name>hbase.master.lease.period</name>
+    <value>6000</value>
+    <description>Length of time the master will wait before timing out a region
+    server lease. Since region servers report in every second (see above), this
+    value has been reduced so that the master will notice a dead region server
+    sooner. The default is 30 seconds.
+    </description>
+  </property>
+  <property>
+    <name>hbase.master.info.port</name>
+    <value>-1</value>
+    <description>The port for the hbase master web UI
+    Set to -1 if you do not want the info server to run.
+    </description>
+  </property>
+  <property>
+    <name>hbase.regionserver.info.port</name>
+    <value>-1</value>
+    <description>The port for the hbase regionserver web UI
+    Set to -1 if you do not want the info server to run.
+    </description>
+  </property>
+  <property>
+    <name>hbase.regionserver.info.port.auto</name>
+    <value>true</value>
+    <description>Info server auto port bind. Enables automatic port
+    search if hbase.regionserver.info.port is already in use.
+    Enabled for testing to run multiple tests on one machine.
+    </description>
+  </property>
+  <property>
+    <name>hbase.master.lease.thread.wakefrequency</name>
+    <value>3000</value>
+    <description>The interval between checks for expired region server leases.
+    This value has been reduced due to the other reduced values above so that
+    the master will notice a dead region server sooner. The default is 15 seconds.
+    </description>
+  </property>
+  <property>
+    <name>hbase.regionserver.optionalcacheflushinterval</name>
+    <value>10000</value>
+    <description>
+    Amount of time to wait since the last time a region was flushed before
+    invoking an optional cache flush. Default 60,000.
+    </description>
+  </property>
+  <property>
+    <name>hbase.regionserver.safemode</name>
+    <value>false</value>
+    <description>
+    Turn on/off safe mode in region server. Always on for production, always off
+    for tests.
+    </description>
+  </property>
+  <property>
+    <name>hbase.hregion.max.filesize</name>
+    <value>67108864</value>
+    <description>
+    Maximum desired file size for an HRegion.  If filesize exceeds
+    value + (value / 2), the HRegion is split in two.  Default: 256M.
+
+    Keep the maximum filesize small so we split more often in tests.
+    </description>
+  </property>
+  <property>
+    <name>hadoop.log.dir</name>
+    <value>${user.dir}/../logs</value>
+  </property>
+  <property>
+    <name>hbase.zookeeper.property.clientPort</name>
+    <value>21810</value>
+    <description>Property from ZooKeeper's config zoo.cfg.
+    The port at which the clients will connect.
+    </description>
+  </property>
+</configuration>
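
These overrides are test-only tuning for the mini cluster and are picked up from the test classpath by the HBase-derived configuration objects. As a hedged sketch, a test could read back one of the values above like this (the property name is the one defined in this file, everything else is illustrative):

    import org.apache.hama.HamaConfiguration;

    public class TestConfSketch {
      public static void main(String[] args) {
        // hbase-site.xml on the classpath is loaded as a default resource
        HamaConfiguration conf = new HamaConfiguration();
        int zkPort = conf.getInt("hbase.zookeeper.property.clientPort", 2181);
        System.out.println("ZooKeeper client port used by the tests: " + zkPort);
      }
    }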

Added: incubator/hama/trunk/src/test/log4j.properties
URL: http://svn.apache.org/viewvc/incubator/hama/trunk/src/test/log4j.properties?rev=822682&view=auto
==============================================================================
--- incubator/hama/trunk/src/test/log4j.properties (added)
+++ incubator/hama/trunk/src/test/log4j.properties Wed Oct  7 12:04:03 2009
@@ -0,0 +1,47 @@
+# Define some default values that can be overridden by system properties
+hbase.root.logger=INFO,console
+hbase.log.dir=.
+hbase.log.file=hbase.log
+
+# Define the root logger to the system property "hbase.root.logger".
+log4j.rootLogger=${hbase.root.logger}
+
+# Logging Threshold
+log4j.threshhold=ALL
+
+#
+# Daily Rolling File Appender
+#
+log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
+log4j.appender.DRFA.File=${hbase.log.dir}/${hbase.log.file}
+
+# Rollver at midnight
+log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
+
+# 30-day backup
+#log4j.appender.DRFA.MaxBackupIndex=30
+log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
+
+# Pattern format: Date LogLevel LoggerName LogMessage
+#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
+
+# Debugging Pattern format
+log4j.appender.DRFA.layout.ConversionPattern=%d %-5p [%t] %C{2}(%L): %m%n
+
+
+#
+# console
+# Add "console" to rootlogger above if you want to use this 
+#
+log4j.appender.console=org.apache.log4j.ConsoleAppender
+log4j.appender.console.target=System.err
+log4j.appender.console.layout=org.apache.log4j.PatternLayout
+log4j.appender.console.layout.ConversionPattern=%d %-5p [%t] %C{2}(%L): %m%n
+
+# Custom Logging levels
+
+#log4j.logger.org.apache.hadoop.fs.FSNamesystem=DEBUG
+
+log4j.logger.org.apache.hadoop=WARN
+log4j.logger.org.apache.zookeeper=ERROR
+log4j.logger.org.apache.hadoop.hbase=DEBUG
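
The last three lines quiet Hadoop and ZooKeeper while keeping HBase at DEBUG for the test runs. If an individual test needs different levels, the same thing can be done programmatically with the log4j 1.x API; a minimal sketch:

    import org.apache.log4j.Level;
    import org.apache.log4j.Logger;

    public class LogLevelSketch {
      public static void main(String[] args) {
        // programmatic equivalent of the custom logging levels above
        Logger.getLogger("org.apache.hadoop").setLevel(Level.WARN);
        Logger.getLogger("org.apache.zookeeper").setLevel(Level.ERROR);
        Logger.getLogger("org.apache.hadoop.hbase").setLevel(Level.DEBUG);
      }
    }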

Added: incubator/hama/trunk/src/test/org/apache/hama/HamaCluster.java
URL: http://svn.apache.org/viewvc/incubator/hama/trunk/src/test/org/apache/hama/HamaCluster.java?rev=822682&view=auto
==============================================================================
--- incubator/hama/trunk/src/test/org/apache/hama/HamaCluster.java (added)
+++ incubator/hama/trunk/src/test/org/apache/hama/HamaCluster.java Wed Oct  7 12:04:03 2009
@@ -0,0 +1,36 @@
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hama;
+
+import org.apache.hadoop.hbase.HBaseClusterTestCase;
+
+/**
+ * Forming up the miniDfs and miniHbase
+ */
+public abstract class HamaCluster extends HBaseClusterTestCase {
+  protected final static HamaConfiguration conf = new HamaConfiguration();
+  public void setUp() throws Exception {
+    super.setUp();
+  }
+  
+  public static HamaConfiguration getConf() {
+    return conf;
+  }
+}

Added: incubator/hama/trunk/src/test/org/apache/hama/TestHamaAdmin.java
URL: http://svn.apache.org/viewvc/incubator/hama/trunk/src/test/org/apache/hama/TestHamaAdmin.java?rev=822682&view=auto
==============================================================================
--- incubator/hama/trunk/src/test/org/apache/hama/TestHamaAdmin.java (added)
+++ incubator/hama/trunk/src/test/org/apache/hama/TestHamaAdmin.java Wed Oct  7 12:04:03 2009
@@ -0,0 +1,123 @@
+package org.apache.hama;
+
+import java.io.IOException;
+import java.io.UnsupportedEncodingException;
+
+import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hama.matrix.DenseMatrix;
+import org.apache.hama.matrix.Matrix;
+import org.apache.log4j.Logger;
+
+public class TestHamaAdmin extends HamaCluster {
+  static final Logger LOG = Logger.getLogger(TestHamaAdmin.class);
+  private int SIZE = 10;
+  private Matrix m1;
+  private Matrix m2;
+  private final String aliase1 = "matrix_aliase_A";
+  private final String aliase2 = "matrix_aliase_B";
+  private HamaConfiguration conf;
+  private HBaseAdmin admin;
+  private HamaAdmin hamaAdmin;
+
+  /**
+   * @throws UnsupportedEncodingException
+   */
+  public TestHamaAdmin() throws UnsupportedEncodingException {
+    super();
+  }
+
+  public void setUp() throws Exception {
+    super.setUp();
+
+    conf = getConf();
+    admin = new HBaseAdmin(conf);
+    hamaAdmin = new HamaAdminImpl(conf, admin);
+
+    m1 = DenseMatrix.random(conf, SIZE, SIZE);
+    m2 = DenseMatrix.random(conf, SIZE, SIZE);
+  }
+  
+  public void testLoadSave() throws IOException {
+    String path1 = m1.getPath();
+    // save m1 to aliase1
+    m1.save(aliase1);
+    // load matrix m1 using aliase1
+    DenseMatrix loadTest = new DenseMatrix(conf, aliase1, false);
+
+    for (int i = 0; i < SIZE; i++) {
+      for (int j = 0; j < SIZE; j++) {
+        assertEquals(m1.get(i, j), loadTest.get(i, j));
+      }
+    }
+
+    assertEquals(path1, loadTest.getPath());
+    // close loadTest; it just disconnects from the table but does not delete it.
+    loadTest.close();
+
+    // close m1 and load matrix m1 using aliase1 again.
+    m1.close();
+    DenseMatrix loadTest2 = new DenseMatrix(conf, aliase1, false);
+    assertEquals(path1, loadTest2.getPath());
+    // remove aliase1
+    // because loadTest2 still references aliase1, this only removes the alias
+    // entry and does not delete the underlying table.
+    hamaAdmin.delete(aliase1);
+    assertEquals(true, admin.tableExists(path1));
+    // close loadTest2; as it is the last reference to table 'path1',
+    // closing it triggers deletion of the table.
+    loadTest2.close();
+    assertEquals(false, admin.tableExists(path1));
+
+    // loading a non-existent matrix by its alias name should fail.
+    DenseMatrix loadTest3 = null;
+    try {
+      loadTest3 = new DenseMatrix(conf, aliase1, false);
+      fail("Loading a non-existent matrix should fail!");
+    } catch (IOException e) {
+
+    } finally {
+      if (loadTest3 != null)
+        loadTest3.close();
+    }
+    
+    forceCreate();
+  }
+
+  public void forceCreate() throws IOException {
+    String path2 = m2.getPath();
+    // save m2 to aliase2
+    m2.save(aliase2);
+    // load matrix m2 using aliase2
+    DenseMatrix loadTest = new DenseMatrix(conf, aliase2, false);
+
+    for (int i = 0; i < loadTest.getRows(); i++) {
+      for (int j = 0; j < loadTest.getColumns(); j++) {
+        assertEquals(m2.get(i, j), loadTest.get(i, j));
+      }
+    }
+
+    assertEquals(path2, loadTest.getPath());
+
+    Matrix test = hamaAdmin.getMatrix(aliase2);
+    assertEquals(test.getType(), "DenseMatrix");
+
+    // force creation of matrix loadTest2 under the alias name 'aliase2'
+    DenseMatrix loadTest2 = new DenseMatrix(conf, aliase2, true);
+    String loadPath2 = loadTest2.getPath();
+    assertFalse(path2.equals(loadPath2));
+    assertEquals(loadPath2, hamaAdmin.getPath(aliase2));
+    assertFalse(path2.equals(hamaAdmin.getPath(aliase2)));
+
+    // close m2 & loadTest; the table is deleted once both are closed
+    m2.close();
+    assertEquals(true, admin.tableExists(path2));
+    loadTest.close();
+    assertEquals(false, admin.tableExists(path2));
+
+    // remove 'aliase2' & close loadTest2
+    loadTest2.close();
+    assertEquals(true, admin.tableExists(loadPath2));
+    hamaAdmin.delete(aliase2);
+    assertEquals(false, admin.tableExists(loadPath2));
+  }
+}
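
Outside the test harness, the alias workflow exercised above boils down to a handful of calls. A rough sketch against a running cluster, assuming the alias name 'example_alias' (made up here) and omitting error handling:

    import org.apache.hadoop.hbase.client.HBaseAdmin;
    import org.apache.hama.HamaAdmin;
    import org.apache.hama.HamaAdminImpl;
    import org.apache.hama.HamaConfiguration;
    import org.apache.hama.matrix.DenseMatrix;

    public class AliasExample {
      public static void main(String[] args) throws Exception {
        HamaConfiguration conf = new HamaConfiguration();

        // create a random matrix and register it under an alias
        DenseMatrix m = DenseMatrix.random(conf, 4, 4);
        m.save("example_alias");

        // attach to the same underlying table by alias (false = do not force-create)
        DenseMatrix attached = new DenseMatrix(conf, "example_alias", false);
        System.out.println("physical table: " + attached.getPath());

        // drop the alias entry; the table itself goes away once the last
        // matrix referencing it has been closed
        HamaAdmin admin = new HamaAdminImpl(conf, new HBaseAdmin(conf));
        admin.delete("example_alias");
        attached.close();
        m.close();
      }
    }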

Added: incubator/hama/trunk/src/test/org/apache/hama/TestHbaseClient.java
URL: http://svn.apache.org/viewvc/incubator/hama/trunk/src/test/org/apache/hama/TestHbaseClient.java?rev=822682&view=auto
==============================================================================
--- incubator/hama/trunk/src/test/org/apache/hama/TestHbaseClient.java (added)
+++ incubator/hama/trunk/src/test/org/apache/hama/TestHbaseClient.java Wed Oct  7 12:04:03 2009
@@ -0,0 +1,69 @@
+package org.apache.hama;
+
+import java.io.IOException;
+import java.io.UnsupportedEncodingException;
+
+import org.apache.hadoop.hbase.HBaseClusterTestCase;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.util.Bytes;
+
+public class TestHbaseClient extends HBaseClusterTestCase {
+
+  private static final byte[] FAMILY = Bytes.toBytes("family");
+  private static final byte[] ROW = Bytes.toBytes("row");
+  private static final byte[] QUALIFIER = Bytes.toBytes("qualifier");
+  private static final byte[] VALUE = Bytes.toBytes("value");
+  private static final byte[] MISSING_ROW = Bytes.toBytes("missingrow");
+
+  private HTableDescriptor desc = null;
+  private HTable table = null;
+
+  /**
+   * @throws UnsupportedEncodingException
+   */
+  public TestHbaseClient() throws UnsupportedEncodingException {
+    super();
+  }
+
+  @Override
+  public void setUp() throws Exception {
+    super.setUp();
+    this.desc = new HTableDescriptor("testGet");
+    desc.addFamily(new HColumnDescriptor(FAMILY));
+    HBaseAdmin admin = new HBaseAdmin(conf);
+    admin.createTable(desc);
+    table = new HTable(conf, desc.getName());
+  }
+
+  public void testGet_EmptyTable() throws IOException {
+    Get get = new Get(ROW);
+    get.addFamily(FAMILY);
+    Result r = table.get(get);
+    assertTrue(r.isEmpty());
+  }
+
+  public void testGet_NonExistentRow() throws IOException {
+    Put put = new Put(ROW);
+    put.add(FAMILY, QUALIFIER, VALUE);
+    table.put(put);
+    System.out.println("Row put");
+
+    Get get = new Get(ROW);
+    get.addFamily(FAMILY);
+    Result r = table.get(get);
+    assertFalse(r.isEmpty());
+    System.out.println("Row retrieved successfully");
+
+    get = new Get(MISSING_ROW);
+    get.addFamily(FAMILY);
+    r = table.get(get);
+    assertTrue(r.isEmpty());
+    System.out.println("Row missing as it should be");
+  }
+}
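
The same put/get round trip works against any table outside the JUnit harness. A minimal sketch assuming a reachable HBase instance; the table name 'example' is arbitrary:

    import org.apache.hadoop.hbase.HColumnDescriptor;
    import org.apache.hadoop.hbase.HTableDescriptor;
    import org.apache.hadoop.hbase.client.Get;
    import org.apache.hadoop.hbase.client.HBaseAdmin;
    import org.apache.hadoop.hbase.client.HTable;
    import org.apache.hadoop.hbase.client.Put;
    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.hbase.util.Bytes;
    import org.apache.hama.HamaConfiguration;

    public class PutGetExample {
      public static void main(String[] args) throws Exception {
        HamaConfiguration conf = new HamaConfiguration();

        // create the table if it does not exist yet
        HBaseAdmin admin = new HBaseAdmin(conf);
        if (!admin.tableExists("example")) {
          HTableDescriptor desc = new HTableDescriptor("example");
          desc.addFamily(new HColumnDescriptor(Bytes.toBytes("family")));
          admin.createTable(desc);
        }
        HTable table = new HTable(conf, Bytes.toBytes("example"));

        // write one cell, then read it back
        Put put = new Put(Bytes.toBytes("row"));
        put.add(Bytes.toBytes("family"), Bytes.toBytes("qualifier"), Bytes.toBytes("value"));
        table.put(put);

        Get get = new Get(Bytes.toBytes("row"));
        get.addFamily(Bytes.toBytes("family"));
        Result r = table.get(get);
        System.out.println("empty result? " + r.isEmpty());
      }
    }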

Modified: incubator/hama/trunk/src/test/org/apache/hama/graph/TestGraph.java
URL: http://svn.apache.org/viewvc/incubator/hama/trunk/src/test/org/apache/hama/graph/TestGraph.java?rev=822682&r1=822681&r2=822682&view=diff
==============================================================================
--- incubator/hama/trunk/src/test/org/apache/hama/graph/TestGraph.java (original)
+++ incubator/hama/trunk/src/test/org/apache/hama/graph/TestGraph.java Wed Oct  7 12:04:03 2009
@@ -20,38 +20,32 @@
 package org.apache.hama.graph;
 
 import java.io.IOException;
+import java.io.UnsupportedEncodingException;
 import java.util.HashMap;
 import java.util.Map;
 
-import junit.extensions.TestSetup;
-import junit.framework.Test;
-import junit.framework.TestCase;
-import junit.framework.TestSuite;
-
+import org.apache.hama.HamaCluster;
 import org.apache.hama.HamaConfiguration;
-import org.apache.hama.matrix.HCluster;
 import org.apache.log4j.Logger;
 
-public class TestGraph extends TestCase {
+public class TestGraph extends HamaCluster {
   static final Logger LOG = Logger.getLogger(TestGraph.class);
   private static HamaConfiguration conf;
   private static Graph adj;
   private static int[] result = new int[] { 4, 3, 2, 0, 1 };
 
-  public static Test suite() {
-    TestSetup setup = new TestSetup(new TestSuite(TestGraph.class)) {
-      protected void setUp() throws Exception {
-        HCluster hCluster = new HCluster();
-        hCluster.setUp();
 
-        conf = hCluster.getConf();
-        adj = new SparseGraph(conf);
-      }
+  /**
+   * @throws UnsupportedEncodingException
+   */
+  public TestGraph() throws UnsupportedEncodingException {
+    super();
+  }
 
-      protected void tearDown() {
-      }
-    };
-    return setup;
+  public void setUp() throws Exception {
+    super.setUp();
+    conf = getConf();
+    adj = new SparseGraph(conf);
   }
 
   public void testAddEdge() throws IOException {

Modified: incubator/hama/trunk/src/test/org/apache/hama/mapred/TestBlockMatrixMapReduce.java
URL: http://svn.apache.org/viewvc/incubator/hama/trunk/src/test/org/apache/hama/mapred/TestBlockMatrixMapReduce.java?rev=822682&r1=822681&r2=822682&view=diff
==============================================================================
--- incubator/hama/trunk/src/test/org/apache/hama/mapred/TestBlockMatrixMapReduce.java (original)
+++ incubator/hama/trunk/src/test/org/apache/hama/mapred/TestBlockMatrixMapReduce.java Wed Oct  7 12:04:03 2009
@@ -1,60 +1,60 @@
-/**
- * Copyright 2007 The Apache Software Foundation
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hama.mapred;
-
-import java.io.IOException;
-
-import org.apache.hama.matrix.DenseMatrix;
-import org.apache.hama.matrix.HCluster;
-import org.apache.log4j.Logger;
-
-public class TestBlockMatrixMapReduce extends HCluster {
-  static final Logger LOG = Logger.getLogger(TestBlockMatrixMapReduce.class);
-  static final int SIZE = 32;
-
-  /** constructor */
-  public TestBlockMatrixMapReduce() {
-    super();
-  }
-
-  public void testBlockMatrixMapReduce() throws IOException,
-      ClassNotFoundException {
-    DenseMatrix m1 = DenseMatrix.random(conf, SIZE, SIZE);
-    DenseMatrix m2 = DenseMatrix.random(conf, SIZE, SIZE);
-
-    DenseMatrix c = (DenseMatrix) m1.mult(m2, 16);
-
-    double[][] mem = new double[SIZE][SIZE];
-    for (int i = 0; i < SIZE; i++) {
-      for (int j = 0; j < SIZE; j++) {
-        for (int k = 0; k < SIZE; k++) {
-          mem[i][k] += m1.get(i, j) * m2.get(j, k);
-        }
-      }
-    }
-
-    for (int i = 0; i < SIZE; i++) {
-      for (int j = 0; j < SIZE; j++) {
-        double gap = (mem[i][j] - c.get(i, j));
-        assertTrue(gap < 0.000001 || gap < -0.000001);
-      }
-    }
-  }
-}
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hama.mapred;
+
+import java.io.IOException;
+
+import org.apache.hama.HamaCluster;
+import org.apache.hama.matrix.DenseMatrix;
+import org.apache.log4j.Logger;
+
+public class TestBlockMatrixMapReduce extends HamaCluster {
+  static final Logger LOG = Logger.getLogger(TestBlockMatrixMapReduce.class);
+  static final int SIZE = 32;
+
+  /** constructor */
+  public TestBlockMatrixMapReduce() {
+    super();
+  }
+
+  public void testBlockMatrixMapReduce() throws IOException,
+      ClassNotFoundException {
+    DenseMatrix m1 = DenseMatrix.random(conf, SIZE, SIZE);
+    DenseMatrix m2 = DenseMatrix.random(conf, SIZE, SIZE);
+
+    DenseMatrix c = (DenseMatrix) m1.mult(m2, 16);
+
+    double[][] mem = new double[SIZE][SIZE];
+    for (int i = 0; i < SIZE; i++) {
+      for (int j = 0; j < SIZE; j++) {
+        for (int k = 0; k < SIZE; k++) {
+          mem[i][k] += m1.get(i, j) * m2.get(j, k);
+        }
+      }
+    }
+
+    for (int i = 0; i < SIZE; i++) {
+      for (int j = 0; j < SIZE; j++) {
+        double gap = (mem[i][j] - c.get(i, j));
+        assertTrue(gap < 0.000001 && gap > -0.000001);
+      }
+    }
+  }
+}
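
The verification loop above recomputes the product with a plain triple loop and compares each cell within an absolute tolerance. The same pattern as a self-contained sketch, independent of HBase:

    public class NaiveMultCheck {

      // reference O(n^3) multiplication on plain square arrays
      static double[][] naiveMult(double[][] a, double[][] b) {
        int n = a.length;
        double[][] c = new double[n][n];
        for (int i = 0; i < n; i++)
          for (int j = 0; j < n; j++)
            for (int k = 0; k < n; k++)
              c[i][k] += a[i][j] * b[j][k];
        return c;
      }

      // absolute-tolerance comparison, i.e. |x - y| < eps
      static boolean close(double x, double y, double eps) {
        return Math.abs(x - y) < eps;
      }

      public static void main(String[] args) {
        double[][] a = { { 1, 2 }, { 3, 4 } };
        double[][] b = { { 5, 6 }, { 7, 8 } };
        double[][] c = naiveMult(a, b);
        // expected product: [[19, 22], [43, 50]]
        System.out.println(close(c[0][0], 19, 1e-6) && close(c[1][1], 50, 1e-6));
      }
    }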

Modified: incubator/hama/trunk/src/test/org/apache/hama/mapred/TestRandomMatrixMapReduce.java
URL: http://svn.apache.org/viewvc/incubator/hama/trunk/src/test/org/apache/hama/mapred/TestRandomMatrixMapReduce.java?rev=822682&r1=822681&r2=822682&view=diff
==============================================================================
--- incubator/hama/trunk/src/test/org/apache/hama/mapred/TestRandomMatrixMapReduce.java (original)
+++ incubator/hama/trunk/src/test/org/apache/hama/mapred/TestRandomMatrixMapReduce.java Wed Oct  7 12:04:03 2009
@@ -21,12 +21,12 @@
 
 import java.io.IOException;
 
+import org.apache.hama.HamaCluster;
 import org.apache.hama.matrix.DenseMatrix;
-import org.apache.hama.matrix.HCluster;
 import org.apache.hama.matrix.SparseMatrix;
 import org.apache.log4j.Logger;
 
-public class TestRandomMatrixMapReduce extends HCluster {
+public class TestRandomMatrixMapReduce extends HamaCluster {
   static final Logger LOG = Logger.getLogger(TestRandomMatrixMapReduce.class);
   
   public void testRandomMatrixMapReduce() throws IOException {

Modified: incubator/hama/trunk/src/test/org/apache/hama/matrix/MatrixTestCommon.java
URL: http://svn.apache.org/viewvc/incubator/hama/trunk/src/test/org/apache/hama/matrix/MatrixTestCommon.java?rev=822682&r1=822681&r2=822682&view=diff
==============================================================================
--- incubator/hama/trunk/src/test/org/apache/hama/matrix/MatrixTestCommon.java (original)
+++ incubator/hama/trunk/src/test/org/apache/hama/matrix/MatrixTestCommon.java Wed Oct  7 12:04:03 2009
@@ -1,74 +1,74 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hama.matrix;
-
-import java.io.IOException;
-
-class MatrixTestCommon {
-
-  static double verifyNorm1(Matrix m1) throws IOException {
-    double[] colSum = new double[m1.getColumns()];
-    for (int j = 0; j < m1.getColumns(); j++) {
-      for (int i = 0; i < m1.getRows(); i++) {
-        colSum[j] += Math.abs(m1.get(i, j));
-      }
-    }
-    
-    double max = 0;
-    for (int i=0; i < colSum.length; i++) {
-      max = Math.max(colSum[i], max);
-    }
-    return max;
-  }
-  
-  static double verifyNormInfinity(Matrix m1) throws IOException {
-    double[] rowSum = new double[m1.getRows()];
-    for (int i = 0; i < m1.getRows(); i++) {
-      for (int j = 0; j < m1.getColumns(); j++) {
-        rowSum[i] += Math.abs(m1.get(i, j));
-      }
-    }
-
-    double max = 0;
-    for (int i = 0; i < rowSum.length; ++i)
-      max = Math.max(rowSum[i], max);
-    return max;
-  }
-  
-  static double verifyNormMaxValue(Matrix m1) throws IOException {
-    double max = 0;
-    for (int i = 0; i < m1.getRows(); i++) {
-      for (int j = 0; j < m1.getColumns(); j++) {
-        max = Math.max(Math.abs(m1.get(i, j)), max);
-      }
-    }
-    
-    return max;
-  }
-  
-  static double verifyNormFrobenius(Matrix m1) throws IOException {
-    double sqrtSum = 0;
-    for (int i = 0; i < m1.getRows(); i++) {
-      for (int j = 0; j < m1.getColumns(); j++) {
-        double cellValue = m1.get(i, j);
-        sqrtSum += ( cellValue * cellValue );
-      }
-    } 
-    return Math.sqrt(sqrtSum);
-  }
-  
-}
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hama.matrix;
+
+import java.io.IOException;
+
+class MatrixTestCommon {
+
+  static double verifyNorm1(Matrix m1) throws IOException {
+    double[] colSum = new double[m1.getColumns()];
+    for (int j = 0; j < m1.getColumns(); j++) {
+      for (int i = 0; i < m1.getRows(); i++) {
+        colSum[j] += Math.abs(m1.get(i, j));
+      }
+    }
+
+    double max = 0;
+    for (int i = 0; i < colSum.length; i++) {
+      max = Math.max(colSum[i], max);
+    }
+    return max;
+  }
+
+  static double verifyNormInfinity(Matrix m1) throws IOException {
+    double[] rowSum = new double[m1.getRows()];
+    for (int i = 0; i < m1.getRows(); i++) {
+      for (int j = 0; j < m1.getColumns(); j++) {
+        rowSum[i] += Math.abs(m1.get(i, j));
+      }
+    }
+
+    double max = 0;
+    for (int i = 0; i < rowSum.length; ++i)
+      max = Math.max(rowSum[i], max);
+    return max;
+  }
+
+  static double verifyNormMaxValue(Matrix m1) throws IOException {
+    double max = 0;
+    for (int i = 0; i < m1.getRows(); i++) {
+      for (int j = 0; j < m1.getColumns(); j++) {
+        max = Math.max(Math.abs(m1.get(i, j)), max);
+      }
+    }
+
+    return max;
+  }
+
+  static double verifyNormFrobenius(Matrix m1) throws IOException {
+    double sqrtSum = 0;
+    for (int i = 0; i < m1.getRows(); i++) {
+      for (int j = 0; j < m1.getColumns(); j++) {
+        double cellValue = m1.get(i, j);
+        sqrtSum += (cellValue * cellValue);
+      }
+    }
+    return Math.sqrt(sqrtSum);
+  }
+
+}
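
For reference, these helpers follow the standard definitions: the 1-norm is the maximum absolute column sum, the infinity-norm the maximum absolute row sum, the max-value norm the largest absolute entry, and the Frobenius norm the square root of the sum of squared entries. The first two on plain arrays, as a quick cross-check:

    public class NormExample {

      // 1-norm: maximum absolute column sum
      static double norm1(double[][] a) {
        double max = 0;
        for (int j = 0; j < a[0].length; j++) {
          double colSum = 0;
          for (int i = 0; i < a.length; i++)
            colSum += Math.abs(a[i][j]);
          max = Math.max(max, colSum);
        }
        return max;
      }

      // infinity-norm: maximum absolute row sum
      static double normInfinity(double[][] a) {
        double max = 0;
        for (double[] row : a) {
          double rowSum = 0;
          for (double v : row)
            rowSum += Math.abs(v);
          max = Math.max(max, rowSum);
        }
        return max;
      }

      public static void main(String[] args) {
        double[][] a = { { 1, -2 }, { -3, 4 } };
        System.out.println(norm1(a));         // max(|1|+|-3|, |-2|+|4|) = 6.0
        System.out.println(normInfinity(a));  // max(|1|+|-2|, |-3|+|4|) = 7.0
      }
    }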

Added: incubator/hama/trunk/src/test/org/apache/hama/matrix/TestAbstractMatrix.java
URL: http://svn.apache.org/viewvc/incubator/hama/trunk/src/test/org/apache/hama/matrix/TestAbstractMatrix.java?rev=822682&view=auto
==============================================================================
--- incubator/hama/trunk/src/test/org/apache/hama/matrix/TestAbstractMatrix.java (added)
+++ incubator/hama/trunk/src/test/org/apache/hama/matrix/TestAbstractMatrix.java Wed Oct  7 12:04:03 2009
@@ -0,0 +1,72 @@
+package org.apache.hama.matrix;
+
+import java.io.IOException;
+import java.io.UnsupportedEncodingException;
+
+import org.apache.hama.HamaCluster;
+import org.apache.hama.HamaConfiguration;
+import org.apache.hama.matrix.Matrix.Norm;
+import org.apache.log4j.Logger;
+
+public class TestAbstractMatrix extends HamaCluster {
+  static final Logger LOG = Logger.getLogger(TestAbstractMatrix.class);
+  private int SIZE = 10;
+  private Matrix m1;
+  private Matrix m2;
+  private HamaConfiguration conf;
+  private double gap = 0.000001;
+  
+  /**
+   * @throws UnsupportedEncodingException
+   */
+  public TestAbstractMatrix() throws UnsupportedEncodingException {
+    super();
+  }
+
+  public void setUp() throws Exception {
+    super.setUp();
+
+    conf = getConf();
+    m1 = DenseMatrix.random(conf, SIZE, SIZE);
+    m2 = SparseMatrix.random(conf, SIZE, SIZE);
+  }
+
+  public void testTransposeAndNorm() throws IOException {
+    testTrans(m1);
+    testTrans(m2);
+    
+    normTest(m1);
+    normTest(m2);
+  }
+  
+  public void testTrans(Matrix matrix) throws IOException {
+    Matrix trans1 = matrix.transpose();
+    for (int i = 0; i < trans1.getRows(); i++) {
+      for (int j = 0; j < trans1.getColumns(); j++) {
+        assertEquals(trans1.get(i, j), matrix.get(j, i));
+      }
+    }
+  }
+  
+  public void normTest(Matrix matrix) throws IOException {
+    double norm1 = matrix.norm(Norm.One);
+    double verify_norm1 = MatrixTestCommon.verifyNorm1(matrix);
+    gap = norm1 - verify_norm1;
+    assertTrue(gap < 0.000001 && gap > -0.000001);
+
+    double normInfinity = matrix.norm(Norm.Infinity);
+    double verify_normInf = MatrixTestCommon.verifyNormInfinity(matrix);
+    gap = normInfinity - verify_normInf;
+    assertTrue(gap < 0.000001 && gap > -0.000001);
+
+    double normMaxValue = matrix.norm(Norm.Maxvalue);
+    double verify_normMV = MatrixTestCommon.verifyNormMaxValue(matrix);
+    gap = normMaxValue - verify_normMV;
+    assertTrue(gap < 0.000001 && gap > -0.000001);
+
+    double normFrobenius = matrix.norm(Norm.Frobenius);
+    double verify_normFrobenius = MatrixTestCommon.verifyNormFrobenius(matrix);
+    gap = normFrobenius - verify_normFrobenius;
+    assertTrue(gap < 0.000001 && gap > -0.000001);
+  }
+}
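
The norm checks above all follow the same pattern: compute the norm through the Matrix API, recompute it directly from the entries, and require the difference to stay within a small tolerance. A sketch of that pattern for the Frobenius norm as a stand-alone helper; the class and method names are made up:

    import java.io.IOException;

    import org.apache.hama.matrix.Matrix;

    public class FrobeniusCheck {

      // compares Matrix.norm(Norm.Frobenius) against a direct O(rows*cols) computation
      static void checkFrobenius(Matrix m, double eps) throws IOException {
        double direct = 0;
        for (int i = 0; i < m.getRows(); i++) {
          for (int j = 0; j < m.getColumns(); j++) {
            double v = m.get(i, j);
            direct += v * v;
          }
        }
        direct = Math.sqrt(direct);

        double reported = m.norm(Matrix.Norm.Frobenius);
        if (Math.abs(reported - direct) >= eps) {
          throw new AssertionError("Frobenius norm mismatch: " + reported + " vs " + direct);
        }
      }
    }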