Posted to hcatalog-commits@incubator.apache.org by ga...@apache.org on 2012/04/12 10:08:47 UTC

svn commit: r1325183 [1/3] - in /incubator/hcatalog/branches/branch-0.4: ./ bin/ hive/ ivy/ scripts/ src/docs/src/documentation/content/xdocs/ src/java/org/apache/hcatalog/cli/SemanticAnalysis/ src/test/e2e/hcatalog/ src/test/e2e/hcatalog/conf/ src/tes...

Author: gates
Date: Thu Apr 12 10:08:45 2012
New Revision: 1325183

URL: http://svn.apache.org/viewvc?rev=1325183&view=rev
Log:
HCATALOG-358 Remove externed hive code from HCat code and use maven instead

Added:
    incubator/hcatalog/branches/branch-0.4/src/test/e2e/hcatalog/conf/envbased.conf
    incubator/hcatalog/branches/branch-0.4/src/test/e2e/hcatalog/conf/rpm.conf
Removed:
    incubator/hcatalog/branches/branch-0.4/hive/README
    incubator/hcatalog/branches/branch-0.4/storage-handlers/hbase/ivy/ivysettings.xml
    incubator/hcatalog/branches/branch-0.4/storage-handlers/hbase/ivy/libraries.properties
Modified:
    incubator/hcatalog/branches/branch-0.4/CHANGES.txt
    incubator/hcatalog/branches/branch-0.4/bin/hcat
    incubator/hcatalog/branches/branch-0.4/bin/hcat_server.sh
    incubator/hcatalog/branches/branch-0.4/build-common.xml
    incubator/hcatalog/branches/branch-0.4/build.xml
    incubator/hcatalog/branches/branch-0.4/hive/   (props changed)
    incubator/hcatalog/branches/branch-0.4/ivy.xml
    incubator/hcatalog/branches/branch-0.4/ivy/ivysettings.xml
    incubator/hcatalog/branches/branch-0.4/ivy/libraries.properties
    incubator/hcatalog/branches/branch-0.4/scripts/hcat_server_install.sh
    incubator/hcatalog/branches/branch-0.4/src/docs/src/documentation/content/xdocs/cli.xml
    incubator/hcatalog/branches/branch-0.4/src/docs/src/documentation/content/xdocs/install.xml
    incubator/hcatalog/branches/branch-0.4/src/docs/src/documentation/content/xdocs/site.xml
    incubator/hcatalog/branches/branch-0.4/src/java/org/apache/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java
    incubator/hcatalog/branches/branch-0.4/src/test/e2e/hcatalog/build.xml
    incubator/hcatalog/branches/branch-0.4/src/test/e2e/hcatalog/conf/default.conf
    incubator/hcatalog/branches/branch-0.4/src/test/e2e/hcatalog/conf/existing_deployer.conf
    incubator/hcatalog/branches/branch-0.4/src/test/e2e/hcatalog/deployers/HCatExistingClusterDeployer.pm
    incubator/hcatalog/branches/branch-0.4/src/test/e2e/hcatalog/drivers/TestDriverHCat.pm
    incubator/hcatalog/branches/branch-0.4/src/test/e2e/hcatalog/drivers/TestDriverHadoop.pm
    incubator/hcatalog/branches/branch-0.4/src/test/e2e/hcatalog/drivers/TestDriverHive.pm
    incubator/hcatalog/branches/branch-0.4/src/test/e2e/hcatalog/drivers/TestDriverPig.pm
    incubator/hcatalog/branches/branch-0.4/src/test/e2e/hcatalog/drivers/Util.pm
    incubator/hcatalog/branches/branch-0.4/src/test/e2e/hcatalog/tools/generate/generate_data.pl
    incubator/hcatalog/branches/branch-0.4/src/test/e2e/hcatalog/udfs/java/build.xml
    incubator/hcatalog/branches/branch-0.4/src/test/org/apache/hcatalog/HcatTestUtils.java
    incubator/hcatalog/branches/branch-0.4/src/test/org/apache/hcatalog/mapreduce/TestHCatHiveCompatibility.java
    incubator/hcatalog/branches/branch-0.4/src/test/org/apache/hcatalog/mapreduce/TestPassProperties.java
    incubator/hcatalog/branches/branch-0.4/src/test/org/apache/hcatalog/mapreduce/TestSequenceFileReadWrite.java
    incubator/hcatalog/branches/branch-0.4/src/test/org/apache/hcatalog/pig/TestHCatLoader.java
    incubator/hcatalog/branches/branch-0.4/src/test/org/apache/hcatalog/pig/TestHCatLoaderComplexSchema.java
    incubator/hcatalog/branches/branch-0.4/src/test/org/apache/hcatalog/pig/TestHCatStorer.java
    incubator/hcatalog/branches/branch-0.4/src/test/org/apache/hcatalog/pig/TestHCatStorerMulti.java
    incubator/hcatalog/branches/branch-0.4/storage-handlers/hbase/build.xml
    incubator/hcatalog/branches/branch-0.4/storage-handlers/hbase/ivy.xml

Modified: incubator/hcatalog/branches/branch-0.4/CHANGES.txt
URL: http://svn.apache.org/viewvc/incubator/hcatalog/branches/branch-0.4/CHANGES.txt?rev=1325183&r1=1325182&r2=1325183&view=diff
==============================================================================
--- incubator/hcatalog/branches/branch-0.4/CHANGES.txt (original)
+++ incubator/hcatalog/branches/branch-0.4/CHANGES.txt Thu Apr 12 10:08:45 2012
@@ -31,6 +31,8 @@ Release 0.4.1 - Unreleased
 Release 0.4.0 - Unreleased
 
   INCOMPATIBLE CHANGES
+  HCAT-358 Remove externed hive code from HCat code and use maven instead (thw and gates via gates)
+
   HCAT-359 hcatalog tar.gz should only have hcatalog binaries (gkesavan via gates)
 
   HCAT-267 rename 64 bit rpm/deb package (gkesavan via gates)

Modified: incubator/hcatalog/branches/branch-0.4/bin/hcat
URL: http://svn.apache.org/viewvc/incubator/hcatalog/branches/branch-0.4/bin/hcat?rev=1325183&r1=1325182&r2=1325183&view=diff
==============================================================================
--- incubator/hcatalog/branches/branch-0.4/bin/hcat (original)
+++ incubator/hcatalog/branches/branch-0.4/bin/hcat Thu Apr 12 10:08:45 2012
@@ -106,7 +106,7 @@ fi
 HCAT_JAR=`ls $HCAT_PREFIX/share/hcatalog/hcatalog-[0-9]*.jar`
 
 # Find the storage-handler jars.
-for jar in ${HCAT_PREFIX}/lib/*.jar ; do
+for jar in ${HCAT_PREFIX}/share/hcatalog/lib/*.jar ; do
 	HADOOP_CLASSPATH=$HADOOP_CLASSPATH:$jar
 done
 

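A minimal sketch of the classpath this loop now assembles under the new
installation layout (the prefix and jar version below are placeholders for
illustration, not values from the patch):

    HCAT_PREFIX=/usr/local/hcatalog                 # assumed install prefix
    HADOOP_CLASSPATH=$HCAT_PREFIX/share/hcatalog/hcatalog-0.4.0.jar
    # storage-handler jars moved from lib/ to share/hcatalog/lib/:
    for jar in ${HCAT_PREFIX}/share/hcatalog/lib/*.jar ; do
        HADOOP_CLASSPATH=$HADOOP_CLASSPATH:$jar
    done
    echo "$HADOOP_CLASSPATH"
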
Modified: incubator/hcatalog/branches/branch-0.4/bin/hcat_server.sh
URL: http://svn.apache.org/viewvc/incubator/hcatalog/branches/branch-0.4/bin/hcat_server.sh?rev=1325183&r1=1325182&r2=1325183&view=diff
==============================================================================
--- incubator/hcatalog/branches/branch-0.4/bin/hcat_server.sh (original)
+++ incubator/hcatalog/branches/branch-0.4/bin/hcat_server.sh Thu Apr 12 10:08:45 2012
@@ -1,25 +1,27 @@
 #!/usr/bin/env bash
 
 # Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file                                   
-# distributed with this work for additional information                                          
-# regarding copyright ownership.  The ASF licenses this file                                     
-# to you under the Apache License, Version 2.0 (the                                              
-# "License"); you may not use this file except in compliance                                     
-# with the License.  You may obtain a copy of the License at                                     
-#                                                                                                
-# http://www.apache.org/licenses/LICENSE-2.0                                                     
-#                                                                                                
-# Unless required by applicable law or agreed to in writing, software                            
-# distributed under the License is distributed on an "AS IS" BASIS,                              
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.                       
-# See the License for the specific language governing permissions and                            
-# limitations under the License. 
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
 
 bin=`which $0`
 bin=`dirname ${bin}`
 bin=`cd "$bin"; pwd`
 
+HCAT_LOG_DIR="$bin"/../var/log
+
 if [ -e "$bin/../libexec/hcat-config.sh" ]; then
   . "$bin"/../libexec/hcat-config.sh
 else
@@ -34,7 +36,7 @@ function print_usage() {
 
 function start_hcat() {
   # back ground the metastore service and record the pid
-  PID_FILE=${HCAT_PID_DIR}/hcat.pid
+  PID_FILE=${HCAT_LOG_DIR}/hcat.pid
   SLEEP_TIME_AFTER_START=15
 
   # check if service is already running, if so exit
@@ -45,7 +47,7 @@ function start_hcat() {
     exit 1
   fi
 
-  HIVE_SITE_XML=${HCAT_CONF_DIR}/hive-site.xml
+  HIVE_SITE_XML=${HIVE_HOME}/conf/hive-site.xml
   if [ ! -e $HIVE_SITE_XML ]
   then
     echo "Missing hive-site.xml, expected at [$HIVE_SITE_XML]";
@@ -72,6 +74,10 @@ function start_hcat() {
     AUX_CLASSPATH=${AUX_CLASSPATH}:$f
   done
 
+  for f in ${HCAT_PREFIX}/share/hcatalog/*.jar ; do
+    AUX_CLASSPATH=${AUX_CLASSPATH}:$f
+  done
+
   # echo AUX_CLASSPATH = ${AUX_CLASSPATH}
   export AUX_CLASSPATH=${AUX_CLASSPATH}
 
@@ -80,7 +86,7 @@ function start_hcat() {
   export HADOOP_OPTS="${HADOOP_OPTS} -server -XX:+UseConcMarkSweepGC -XX:ErrorFile=${HCAT_LOG_DIR}/hcat_err_pid%p.log -Xloggc:${HCAT_LOG_DIR}/hcat_gc.log-`date +'%Y%m%d%H%M'` -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintGCDateStamps"
   export HADOOP_HEAPSIZE=2048 # 8G is better if you have it
   export METASTORE_PORT=${METASTORE_PORT}
-  nohup ${HCAT_PREFIX}/bin/hive --service metastore >${HCAT_LOG_DIR}/hcat.out 2>${HCAT_LOG_DIR}/hcat.err &
+  nohup ${HIVE_HOME}/bin/hive --service metastore >${HCAT_LOG_DIR}/hcat.out 2>${HCAT_LOG_DIR}/hcat.err &
 
   PID=$!
 
@@ -105,7 +111,7 @@ function start_hcat() {
 function stop_hcat() {
   SLEEP_TIME_AFTER_KILL=30
 
-  PID_FILE=${HCAT_PID_DIR}/hcat.pid
+  PID_FILE=${HCAT_LOG_DIR}/hcat.pid
   echo looking for $PID_FILE
 
   # check if service is already running, if so exit

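With the pid file moved next to the logs and hive-site.xml now read from Hive's
own conf directory, driving the server looks roughly like this (a hedged
sketch; the Hive path is a placeholder):

    export HIVE_HOME=/usr/lib/hive      # assumed Hive install location
    sbin/hcat_server.sh start           # spawns "hive --service metastore";
                                        # writes var/log/hcat.pid, hcat.out, hcat.err
    sbin/hcat_server.sh stop            # reads the same pid file to stop it
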
Modified: incubator/hcatalog/branches/branch-0.4/build-common.xml
URL: http://svn.apache.org/viewvc/incubator/hcatalog/branches/branch-0.4/build-common.xml?rev=1325183&r1=1325182&r2=1325183&view=diff
==============================================================================
--- incubator/hcatalog/branches/branch-0.4/build-common.xml (original)
+++ incubator/hcatalog/branches/branch-0.4/build-common.xml Thu Apr 12 10:08:45 2012
@@ -30,23 +30,10 @@
 
     <!-- common classpaths for various builds -->
     <path id="common.classpath">
-        <fileset dir="${hive.root}" includes="testlibs/*.jar"/>
-        <fileset dir="${hive.root}/lib" includes="*.jar"/>
-        <fileset dir="${hive.root}/build/builtins" includes="*.jar"/>
-        <fileset dir="${hive.root}/build/cli" includes="*.jar"/>
-        <fileset dir="${hive.root}/build/common" includes="*.jar"/>
-        <fileset dir="${hive.root}/build/serde" includes="*.jar"/>
-        <fileset dir="${hive.root}/build/metastore" includes="*.jar"/>
-        <fileset dir="${hive.root}/build/ql" includes="*.jar"/>
-        <fileset dir="${hive.root}/build/hadoopcore/hadoop-${hadoop.version}/">
-          <include name="**/hadoop-*.jar" />
-          <exclude name="**/*test*.jar" />
-          <!-- below is for 0.23 onwards -->
-          <!--include name="share/hadoop/common/lib/*.jar" /-->
-          <exclude name="share/hadoop/common/lib/hadoop-mapreduce-*.jar" />
-          <exclude name="share/hadoop/common/lib/hadoop-yarn-*.jar" />
+        <fileset dir="${common.ivy.lib.dir}">
+          <include name="**/*.jar" />
         </fileset>
-        <fileset dir="${common.ivy.lib.dir}" includes="*.jar"/>
+        <!--
         <fileset dir="${hive.root}/build/ivy/lib/default" includes="antlr-3.0.1.jar"/>
         <fileset dir="${hive.root}/build/ivy/lib/default" includes="commons-lang-*.jar"/>
         <fileset dir="${hive.root}/build/ivy/lib/default" includes="commons-logging-*.jar"/>
@@ -56,5 +43,6 @@
         <fileset dir="${hive.root}/build/ivy/lib/default" includes="jdo2-api-*.jar"/>
         <fileset dir="${hive.root}/build/ivy/lib/default" includes="libfb303-*.jar"/>
         <fileset dir="${hive.root}/lib" includes="asm-3.1.jar"/>
+        -->
     </path>
 </project>

Modified: incubator/hcatalog/branches/branch-0.4/build.xml
URL: http://svn.apache.org/viewvc/incubator/hcatalog/branches/branch-0.4/build.xml?rev=1325183&r1=1325182&r2=1325183&view=diff
==============================================================================
--- incubator/hcatalog/branches/branch-0.4/build.xml (original)
+++ incubator/hcatalog/branches/branch-0.4/build.xml Thu Apr 12 10:08:45 2012
@@ -44,7 +44,6 @@
   <property name="package.dir"  location="${basedir}/src/packages"/>
   <property name="docs.src" value="${basedir}/src/docs"/>
   <property name="build.dir" value="${basedir}/build"/>
-  <property name="artifacts.dir" value="${basedir}/artifacts"/>
   <property name="build.classes" value="${build.dir}/classes" />
   <property name="build.docs" value="${build.dir}/docs" />
   <property name="build.javadoc" value="${build.docs}/api" />
@@ -69,7 +68,6 @@
   <property name="test.all.file" value="${test.src.dir}/all-tests"/>
   <property name="test.exclude.file" value="${test.src.dir}/excluded-tests"/>
   <property name="test.output" value="no"/>
-  <property name="hive.conf.dir" value="${hive.root}/conf"/>
   <property name="test.warehouse.dir" value="/tmp/hcat_junit_warehouse"/>
   <property name="test.excludes" value="${test.src.dir}/e2e/**"/>
 
@@ -150,23 +148,10 @@
     <pathelement location="${test.build.classes}" />
     <pathelement location="${build.classes}" />
     <pathelement location="conf"/>
-    <pathelement location="${hive.conf.dir}"/>
-    <!-- jars Hive depends on -->
-    <fileset dir="${hive.root}/build/ivy/lib/default/">
+    <fileset dir="${ivy.lib.dir}">
       <include name="**/*.jar" />
     </fileset>
-    <!-- jars Hadoop depends on -->
     <pathelement location="${hcatalog.jar}"/>
-    <path refid="classpath"/>
-    <fileset dir="${hive.root}/build/hadoopcore/hadoop-${hadoop.version}/">
-      <include name="**/hadoop-*.jar" />
-      <include name="lib/**/*.jar" />
-      <exclude name="lib/**/excluded/" />
-      <!-- below is for 0.23 onwards -->
-      <include name="share/hadoop/common/lib/*.jar" />
-      <exclude name="share/hadoop/common/lib/hadoop-mapreduce-*.jar" />
-      <exclude name="share/hadoop/common/lib/hadoop-yarn-*.jar" />
-    </fileset>
   </path>
 
   <!--
@@ -226,6 +211,10 @@
     <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
       pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}" conf="common"/>
     <ivy:cachepath pathid="compile.classpath" conf="common"/>     
+    <ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="default"/>
+    <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
+      pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}" conf="default"/>
+    <ivy:cachepath pathid="compile.classpath" conf="default"/>     
   </target>
 
   <target name="ivy-package" depends="ivy-init"
@@ -249,25 +238,13 @@
     <mkdir dir="${test.build.classes}" />
   </target>
 
-
-  <!-- Build the external hive code -->
-  <target name="hive.jar">
-    <echo message="Building hive with hadoop.version ${hadoop.version}" />
-    <local name="param.shims.include"/>
-    <expandToProperty name="param.shims.include" value="shims.${shims.name}.hive.shims.include"/>
-    <ant antfile="build.xml" dir="${hive.root}" target="package" useNativeBasedir='true'>
-       <property name="shims.include" value="${param.shims.include}"/>
-       <property name="hadoop.version" value="${hadoop.version}"/>
-    </ant>
-  </target>
-
   <!--
   ================================================================================
   Main Build and Jar Section
   ================================================================================
   -->
   <!-- Build HCatalog src files -->
-  <target name="compile-src" depends="hive.jar,init">
+  <target name="compile-src" depends="init">
     <javac encoding="${build.encoding}" srcdir="${src.dir}" excludes="${excludes}"
         includes="**/*.java" destdir="${build.classes}" debug="${javac.debug}"
         optimize="${javac.optimize}" target="${javac.version}"
@@ -395,21 +372,11 @@
   Clean Section
   ================================================================================
   -->
-  <!-- Clean the external hive code -->
-  <target name="hive.clean">
-    <ant antfile="${hive.root}/build.xml" dir="${hive.root}" target="clean"/>
-  </target>
-
   <!-- Clean up children -->
-  <target name="clean-builds" depends="hive.clean" description="Cleanup build">
+  <target name="clean" description="Cleanup all build artifacts">
     <delete dir="${build.dir}" />
     <delete dir="${test.warehouse.dir}"/>
-  </target>
- 
-  <!-- Clean up children -->
-  <target name="clean" depends="clean-builds" description="Cleanup all build artifacts"> 
-    <delete dir="${artifacts.dir}" />
-    <ant dir="${test.e2e.dir}" target="clean"/>
+    <ant target="clean" dir="storage-handlers" inheritAll="false" useNativeBasedir="true"/>
   </target>
  
   <!--
@@ -500,7 +467,37 @@
       <fileset dir="bin">
         <include name="hcat-config.sh"/>
       </fileset>
-      <!-- fileset file="hive/external/bin/hive-config.sh" /-->
+    </copy>
+
+    <copy todir="${dist.dir}/sbin">
+      <fileset dir="${package.dir}">
+        <include name="*.sh"/>
+      </fileset>
+    </copy>
+
+    <copy todir="${dist.dir}/etc/${ant.project.name}">
+      <fileset dir="conf" />
+    </copy>
+
+    <copy todir="${dist.dir}/share/${ant.project.name}/scripts">
+       <fileset dir="scripts">
+         <include name="*.sh"/>
+       </fileset>
+    </copy>
+
+    <copy todir="${dist.dir}/share/${ant.project.name}/templates/conf">
+      <fileset dir="src/packages/templates/conf">
+        <include name="*"/>
+      </fileset>
+    </copy>
+
+    <copy todir="${dist.dir}/sbin">
+      <fileset dir="${package.dir}">
+        <include name="*.sh"/>
+      </fileset>
+      <fileset dir="bin">
+        <include name="hcat_server.sh"/>
+      </fileset>
     </copy>
 
 	<!-- Copy the licenses and such -->
@@ -529,34 +526,16 @@
       </java>
     </target>
 
-    <target name="build-artifacts" depends="jar,docs" description="Build hcatalog artifacts and copy them">
-        <mkdir dir="${artifacts.dir}"/>
-        <copy todir="${artifacts.dir}">
-          <fileset dir="build/hcatalog">
-            <include name="*.jar"/>
-          </fileset>
-        </copy>
-        <mkdir dir="${artifacts.dir}/docs"/>
-        <copy todir="${artifacts.dir}/docs">
-          <fileset dir="build/docs" />
-        </copy>
-    </target>
-
-    <target name="build-artifacts-and-clean" depends="build-artifacts,clean-builds" description="First build artifacts, then clean up"/>
-
-
     <!-- ================================================================== -->
     <!-- Make release tarball                                               -->
     <!-- ================================================================== -->
-    <target name="src-release" depends="build-artifacts-and-clean" description="Source distribution">
+    <target name="src-release" depends="clean" description="Source distribution">
         <mkdir dir="${build.dir}"/>
         <tar compression="gzip" longfile="gnu" destfile="${build.dir}/${ant.project.name}-src-${hcatalog.version}.tar.gz">
             <tarfileset dir="${basedir}" mode="644" prefix="${ant.project.name}-src-${hcatalog.version}">
                 <include name="conf/**"/>
-                <include name="hive/**"/>
                 <include name="ivy/**"/>
                 <exclude name="ivy/*.jar"/>
-                <include name="lib/**"/>
                 <include name="license/**"/>
                 <include name="shims/**"/>
                 <include name="src/**"/>
@@ -569,9 +548,6 @@
                 <include name="scripts/**"/>
                 <include name="bin/**"/>
             </tarfileset>
-            <tarfileset dir="" mode="644" prefix="${ant.project.name}-src-${hcatalog.version}">
-                <include name="artifacts/**"/>
-            </tarfileset>
         </tar>
     </target> 
 
@@ -594,132 +570,6 @@
         </tar>
     </target>
 
-    <target name="rpm" depends="tar" description="Create rpm package">
-        <mkdir dir="${package.buildroot}/BUILD" />
-        <mkdir dir="${package.buildroot}/RPMS" />
-        <mkdir dir="${package.buildroot}/SRPMS" />
-        <mkdir dir="${package.buildroot}/SOURCES" />
-        <mkdir dir="${package.buildroot}/SPECS" />
-        <copy todir="${package.buildroot}/SOURCES">
-            <fileset dir="${build.dir}">
-                <include name="${final.name}.tar.gz" />
-            </fileset>
-        </copy>
-        <copy file="${package.dir}/rpm/spec/hcatalog.spec" todir="${package.buildroot}/SPECS">
-            <filterchain>
-                <replacetokens>
-                    <token key="final.name" value="${final.name}" />
-                    <token key="version" value="${hcatalog.version}" />
-                    <token key="package.release" value="${package.release}" />
-                    <token key="package.build.dir" value="${package.build.dir}" />
-                    <token key="package.prefix" value="${package.prefix}" />
-                    <token key="package.conf.dir" value="${package.conf.dir}" />
-                    <token key="package.log.dir" value="${package.log.dir}" />
-                    <token key="package.pid.dir" value="${package.pid.dir}" />
-                    <token key="package.var.dir" value="${package.var.dir}" />
-                </replacetokens>
-            </filterchain>
-        </copy>
-        <rpm specFile="hcatalog.spec" command="-bb --target ${os-arch}" topDir="${package.buildroot}" cleanBuildDir="true" failOnError="true"/>
-        <copy todir="${build.dir}/" flatten="true">
-            <fileset dir="${package.buildroot}/RPMS">
-                <include name="**/*.rpm" />
-            </fileset>
-        </copy>
-        <delete dir="${package.buildroot}" quiet="true" verbose="false"/>
-    </target>
-
-    <target name="deb" depends="tar" description="Create debian package">
-        <taskdef name="deb" classname="org.vafer.jdeb.ant.DebAntTask">
-            <classpath refid="package.classpath" />
-        </taskdef>
-        <mkdir dir="${package.build.dir}/hcatalog.control" />
-        <mkdir dir="${package.build.dir}/hcatalog-server.control" />
-        <mkdir dir="${package.buildroot}/${package.prefix}" />
-        <copy todir="${package.buildroot}/${package.prefix}">
-            <fileset dir="${build.dir}/${final.name}">
-                <include name="**" />
-            </fileset>
-        </copy>
-        <copy todir="${package.build.dir}/hcatalog.control">
-            <fileset dir="${package.dir}/deb/hcatalog.control">
-                <exclude name="control" />
-                <exclude name="server.control" />
-            </fileset>
-        </copy>
-        <copy file="${package.dir}/deb/hcatalog.control/control" todir="${package.build.dir}/hcatalog.control">
-            <filterchain>
-                <replacetokens>
-                    <token key="final.name" value="${final.name}" />
-                    <token key="version" value="${hcatalog.version}" />
-                    <token key="package.release" value="${package.release}" />
-                    <token key="package.build.dir" value="${package.build.dir}" />
-                    <token key="package.prefix" value="${package.prefix}" />
-                    <token key="package.conf.dir" value="${package.conf.dir}" />
-                    <token key="package.log.dir" value="${package.log.dir}" />
-                    <token key="package.pid.dir" value="${package.pid.dir}" />
-                </replacetokens>
-            </filterchain>
-        </copy>
-        <copy file="${package.dir}/deb/hcatalog.control/server.control" tofile="${package.build.dir}/hcatalog-server.control/control">
-            <filterchain>
-                <replacetokens>
-                    <token key="final.name" value="${final.name}" />
-                    <token key="version" value="${_version}" />
-                    <token key="package.release" value="${package.release}" />
-                    <token key="package.build.dir" value="${package.build.dir}" />
-                    <token key="package.prefix" value="${package.prefix}" />
-                    <token key="package.conf.dir" value="${package.conf.dir}" />
-                    <token key="package.log.dir" value="${package.log.dir}" />
-                    <token key="package.pid.dir" value="${package.pid.dir}" />
-                </replacetokens>
-            </filterchain>
-        </copy>
-        <deb destfile="${package.buildroot}/${ant.project.name}_${_version}-${package.release}_${os-arch}.deb" control="${package.build.dir}/hcatalog.control">
-            <tarfileset dir="${build.dir}/${final.name}" filemode="644" prefix="${package.prefix}">
-                <exclude name="bin" />
-                <exclude name="etc" />
-                <exclude name="etc/**" />
-                <exclude name="sbin" />
-                <exclude name="sbin/**" />
-                <exclude name="share/hcatalog/hcatalog-server*" />
-                <include name="**" />
-            </tarfileset>
-            <tarfileset dir="${build.dir}/${final.name}/etc/hcatalog" filemode="644" prefix="${package.conf.dir}">
-                <include name="**" />
-            </tarfileset>
-            <tarfileset dir="${build.dir}/${final.name}/bin" filemode="755" prefix="${package.prefix}/bin">
-                <include name="**" />
-            </tarfileset>
-            <tarfileset dir="${build.dir}/${final.name}/sbin" filemode="755" prefix="${package.prefix}/sbin">
-                <include name="update-hcatalog-env.sh" />
-            </tarfileset>
-        </deb>
-        <copy todir="${build.dir}/" flatten="true">
-            <fileset dir="${package.buildroot}">
-                <include name="**/hcatalog*.deb" />
-            </fileset>
-        </copy>
-        <deb destfile="${package.buildroot}/${ant.project.name}-server_${_version}-${package.release}_${os-arch}.deb" control="${package.build.dir}/hcatalog-server.control">
-            <tarfileset dir="${build.dir}/${final.name}" filemode="644" prefix="${package.prefix}">
-                <include name="share/hcatalog/hcatalog-server*" />
-            </tarfileset>
-            <tarfileset dir="${build.dir}/${final.name}/sbin" filemode="755" prefix="${package.prefix}/sbin">
-                <include name="hcat_server.sh" />
-            </tarfileset>
-            <tarfileset dir="${basedir}/src/packages/deb/init.d" filemode="755" prefix="/etc/init.d">
-                <include name="**" />
-            </tarfileset>
-
-        </deb>
-        <copy todir="${build.dir}/" flatten="true">
-            <fileset dir="${package.buildroot}">
-                <include name="**/hcatalog*.deb" />
-            </fileset>
-        </copy>
-        <delete dir="${package.buildroot}" quiet="true" verbose="false"/>
-    </target>
-
     <!-- ================================================================== -->
     <!-- End to end tests                                                   -->
     <!-- ================================================================== -->

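With the rpm, deb, and build-artifacts targets gone, a hedged sketch of the
remaining packaging flow (target names are taken from the build file above;
artifact names depend on ${hcatalog.version}):

    ant clean tar        # binary tarball, staged via the new dist layout
    ant src-release      # source tarball; note it runs "clean" first
    ls build/*.tar.gz
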
Propchange: incubator/hcatalog/branches/branch-0.4/hive/
------------------------------------------------------------------------------
--- svn:externals (original)
+++ svn:externals Thu Apr 12 10:08:45 2012
@@ -1 +1 @@
-http://svn.apache.org/repos/asf/hive/branches/branch-0.8-r2 external
+

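Emptying svn:externals means a fresh checkout no longer drags in a Hive source
tree. A quick way to confirm from a working copy (sketch):

    svn propget svn:externals hive/
    # prints nothing now; before this commit it printed:
    #   http://svn.apache.org/repos/asf/hive/branches/branch-0.8-r2 external
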
Modified: incubator/hcatalog/branches/branch-0.4/ivy.xml
URL: http://svn.apache.org/viewvc/incubator/hcatalog/branches/branch-0.4/ivy.xml?rev=1325183&r1=1325182&r2=1325183&view=diff
==============================================================================
--- incubator/hcatalog/branches/branch-0.4/ivy.xml (original)
+++ incubator/hcatalog/branches/branch-0.4/ivy.xml Thu Apr 12 10:08:45 2012
@@ -37,21 +37,92 @@
         <conf name="releaseaudit" visibility="private"/>
     </configurations>
     <dependencies>
+        <!-- needed to compile -->
+        <dependency org="org.antlr" name="antlr" rev="${antlr.version}"
+          conf="common->master" />
+        <dependency org="org.antlr" name="antlr-runtime" rev="${antlr.version}"
+          conf="common->master" />
         <dependency org="org.apache.pig" name="pig" rev="${pig.version}"
           conf="common->master" />
         <dependency org="commons-cli" name="commons-cli" rev="${commons-cli.version}"
           conf="common->master"/>
-        <!-- Removed until there is a secure version of hadoop in maven
-        <dependency org="org.apache.hadoop" name="hadoop-core" rev="${hadoop-core.version}"
+        <dependency org="commons-lang" name="commons-lang" rev="${commons-lang.version}"
+          conf="common->master"/>
+        <dependency org="commons-logging" name="commons-logging"
+          rev="${commons-logging.version}" conf="common->master"/>
+        <dependency org="commons-logging" name="commons-logging-api"
+          rev="${commons-logging.version}" conf="common->master"/>
+        <dependency org="org.apache.hadoop" name="hadoop-tools"
+          rev="${hadoop-tools.version}" conf="common->master" />
+        <dependency org="org.apache.hadoop" name="hadoop-core"
+          rev="${hadoop-core.version}" conf="common->master" />
+        <dependency org="org.apache.hadoop" name="hadoop-test"
+          rev="${hadoop-test.version}" conf="common->master" />
+        <dependency org="javax.jms" name="jms" rev="${jms.version}"
+          conf="common->master" />
+        <dependency org="org.apache.activemq" name="activemq-all"
+          rev="${activemq.version}" conf="common->master" />
+        <dependency org="javax.management.j2ee" name="management-api"
+          rev="${javax-mgmt.version}" conf="common->master" /> 
+        <dependency org="com.google.code.p.arat" name="rat-lib"
+          rev="${rats-lib.version}" conf="releaseaudit->default"/>
+        <dependency org="org.vafer" name="jdeb" rev="${jdeb.version}"
+          conf="package->master"/>
+        <dependency org="org.codehaus.jackson" name="jackson-mapper-asl"
+          rev="${jackson.version}" conf="common->master"/>
+        <dependency org="org.codehaus.jackson" name="jackson-core-asl"
+          rev="${jackson.version}" conf="common->master"/>
+        <dependency org="org.apache.hive" name="hive-metastore"
+          rev="${hive.version}" conf="common->master"/>
+        <dependency org="org.apache.hive" name="hive-common"
+          rev="${hive.version}" conf="common->master"/>
+        <dependency org="org.apache.hive" name="hive-exec"
+          rev="${hive.version}" conf="common->master"/>
+        <dependency org="org.apache.hive" name="hive-cli"
+          rev="${hive.version}" conf="common->master"/>
+        <dependency org="org.apache.hive" name="hive-hbase-handler"
+          rev="${hive.version}" conf="common->master"/>
+        <dependency org="org.apache.thrift" name="libfb303" rev="${fb303.version}"
+          conf="common->master"/>
+        <dependency org="junit" name="junit" rev="${junit.version}"
+          conf="common->master"/>
+
+        <!-- needed to run-->
+        <dependency org="org.slf4j" name="slf4j-api" rev="${slf4j.version}"
+          conf="common->master"/>
+        <dependency org="org.slf4j" name="slf4j-log4j12" rev="${slf4j.version}"
+          conf="common->master"/>
+        <dependency org="log4j" name="log4j" rev="${log4j.version}"
           conf="common->master"/>
-          -->
-        <dependency org="org.apache.hadoop" name="hadoop-tools" rev="${hadoop-tools.version}" conf="common->master" />
-        <dependency org="javax.jms" name="jms" rev="${jms.version}" conf="common->master" />
-        <dependency org="org.apache.activemq" name="activemq-all" rev="${activemq.version}" conf="common->master" />
-        <dependency org="javax.management.j2ee" name="management-api" rev="${javax-mgmt.version}" conf="common->master" /> 
-        <dependency org="com.google.code.p.arat" name="rat-lib" rev="${rats-lib.version}" conf="releaseaudit->default"/>
-        <dependency org="org.vafer" name="jdeb" rev="${jdeb.version}" conf="package->master"/>
-        <dependency org="org.codehaus.jackson" name="jackson-mapper-asl" rev="${jackson.version}" conf="common->master"/>
-        <dependency org="org.codehaus.jackson" name="jackson-core-asl" rev="${jackson.version}" conf="common->master"/>
+        <dependency org="javax.jdo" name="jdo2-api" rev="${jdo.version}"
+          conf="default"/>
+        <dependency org="org.datanucleus" name="datanucleus-core"
+          rev="${datanucleus-core.version}" conf="default"/>
+        <dependency org="org.datanucleus" name="datanucleus-connectionpool"
+          rev="${datanucleus-connectionpool.version}" conf="default"/>
+        <dependency org="org.datanucleus" name="datanucleus-enhancer"
+          rev="${datanucleus-enhancer.version}" conf="default"/>
+        <dependency org="org.datanucleus" name="datanucleus-rdbms"
+          rev="${datanucleus-rdbms.version}" conf="default"/>
+        <dependency org="commons-dbcp" name="commons-dbcp" rev="${commons-dbcp.version}"
+            conf="common->master">
+          <exclude module="commons-pool" />
+          <exclude org="org.apache.geronimo.specs" module="geronimo-jta_1.1_spec"/>
+        </dependency>
+        <dependency org="commons-pool" name="commons-pool" rev="${commons-pool.version}"
+          conf="default"/>
+        <dependency org="org.apache.derby" name="derby" rev="${derby.version}"
+          conf="default"/>
+        <dependency org="commons-configuration" name="commons-configuration"
+          rev="${commons-configuration.version}" conf="default"/>
+        <dependency org="commons-httpclient" name="commons-httpclient"
+          rev="${commons-httpclient.version}" conf="default"/>
+        <dependency org="org.apache.hive" name="hive-builtins"
+          rev="${hive.version}" conf="common->master"/>
+        <dependency org="org.mortbay.jetty" name="jetty"
+          rev="${jetty.version}" conf="default"/>
+        <dependency org="org.mortbay.jetty" name="jetty-util"
+          rev="${jetty.version}" conf="default"/>
+
     </dependencies>
 </ivy-module>

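With Hive pulled in as ordinary Ivy dependencies rather than compiled from an
svn external, a hedged way to verify that a build fetches the hive-* artifacts
(the retrieve directory is an assumption based on ${build.ivy.lib.dir}):

    ant clean jar
    ls build/ivy/lib/*/hive-*.jar   # expect hive-cli, hive-common, hive-exec,
                                    # hive-metastore, hive-hbase-handler, ...
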
Modified: incubator/hcatalog/branches/branch-0.4/ivy/ivysettings.xml
URL: http://svn.apache.org/viewvc/incubator/hcatalog/branches/branch-0.4/ivy/ivysettings.xml?rev=1325183&r1=1325182&r2=1325183&view=diff
==============================================================================
--- incubator/hcatalog/branches/branch-0.4/ivy/ivysettings.xml (original)
+++ incubator/hcatalog/branches/branch-0.4/ivy/ivysettings.xml Thu Apr 12 10:08:45 2012
@@ -30,12 +30,22 @@
           http://www.ibiblio.net/pub/packages/maven2
   -->
   <property name="repo.maven.org" value="${mvnrepo}" override="true"/>
+  <property name="repo.apache.snapshots" value="http://repository.apache.org/content/groups/snapshots-group/" override="false"/>
+  <property name="repo.dir" value="${user.home}/.m2/repository"/>
   <property name="maven2.pattern"  value="[organisation]/[module]/[revision]/[module]-[revision](-[classifier])"/> 
   <property name="maven2.pattern.ext" value="${maven2.pattern}.[ext]"/>
+  <property name="snapshot.pattern"  value="[organisation]/[module]/[revision]/[artifact]-[revision](-[classifier]).[ext]"/>
+  <property name="resolvers" value="default" override="false"/>
+  <property name="force-resolve" value="false" override="false"/>
   <!-- pull in the local repository -->
   <include url="${ivy.default.conf.dir}/ivyconf-local.xml"/>
-  <settings defaultResolver="default"/>
+  <settings defaultResolver="${resolvers}"/>
   <resolvers>
+    <filesystem name="fs" m2compatible="true" checkconsistency="false" force="${force-resolve}"
+         checkmodified="true" changingPattern=".*-SNAPSHOT">
+      <artifact pattern="${repo.dir}/${maven2.pattern.ext}"/>
+      <ivy pattern="${repo.dir}/[organisation]/[module]/[revision]/[module]-[revision].pom"/>
+    </filesystem>
     <ibiblio
       name="jboss"
       m2compatible="true"
@@ -43,16 +53,29 @@
       pattern="[organisation]/[module]/[revision]/[artifact]-[revision](-[classifier]).[ext]" />
 
     <ibiblio name="maven2" root="${repo.maven.org}" pattern="${maven2.pattern.ext}" m2compatible="true"/>
-    <chain name="default" dual="true">
+    <ibiblio name="apache-snapshots" root="${repo.apache.snapshots}" pattern="${snapshot.pattern}"
+         checkmodified="true" changingPattern=".*-SNAPSHOT" m2compatible="true"/>
+    <url name="datanucleus-repo" m2compatible="true">
+      <artifact pattern="http://www.datanucleus.org/downloads/maven2/[organisation]/[module]/[revision]/[module]-[revision].[ext]"/>
+    </url>
+
+    <chain name="default" dual="true" checkmodified="true" changingPattern=".*-SNAPSHOT">
+      <resolver ref="fs"/>
       <resolver ref="local"/>
       <resolver ref="maven2"/>
+      <resolver ref="datanucleus-repo"/>
       <resolver ref="jboss"/>
+      <resolver ref="apache-snapshots"/>
     </chain>
     <chain name="internal">
-      <resolver ref="local"/>
+      <resolver ref="fs"/>
+      <resolver ref="apache-snapshots"/>
+      <resolver ref="maven2"/>
+      <resolver ref="datanucleus-repo"/>
     </chain>
     <chain name="external">
       <resolver ref="maven2"/>
+      <resolver ref="datanucleus-repo"/>
     </chain>
   </resolvers>
   <modules>

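Because hive.version is a SNAPSHOT (see libraries.properties below), the new
"fs" resolver lets a locally published Hive build satisfy it straight from
${user.home}/.m2/repository, with the Apache snapshots repository as a
fallback. A sketch of checking and force-refreshing (the path is an
assumption):

    ls ~/.m2/repository/org/apache/hive/hive-metastore/0.9.0-SNAPSHOT/
    # re-resolve changing SNAPSHOT artifacts instead of trusting the ivy cache:
    ant -Dforce-resolve=true jar
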
Modified: incubator/hcatalog/branches/branch-0.4/ivy/libraries.properties
URL: http://svn.apache.org/viewvc/incubator/hcatalog/branches/branch-0.4/ivy/libraries.properties?rev=1325183&r1=1325182&r2=1325183&view=diff
==============================================================================
--- incubator/hcatalog/branches/branch-0.4/ivy/libraries.properties (original)
+++ incubator/hcatalog/branches/branch-0.4/ivy/libraries.properties Thu Apr 12 10:08:45 2012
@@ -13,15 +13,38 @@
 #This properties file lists the versions of the various artifacts used by hadoop and components.
 #It drives ivy and the generation of a maven POM
 
-junit.version=3.8.1
-ivy.version=2.1.0
-pig.version=0.8.0
-commons-cli.version=1.2
-#hadoop-core.version=0.20.2 Waiting for a secure version of hadoop in maven
-hadoop-tools.version=0.20.205.0
-jms.version=1.1
 activemq.version=5.5.0
+antlr.version=3.0.1
+commons-cli.version=1.2
+commons-configuration.version=1.6
+commons-dbcp.version=1.4
+commons-httpclient.version=3.0.1
+commons-lang.version=2.4
+commons-logging.version=1.0.4
+commons-pool.version=1.5.4
+datanucleus-connectionpool.version=2.0.3
+datanucleus-core.version=2.0.3
+datanucleus-enhancer.version=2.0.3
+datanucleus-rdbms.version=2.0.3
+derby.version=10.4.2.0
+fb303.version=0.7.0
+guava.version=11.0
+hadoop-core.version=1.0.1
+hadoop-test.version=1.0.1
+hadoop-tools.version=1.0.1
+hbase.version=0.92.0
+high-scale-lib.version=1.1.1
+hive.version=0.9.0-SNAPSHOT
+ivy.version=2.1.0
+jackson.version=1.7.3
 javax-mgmt.version=1.1-rev-1
-rats-lib.version=0.5.1
 jdeb.version=0.8
-jackson.version=1.7.3
+jdo.version=2.3-ec
+jetty.version=6.1.26
+jms.version=1.1
+junit.version=4.10
+log4j.version=1.2.16
+pig.version=0.8.0
+rats-lib.version=0.5.1
+slf4j.version=1.6.1
+zookeeper.version=3.4.3

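Since Ant command-line properties take precedence over values loaded from a
properties file, any of these versions can be overridden per build without
editing the file (a sketch; the release numbers are hypothetical):

    ant -Dhive.version=0.9.0 -Dhadoop-core.version=1.0.2 clean jar
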
Modified: incubator/hcatalog/branches/branch-0.4/scripts/hcat_server_install.sh
URL: http://svn.apache.org/viewvc/incubator/hcatalog/branches/branch-0.4/scripts/hcat_server_install.sh?rev=1325183&r1=1325182&r2=1325183&view=diff
==============================================================================
--- incubator/hcatalog/branches/branch-0.4/scripts/hcat_server_install.sh (original)
+++ incubator/hcatalog/branches/branch-0.4/scripts/hcat_server_install.sh Thu Apr 12 10:08:45 2012
@@ -108,7 +108,7 @@ fi
 
 # Create the needed directories in root
 #for dir in var conf var/log bin lib ; do
-for dir in var var/log bin etc libexec sbin share src; do
+for dir in var var/log bin etc libexec sbin share ; do
     if [ ! -d $root/$dir ] ; then
         mkdir $root/$dir
     fi
@@ -117,7 +117,7 @@ done
 # Move files into the appropriate directories
 if [ "$alternate_root" == "y" ] ; then
     echo Installing into [$root]
-    for dir in bin etc libexec sbin share src ; do
+    for dir in bin etc libexec sbin share ; do
         for file in ./$dir/* ; do
             cp -R $file $root/$dir
         done
@@ -136,11 +136,11 @@ ln -sf $root/etc/hcatalog $root/share/hc
 #done
 
 # Move the proto-hive-site.xml to hive-site.xml
-cp $root/etc/hcatalog/proto-hive-site.xml $root/etc/hcatalog/hive-site.xml
+#cp $root/etc/hcatalog/proto-hive-site.xml $root/etc/hcatalog/hive-site.xml
 
 # Set permissions on hive-site.xml to 700, since it will contain the password to the 
 # database
-chmod 700 $root/etc/hcatalog/hive-site.xml
+#chmod 700 $root/etc/hcatalog/hive-site.xml
 
 # Write out an environment file so that the start file can use it later
 cat > $root/etc/hcatalog/hcat-env.sh <<!!

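A hedged example invocation of the updated install script, mirroring the flags
documented in install.xml (every value below is a placeholder):

    # -r install root (owned by the service user), -d directory containing the
    # MySQL JDBC driver jar, -h Hadoop install, -p metastore Thrift port
    share/hcatalog/scripts/hcat_server_install.sh \
        -r /usr/local/hive -d /usr/share/java -h /usr/lib/hadoop -p 9083
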
Modified: incubator/hcatalog/branches/branch-0.4/src/docs/src/documentation/content/xdocs/cli.xml
URL: http://svn.apache.org/viewvc/incubator/hcatalog/branches/branch-0.4/src/docs/src/documentation/content/xdocs/cli.xml?rev=1325183&r1=1325182&r2=1325183&view=diff
==============================================================================
--- incubator/hcatalog/branches/branch-0.4/src/docs/src/documentation/content/xdocs/cli.xml (original)
+++ incubator/hcatalog/branches/branch-0.4/src/docs/src/documentation/content/xdocs/cli.xml Thu Apr 12 10:08:45 2012
@@ -26,7 +26,13 @@
 <!-- ==================================================================== -->
 <section>
 	<title>Set Up</title>
-<p>The HCatalog command line interface (CLI) can be invoked as <code>hcat</code>. </p>
+<p>The HCatalog command line interface (CLI) can be invoked as
+<code>HIVE_HOME=</code><em>hive_home</em> <em>hcat_home</em><code>/bin/hcat</code>
+where <em>hive_home</em> is the directory where Hive has been installed and
+<em>hcat_home</em> is the directory where HCatalog has been installed.</p>
+
+<p>If you are using BigTop's rpms or debs, you can invoke the CLI as
+<code>/usr/bin/hcat</code>.</p>
 
 
 </section>

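Concretely, an invocation might look like the following (a sketch: both paths
are placeholders, and -e is assumed to run a one-shot command as in Hive):

    HIVE_HOME=/usr/lib/hive /usr/local/hcatalog/bin/hcat -e "SHOW TABLES;"
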
Modified: incubator/hcatalog/branches/branch-0.4/src/docs/src/documentation/content/xdocs/install.xml
URL: http://svn.apache.org/viewvc/incubator/hcatalog/branches/branch-0.4/src/docs/src/documentation/content/xdocs/install.xml?rev=1325183&r1=1325182&r2=1325183&view=diff
==============================================================================
--- incubator/hcatalog/branches/branch-0.4/src/docs/src/documentation/content/xdocs/install.xml (original)
+++ incubator/hcatalog/branches/branch-0.4/src/docs/src/documentation/content/xdocs/install.xml Thu Apr 12 10:08:45 2012
@@ -82,13 +82,17 @@
 
     <p><strong>Database Setup</strong></p>
 
+    <p>If you do not already have Hive installed with MySQL, the following will
+    walk you through how to do so.  If you have already set this up, you can skip
+    this step.</p>
+
     <p>Select a machine to install the database on.  This need not be the same
     machine as the Thrift server, which we will set up later.  For large
     clusters we recommend that they not be the same machine.  For the 
     purposes of these instructions we will refer to this machine as
-    <em>hcatdb.acme.com</em></p>
+    <em>hivedb.acme.com</em></p>
 
-    <p>Install MySQL server on <em>hcatdb.acme.com</em>.  You can obtain
+    <p>Install MySQL server on <em>hivedb.acme.com</em>.  You can obtain
     packages for MySQL from <a href="http://www.mysql.com/downloads/">MySQL's
     download site</a>.  We have developed and tested with versions 5.1.46
     and 5.1.48.  We suggest you use these versions or later.
@@ -98,22 +102,25 @@
     user, and replace <em>dbpassword</em> in the following commands with it.</p>
 
     <p><code>mysql -u root</code></p>
-    <p><code>mysql> CREATE USER 'hive'@'</code><em>hcatdb.acme.com</em><code>' IDENTIFIED BY '</code><em>dbpassword</em><code>';</code></p>
+    <p><code>mysql> CREATE USER 'hive'@'</code><em>hivedb.acme.com</em><code>' IDENTIFIED BY '</code><em>dbpassword</em><code>';</code></p>
     <p><code>mysql> CREATE DATABASE hivemetastoredb DEFAULT CHARACTER SET latin1 DEFAULT COLLATE latin1_swedish_ci;</code></p>
-    <p><code>mysql> GRANT ALL PRIVILEGES ON hivemetastoredb.* TO 'hive'@'</code><em>hcatdb.acme.com</em><code>' WITH GRANT OPTION;</code></p>
+    <p><code>mysql> GRANT ALL PRIVILEGES ON hivemetastoredb.* TO 'hive'@'</code><em>hivedb.acme.com</em><code>' WITH GRANT OPTION;</code></p>
     <p><code>mysql> flush privileges;</code></p>
     <p><code>mysql> quit;</code></p>
 
-    <p>In a temporary directory, untar the HCatalog installation tarball.</p>
-
-    <p><code>tar xzf hcatalog-0.4.0.tar.gz</code></p>
+    <p>Use the database installation script found in the Hive package to create the
+    database.  <code>hive_home</code> in the line below refers to the directory
+    where you have installed Hive.  If you are using Hive rpms, then this will
+    be <code>/usr/lib/hive</code>.</p>
 
-    <p>Use the database installation script found in the package to create the
-    database.</p>
-    <p><code>mysql -u hive -D hivemetastoredb -h</code><em>hcatdb.acme.com</em><code> -p &lt; share/hcatalog/hive/external/metastore/scripts/upgrade/mysql/hive-schema-0.8.0.mysql.sql</code></p>
+    <p><code>mysql -u hive -D hivemetastoredb -h</code><em>hivedb.acme.com</em><code> -p &lt; </code><em>hive_home</em><code>/scripts/metastore/upgrade/mysql/hive-schema-0.9.0.mysql.sql</code></p>
 
     <p><strong>Thrift Server Setup</strong></p>
 
+    <p>If you do not already have Hive running a metastore server using Thrift,
+    you can use the following instructions to set up and run one.  You may skip
+    this step if you are already using a Hive metastore server.</p>
+
     <p>Select a machine to install your Thrift server on.  For smaller and test
     installations this can be the same machine as the database.  For the
     purposes of these instructions we will refer to this machine as
@@ -126,14 +133,14 @@
 
     <p>Select a user to run the Thrift server as.  This user should not be a
     human user, and must be able to act as a proxy for other users.  We suggest
-    the name "hcat" for the user.  Throughout the rest of this documentation 
-    we will refer to this user as <em>hcat</em>.  If necessary, add the user to 
+    the name "hive" for the user.  Throughout the rest of this documentation 
+    we will refer to this user as <em>hive</em>.  If necessary, add the user to 
     <em>hcatsvr.acme.com</em>.</p>
 
     <p>Select a <em>root</em> directory for your installation of HCatalog.  This 
-    directory must be owned by the <em>hcat</em> user.  We recommend
-    <code>/usr/local/hcat</code>.  If necessary, create the directory.  You will
-    need to be the <em>hcat</em> user for the operations described in the remainder
+    directory must be owned by the <em>hive</em> user.  We recommend
+    <code>/usr/local/hive</code>.  If necessary, create the directory.  You will
+    need to be the <em>hive</em> user for the operations described in the remainder
     of this Thrift Server Setup section.</p>
 
     <p>Copy the HCatalog installation tarball into a temporary directory, and untar
@@ -150,7 +157,7 @@
     <p><code>cd hcatalog-0.4.0</code></p>
     <p><code>share/hcatalog/scripts/hcat_server_install.sh -r </code><em>root</em><code> -d </code><em>dbroot</em><code> -h </code><em>hadoop_home</em><code> -p </code><em>portnum</em></p>
 
-    <p>Now you need to edit your <em>root</em><code>/etc/hcatalog/hive-site.xml</code> file.
+    <p>Now you need to edit your <em>hive_home</em><code>/conf/hive-site.xml</code> file.
     Open this file in your favorite text editor.  The following table shows the
     values you need to configure.</p>
 
@@ -160,9 +167,21 @@
             <th>Value to Set it to</th>
         </tr>
         <tr>
+            <td>hive.metastore.local</td>
+            <td>false</td>
+        </tr>
+        <tr>
             <td>javax.jdo.option.ConnectionURL</td>
-            <td>In the JDBC connection string, change DBHOSTNAME to the name 
-            of the machine you put the MySQL server on.</td>
+            <td>jdbc:mysql://<em>hostname</em>/hivemetastoredb?createDatabaseIfNotExist=true where <em>hostname</em> is the name of the machine you installed MySQL on.</td>
+        </tr>
+        <tr>
+            <td>javax.jdo.option.ConnectionDriverName</td>
+            <td>com.mysql.jdbc.Driver</td>
+        </tr>
+
+        <tr>
+            <td>javax.jdo.option.ConnectionUserName</td>
+            <td>hive</td>
         </tr>
         <tr>
             <td>javax.jdo.option.ConnectionPassword</td>
@@ -170,20 +189,30 @@
             above.</td>
         </tr>
         <tr>
+            <td>hive.semantic.analyzer.factory.impl</td>
+            <td>org.apache.hcatalog.cli.HCatSemanticAnalyzerFactory</td>
+        </tr>
+        <tr>
+            <td>hadoop.clientside.fs.operations</td>
+            <td>true</td>
+        </tr>
+        <tr>
             <td>hive.metastore.warehouse.dir</td>
             <td>The directory can be a URI or an absolute file path. If it is an absolute file path, it will be resolved to a URI by the metastore:
             <p>-- If default hdfs was specified in core-site.xml, path resolves to HDFS location. </p>
             <p>-- Otherwise, path is resolved as local file: URI.</p>
             <p>This setting becomes effective when creating new tables (it takes precedence over default DBS.DB_LOCATION_URI at the time of table creation).</p>
+            <p>You only need to set this if you have not yet configured Hive to run on your system.</p>
             </td>
         </tr>
         <tr>
             <td>hive.metastore.uris</td>
-            <td>Set the hostname of your Thrift
-            server by replacing <em>SVRHOST</em> with the name of the
-            machine you are installing the Thrift server on.  You can also
-            change the port the Thrift server runs on by changing the default
-            value of 3306.</td>
+            <td>thrift://<em>hostname</em>:<em>portnum</em> where <em>hostname</em> is the name of the machine hosting the Thrift server, and <em>portnum</em> is the port number
+            used above in the installation script.</td>
+        </tr>
+        <tr>
+            <td>hive.metastore.execute.setugi</td>
+            <td>true</td>
         </tr>
         <tr>
             <td>hive.metastore.sasl.enabled</td>
@@ -220,8 +249,10 @@
   <section>
     <title>Starting the Server</title>
             
-    <p>Start the HCatalog server by switching directories to
-    <em>root</em> and invoking <code>sbin/hcat_server.sh start</code></p>
+    <p>To start your server, HCatalog needs to know where Hive is installed.
+    This is communicated by setting the environment variable <code>HIVE_HOME</code>
+    to the location where you installed Hive.  Start the HCatalog server by switching directories to
+    <em>root</em> and invoking <code>HIVE_HOME=</code><em>hive_home</em><code> sbin/hcat_server.sh start</code></p>
 
   </section>
 
@@ -253,40 +284,16 @@
     <p>Copy the HCatalog installation tarball into a temporary directory, and untar
     it.</p>
 
-    <p><code>tar zxf hcatalog-</code><em>version</em><code>.tar.gz</code></p>
-
-    <p>Now you need to edit your <em>root</em><code>/etc/hcatalog/hive-site.xml</code> file.
-    Open this file in your favorite text editor.  The following table shows the
-    values you need to configure.   These values should match the values set on
-    the HCatalog server.  Do <strong>NOT</strong> copy the configuration file
-    from your server installation as that contains the password to your
-    database, which you should not distribute to your clients.</p>
+    <p><code>tar zxf hcatalog-0.4.0.tar.gz</code></p>
 
-    <table>
-        <tr>
-            <th>Parameter</th>
-            <th>Value to Set it to</th>
-        </tr>
-        <tr>
-            <td>hive.metastore.warehouse.dir</td>
-            <td>The directory can be a URI or an absolute file path. If it is an absolute file path, it will be resolved to a URI by the metastore:
-            <p>-- If default hdfs was specified in core-site.xml, path resolves to HDFS location. </p>
-            <p>-- Otherwise, path is resolved as local file: URI.</p>
-            <p>This setting becomes effective when creating new tables (it takes precedence over default DBS.DB_LOCATION_URI at the time of table creation).</p>
-            </td>
-        </tr>
-        <tr>
-            <td>hive.metastore.uris</td>
-            <td>Set the hostname of your Thrift
-            server by replacing <em>SVRHOST</em> with the name of the
-            machine you are installing the Thrift server on.  You can also
-            change the port the Thrift server runs on by changing the default
-            value of 3306.</td>
-        </tr>
-    </table>
+    <p>Now you need to edit your <em>hive_home</em><code>/conf/hive-site.xml</code> file.
+    You can use the same file as on the server <strong>except the value of 
+    </strong><code>javax.jdo.option.ConnectionPassword</code><strong> should be
+    removed</strong>.  This avoids having the password available in plain text on
+    all of your clients.</p>
 
     <p>The HCatalog command line interface (CLI) can now be invoked as
-    <em>root</em><code>/bin/hcat</code>.</p>
+    <code>HIVE_HOME=</code><em>hive_home</em> <em>root</em><code>/bin/hcat</code>.</p>
 
   </section>
 

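Once hive-site.xml is configured on a client, a quick end-to-end check against
the running metastore might look like this (hedged; the paths and table name
are placeholders):

    export HIVE_HOME=/usr/lib/hive
    /usr/local/hcat/bin/hcat -e "CREATE TABLE hcat_smoke (id INT);"
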
Modified: incubator/hcatalog/branches/branch-0.4/src/docs/src/documentation/content/xdocs/site.xml
URL: http://svn.apache.org/viewvc/incubator/hcatalog/branches/branch-0.4/src/docs/src/documentation/content/xdocs/site.xml?rev=1325183&r1=1325182&r2=1325183&view=diff
==============================================================================
--- incubator/hcatalog/branches/branch-0.4/src/docs/src/documentation/content/xdocs/site.xml (original)
+++ incubator/hcatalog/branches/branch-0.4/src/docs/src/documentation/content/xdocs/site.xml Thu Apr 12 10:08:45 2012
@@ -40,7 +40,7 @@ See http://forrest.apache.org/docs/linki
 
   <docs label="HCatalog"> 
     <index label="Overview" href="index.html" />
-    <index label="Source Installation" href="install.html" />
+    <index label="Installation From Tarball" href="install.html" />
     <index label="RPM Installation" href="rpminstall.html" />
     <index label="Load &amp; Store Interfaces" href="loadstore.html" />
     <index label="Input &amp; Output Interfaces " href="inputoutput.html" />

Modified: incubator/hcatalog/branches/branch-0.4/src/java/org/apache/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/branches/branch-0.4/src/java/org/apache/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java?rev=1325183&r1=1325182&r2=1325183&view=diff
==============================================================================
--- incubator/hcatalog/branches/branch-0.4/src/java/org/apache/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java (original)
+++ incubator/hcatalog/branches/branch-0.4/src/java/org/apache/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java Thu Apr 12 10:08:45 2012
@@ -19,7 +19,6 @@ package org.apache.hcatalog.cli.Semantic
 
 import java.io.Serializable;
 import java.util.List;
-import java.util.Map;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -41,6 +40,7 @@ import org.apache.hadoop.hive.ql.plan.De
 import org.apache.hadoop.hive.ql.plan.DropDatabaseDesc;
 import org.apache.hadoop.hive.ql.plan.DropTableDesc;
 import org.apache.hadoop.hive.ql.plan.HiveOperation;
+import org.apache.hadoop.hive.ql.plan.PartitionSpec;
 import org.apache.hadoop.hive.ql.plan.ShowDatabasesDesc;
 import org.apache.hadoop.hive.ql.plan.ShowPartitionsDesc;
 import org.apache.hadoop.hive.ql.plan.ShowTableStatusDesc;
@@ -299,10 +299,16 @@ public class HCatSemanticAnalyzer extend
         // table is partitioned.
       } else {
         //this is actually a ALTER TABLE DROP PARITITION statement
-        for (Map<String, String> partSpec : dropTable.getPartSpecs()) {
+        for (PartitionSpec partSpec : dropTable.getPartSpecs()) {
           // partitions are not added as write entries in drop partitions in Hive
           Table table = hive.getTable(hive.getCurrentDatabase(), dropTable.getTableName());
-          List<Partition> partitions = hive.getPartitions(table, partSpec);
+          List<Partition> partitions = null;
+          try {
+            partitions = hive.getPartitionsByFilter(table, partSpec.toString());
+          } catch (Exception e) {
+            throw new HiveException(e);
+          }
+
           for (Partition part : partitions) {
             authorize(part, Privilege.DROP);
           }

Modified: incubator/hcatalog/branches/branch-0.4/src/test/e2e/hcatalog/build.xml
URL: http://svn.apache.org/viewvc/incubator/hcatalog/branches/branch-0.4/src/test/e2e/hcatalog/build.xml?rev=1325183&r1=1325182&r2=1325183&view=diff
==============================================================================
--- incubator/hcatalog/branches/branch-0.4/src/test/e2e/hcatalog/build.xml (original)
+++ incubator/hcatalog/branches/branch-0.4/src/test/e2e/hcatalog/build.xml Thu Apr 12 10:08:45 2012
@@ -17,9 +17,8 @@
 
 <project name="TestHarnessHCatTests" default="test">
 
-  <property name="hcat.dir" value="${basedir}/../../../../"/>
-  <property name="hive.dir" value="${basedir}/../../../../hive/external/"/>
-  <property name="hcat.jar" value="${hcat.dir}/build/hcatalog/hcatalog-${hcatalog.version}.jar"/>
+  <property name="hcat.jar"
+    value="${hcat.dir}/share/hcatalog/hcatalog-${hcatalog.version}.jar"/>
 
   <!-- Separate property name for udfs' build.xml -->
   <property name="hcat.jarfile" value="${hcat.jar}"/>
@@ -41,12 +40,14 @@
 
   <property name="harness.dir" value="${basedir}/../harness"/>
   <property name="harness.tar" value="${harness.dir}/harness.tar"/>
-  <property name="harness.PH_LOCAL" value="."/>
-  <property name="harness.PH_OUT" value="."/>
-
   <property name="test.location" value="${basedir}/testdist"/>
   <property name="benchmark.location" value="${test.location}/benchmarks"/>
-  <property name="hadoop.core.path" value="${harness.hadoop.home}"/>
+  <!--<property name="hadoop.core.path" value="${harness.hadoop.home}"/>-->
+  <property name="hadoop.core.path" value="${hadoop.home}"/>
+  <!-- Override on command line to use rpm.conf -->
+  <property name="harness.conf" value="${test.location}/conf/default.conf"/>
+  <!-- Default value for output directory -->
+  <property name="harness.PH_LOCAL" value="out"/>
 
   <!-- Build the UDFs -->
   <target name="udfs" >
@@ -60,13 +61,15 @@
   </path>
 
   <path id="hive.serde.jar.location">
-    <fileset dir="${hive.dir}/build/serde">
+    <!-- <fileset dir="${hive.dir}/build/serde"> -->
+    <fileset dir="${hive.home}/lib">
       <include name="hive-serde-*.jar"/>
     </fileset>
   </path>
 
   <path id="hive.ql.jar.location">
-    <fileset dir="${hive.dir}/build/ql">
+    <!--<fileset dir="${hive.dir}/build/ql"> -->
+    <fileset dir="${hive.home}/lib">
       <include name="hive-exec-*.jar"/>
     </fileset>
   </path>
@@ -142,83 +145,71 @@
 
   <!-- Check that the necessary properties are setup -->
   <target name="property-check">
-    <fail message="Please set the property harness.cluster.conf to the directory containing hadoop conf "
-      unless="harness.cluster.conf"/>
-    <fail message="Please set the property harness.hadoop.home to the path of your hadoop installation"
-      unless="harness.hadoop.home"/>
-    <fail message="Please set the property hive.metastore.uris to the hcat thrift server"
-      unless="hive.metastore.uris"/>
-    <fail message="Please set the property harness.pig.home to the path of your pig installation"
-      unless="harness.pig.home"/>
-
-  <dirset id="hcat.dist.lib.dir.fs" dir="${hcat.dir}/build" includes="hcatalog-*/share/hcatalog/lib"/>
-  <property name="hcat.dist.lib.dir" value="${hcat.dir}/build/${toString:hcat.dist.lib.dir.fs}"/>
-  <echo message="hcat libs: ${hcat.dist.lib.dir}"/>
-
-  <path id="hbase.jars.path">
-    <fileset dir="${hcat.dist.lib.dir}">
-     <include name="hbase-*.jar" />
-   </fileset>
-  </path>
-  <!-- override this for secure hbase -->
-  <property name="hbase.jars" refid="hbase.jars.path"/>
-
-  <path id="hcat.jars.path">
-    <fileset dir="${hcat.dir}/build/hcatalog">
-     <include name="hcatalog-*.jar" />
-     <exclude name="hcatalog-server-extensions-*.jar" />
-   </fileset>
-  </path>
-  <property name="hcat.jars" refid="hcat.jars.path"/>
-
-  <path id="hcat.client.deps">
-    <pathelement path="${hcat.jars}"/>
-    <pathelement path="${hbase.jars}"/>
-    <fileset dir="${hcat.dist.lib.dir}">
-     <include name="hbase-storage-handler-*.jar" />
-     <include name="zookeeper-*.jar" />
-     <include name="guava-*.jar" /><!-- for hbase storage handler -->
-   </fileset>
-   <!-- hbase-storage-handler only here with default ant target -->
-   <fileset dir="${hcat.dir}/storage-handlers/hbase/build/hbase-storage-handler-0.1.0/lib">
-     <include name="hbase-storage-handler-*.jar" />
-     <include name="zookeeper-*.jar" />
-   </fileset>
-   <fileset dir="${hive.dir}/build/dist/lib/" erroronmissingdir="false">
-     <include name="hive-*.jar" />
-     <include name="libfb303-*.jar" />
-     <include name="guava-*.jar" />
-     <include name="antlr*.jar" />
-   </fileset>
-  </path>
-
-  <pathconvert pathsep="," property="hadoop.libjars" refid="hcat.client.deps"/>
-  <property name="hive.conf.dir" value="${hcat.install.dir}/etc/hcatalog"/>
-
-  <path id="pig.classpath">
-     <path refid="hcat.client.deps" />
-     <pathelement location="${hive.conf.dir}" />
-  </path>
-  <property name="pig.classpath" refid="pig.classpath"/>
-
-  <!-- copy of above w/o antlr -->
-  <path id="pig.additional.jars">
-    <pathelement path="${hcat.jars}"/>
-    <pathelement path="${hbase.jars}"/>
-    <fileset dir="${hcat.dist.lib.dir}">
-     <include name="hbase-storage-handler-*.jar" />
-     <include name="zookeeper-*.jar" />
-     <include name="guava-*.jar" /><!-- for hbase storage handler -->
-   </fileset>
-   <fileset dir="${hive.dir}/build/dist/lib/" erroronmissingdir="false">
-     <include name="hive-*.jar" />
-     <include name="libfb303-*.jar" />
-     <!--include name="antlr*.jar" /-->
-   </fileset>
-   <pathelement location="${hive.conf.dir}" />
-  </path>
-  <property name="pig.additional.jars" refid="pig.additional.jars"/>
-
+    <!--
+    <fail message="Please set the property hadoop.home to the location Hadoop is installed "
+      unless="hadoop.home"/>
+      -->
+    <fail message="Please set the property hadoop.home to the location Hadoop is installed ">
+      <condition>
+        <and>
+          <not>
+            <isset property="hadoop.home"/>
+          </not>
+          <not>
+            <contains string="${harness.conf}" substring="rpm.conf"/>
+          </not>
+        </and>
+      </condition>
+    </fail>
+    <fail message="Please set the property hive.home to the location Hive is installed ">
+      <condition>
+        <and>
+          <not>
+            <isset property="hive.home"/>
+          </not>
+          <not>
+            <contains string="${harness.conf}" substring="rpm.conf"/>
+          </not>
+        </and>
+      </condition>
+    </fail>
+    <fail message="Please set the property hcat.home to the location HCatalog is installed ">
+      <condition>
+        <and>
+          <not>
+            <isset property="hcat.home"/>
+          </not>
+          <not>
+            <contains string="${harness.conf}" substring="rpm.conf"/>
+          </not>
+        </and>
+      </condition>
+    </fail>
+    <fail message="Please set the property pig.home to the location Pig is installed ">
+    <condition>
+        <and>
+          <not>
+            <isset property="pig.home"/>
+          </not>
+          <not>
+            <contains string="${harness.conf}" substring="rpm.conf"/>
+          </not>
+        </and>
+      </condition>
+    </fail>
+
+    <fail message="Please set the property hbase.home to the location HBase is installed ">
+      <condition>
+        <and>
+          <not>
+            <isset property="hbase.home"/>
+          </not>
+          <not>
+            <contains string="${harness.conf}" substring="rpm.conf"/>
+          </not>
+        </and>
+      </condition>
+    </fail>
   </target>
 
   <!-- Prep the test area -->
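
As a sketch, a tarball-based run that satisfies the checks above might pass the
required *.home properties on the ant command line (the /usr/local install
locations below are illustrative assumptions):

    ant test -Dhadoop.home=/usr/local/hadoop \
             -Dhive.home=/usr/local/hive \
             -Dhcat.home=/usr/local/hcatalog \
             -Dpig.home=/usr/local/pig \
             -Dhbase.home=/usr/local/hbase

The same properties apply to the deploy-test and deploy-test-undeploy targets,
since they run the same property-check.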
@@ -241,28 +232,15 @@
     <!-- If they have not specified tests to run then null it out -->
      <property name="tests.to.run" value=""/> 
     <echo />
-    <exec executable="./test_harness.pl" dir="${test.location}">
+    <exec executable="./test_harness.pl" dir="${test.location}" failonerror="true">
       <env key="HARNESS_ROOT" value="."/>
       <env key="PH_LOCAL" value="${harness.PH_LOCAL}"/>
-      <env key="PH_OUT" value="${harness.PH_OUT}"/>
-      <env key="PH_ROOT" value="."/>
-      <env key="HCAT_ROOT" value="${hcat.dir}"/>
-      <env key="HCAT_INSTALL_DIR" value="${hcat.install.dir}"/>
-      <env key="HIVE_ROOT" value="${hcat.dir}/hive/external/"/>
-      <env key="PIG_CLASSPATH" value="${pig.classpath}/"/>
-      <env key="HADOOP_LIBJARS" value="${hadoop.libjars}"/> 
-      <env key="HCAT_JARS" value="${hcat.jars}"/>
-      <env key="HADOOP_HOME" value="${harness.hadoop.home}/"/>
-      <env key="PH_OLDPIG" value="${harness.old.pig}"/>
-      <env key="PH_CLUSTER" value="${harness.cluster.conf}"/>
-      <env key="HCAT_URL" value="${hive.metastore.uris}"/>
-      <env key="METASTORE_PRINCIPAL" value="${metastore.principal}"/>
-      <env key="HIVE_HOME" value="${basedir}/../../../../hive/external/build/dist"/>
-      <env key="PH_CLUSTER_BIN" value="${harness.cluster.bin}"/>
-      <env key="PIG_HOME" value="${harness.pig.home}"/>
-      <env key="HBASE_CONF_DIR" value="${hbase.conf.dir}"/>
-      <env key="HIVE_CONF_DIR" value="${hive.conf.dir}"/>
-      <env key="PIG_ADDITIONAL_JARS" value="${pig.additional.jars}"/>
+      <env key="HADOOP_HOME" value="${hadoop.home}"/>
+      <env key="HIVE_HOME" value="${hive.home}"/>
+      <env key="HCAT_HOME" value="${hcat.home}"/>
+      <env key="PIG_HOME" value="${pig.home}"/>
+      <env key="HBASE_HOME" value="${hbase.home}"/>
+      <arg line="-conf ${harness.conf}"/>
       <arg line="${tests.to.run}"/>
       <arg value="${test.location}/tests/pig.conf"/>
       <arg value="${test.location}/tests/hive.conf"/>
@@ -275,29 +253,21 @@
      <!-- For now default to the existing cluster deployer, since 
     it's all there is.  Once the local deployer is available that
     should be the default. -->
-    <fail message="Please set the property harness.cluster.bin to the directory containing hadoop bin "
-      unless="harness.cluster.bin"/>
    <property name="deploy.conf"
         value="${test.location}/conf/existing_deployer.conf"/>
   </target>
 
   <target name="deploy-base" depends="property-check, tar, init-test, init-deploy">
-    <exec executable="./test_harness.pl" dir="${test.location}">
+    <exec executable="./test_harness.pl" dir="${test.location}"
+      failonerror="true">
       <env key="HARNESS_ROOT" value="."/>
       <env key="PH_LOCAL" value="${harness.PH_LOCAL}"/>
-      <env key="PH_OUT" value="${harness.PH_OUT}"/>
-      <env key="PH_ROOT" value="."/>
-      <env key="HADOOP_HOME" value="${harness.hadoop.home}/"/>
-      <env key="HIVE_ROOT" value="${hcat.dir}/hive/external/"/>
-      <env key="HCAT_ROOT" value="${hcat.dir}"/>
-      <env key="HCAT_INSTALL_DIR" value="${hcat.install.dir}"/>
-      <env key="PH_OLDPIG" value="${harness.old.pig}"/>
-      <env key="PH_CLUSTER" value="${harness.cluster.conf}"/>
-      <env key="PH_CLUSTER_BIN" value="${harness.cluster.bin}"/>
-      <env key="HIVE_HOME" value="../../../../hive/external"/>
-      <env key="PH_METASTORE_THRIFT" value="${harness.metastore.thrift}"/>
-      <env key="PIG_HOME" value="${harness.pig.home}"/>
-      <env key="PIG_JAR" value="${harness.pig.jar}"/> <!-- Pig jar without antlr -->
+      <env key="HADOOP_HOME" value="${hadoop.home}"/>
+      <env key="HIVE_HOME" value="${hive.home}"/>
+      <env key="HCAT_HOME" value="${hcat.home}"/>
+      <env key="PIG_HOME" value="${pig.home}"/>
+      <env key="HBASE_HOME" value="${hbase.home}"/>
+      <arg line="-conf ${harness.conf}"/>
       <arg value="-deploycfg"/>
       <arg value="${deploy.conf}"/>
       <arg value="${deploy.opt}"/>
@@ -319,35 +289,6 @@
     </antcall>
   </target>
 
-  <target name="install" depends="init-test">
-    <exec executable="./libexec/HCatTest/install.sh" dir="${test.location}">
-      <arg value="-D"/>
-      <arg value="${mysql.driver.home}"/>
-      <arg value="-d"/>
-      <arg value="${hcat.install.dir}"/>
-      <arg value="-f"/>
-      <arg value="${forrest.home}"/>
-      <arg value="-h"/>
-      <arg value="${harness.hadoop.home}"/>
-      <arg value="-m"/>
-      <arg value="localhost"/>
-      <arg value="-t"/>
-      <arg value="${hcat.tarball}"/>
-      <arg value="-p"/>
-      <arg value="${hcat.port}"/>
-      <arg value="-P"/>
-      <arg value="dbpassword"/>
-      <arg value="-w"/>
-      <arg value="/user/hive/warehouse"/>
-      <arg value="-s"/>
-      <arg value="${metastore.sasl.enabled}"/>
-      <arg value="-k"/>
-      <arg value="${metastore.keytabpath}"/>
-      <arg value="-K"/>
-      <arg value="${metastore.principal}"/>
-    </exec>
-  </target>
-
   <target name="deploy-test" depends="deploy, test"/>
 
   <target name="deploy-test-undeploy" depends="deploy, test, undeploy"/>

Modified: incubator/hcatalog/branches/branch-0.4/src/test/e2e/hcatalog/conf/default.conf
URL: http://svn.apache.org/viewvc/incubator/hcatalog/branches/branch-0.4/src/test/e2e/hcatalog/conf/default.conf?rev=1325183&r1=1325182&r2=1325183&view=diff
==============================================================================
--- incubator/hcatalog/branches/branch-0.4/src/test/e2e/hcatalog/conf/default.conf (original)
+++ incubator/hcatalog/branches/branch-0.4/src/test/e2e/hcatalog/conf/default.conf Thu Apr 12 10:08:45 2012
@@ -19,9 +19,6 @@ chomp $me;
 
 # The contents of this file can be rewritten to fit your installation.
 # Also, you can define the following environment variables and set things up as in the test setup
-# PH_ROOT     Root directory where test harness is installed
-# PH_LOCAL    Root directory for input and output for local mode tests
-# PH_OUT      Root directory where output data will be stored (on local disk, not HDFS)
 # PH_CLUSTER  Root directory for cluster being used
 # HCAT_ROOT   Root directory for hcat version being used
 
@@ -39,10 +36,12 @@ $cfg = {
     , 'localpathbase'     => "$ENV{PH_LOCAL}/out/pigtest/$me" 
 
     #TEST
-    , 'benchmarkPath'    => "$ENV{PH_OUT}/benchmarks"
-    , 'scriptPath'       => "$ENV{PH_ROOT}/libexec"
+    , 'benchmarkPath'    => "$ENV{HARNESS_ROOT}/benchmarks"
+    , 'scriptPath'       => "$ENV{HARNESS_ROOT}/libexec"
     , 'tmpPath'          => "/tmp/pigtest"
     , 'jythonjar'        => "$ENV{PH_JYTHON_JAR}"
+    , 'propertiesFile'     => "./conf/testpropertiesfile.conf"
+    , 'funcjarPath'      => "$ENV{HARNESS_ROOT}/lib/java"
 
     #TESTDB
     , 'dbuser'         => "$ENV{'PH_DBUSER'}" || 'hcattest'
@@ -56,33 +55,29 @@ $cfg = {
     , 'thriftserver' => "$ENV{HCAT_URL}"
 
     #HCAT
-    , 'hcat_data_dir'    => '/user/hcat/tests/data'
-    , 'hivehome'          => $ENV{'PH_HIVE_HOME'}
-    , 'hcathome'          => $ENV{'HCAT_INSTALL_DIR'}
-    , 'hcatalog.jar'          => $ENV{'HADOOP_LIBJARS'}
+    , 'hcathome'          => $ENV{'HCAT_HOME'}
+    , 'hcatshare'         => "$ENV{'HCAT_HOME'}/share/hcatalog"
+    , 'hcatlib'           => "$ENV{'HCAT_HOME'}/share/hcatalog/lib"
+    , 'hcatconf'          => "$ENV{'HCAT_HOME'}/etc/hcatalog"
+    , 'hcatbin'           => "$ENV{'HCAT_HOME'}/bin/hcat"
 
     #PIG
-    , 'testconfigpath'   => "$ENV{PH_CLUSTER}"
-    , 'hadoopbin'   => "$ENV{PH_CLUSTER_BIN}"
-    , 'funcjarPath'      => "$ENV{PH_ROOT}/lib/java"
-    , 'paramPath'        => "$ENV{PH_ROOT}/paramfiles"
-    , 'pigpath'          => "$ENV{PIG_HOME}"
-    , 'oldpigpath'       => "$ENV{PH_OLDPIG}"
-    , 'additionaljars' => "$ENV{PIG_ADDITIONAL_JARS}"
+    , 'pighome'          => $ENV{'PIG_HOME'}
+    , 'pigbin'           => "$ENV{'PIG_HOME'}/bin/pig"
 
     #HADOOP
-    , 'UNUSEDhadoopHome'       => "$ENV{HCAT_ROOT}/lib"
-    , 'userhomePath' => "$ENV{HOME}"
-    , 'local.bin'     => '/usr/bin'
-    , 'logDir'                => "$ENV{PH_OUT}/log" 
-    , 'propertiesFile'     => "./conf/testpropertiesfile.conf"
-    , 'harness.console.level' => 'ERROR'
+    , 'hadoopconfdir'    => "$ENV{'HADOOP_HOME'}/conf"
+    , 'hadoopbin'        => "$ENV{'HADOOP_HOME'}/bin/hadoop"
 
     #HIVE
-    , 'hive_bin_location' => "$ENV{HIVE_ROOT}/build/dist/bin" 
-    , 'hbaseconfigpath'   => "$ENV{HBASE_CONF_DIR}"
-    , 'hivehome' => "$ENV{HIVE_HOME}"
-    , 'hive.additionaljars' =>  "$ENV{HCAT_JARS}"
-    , 'hive.conf.dir' => "$ENV{HIVE_CONF_DIR}" || "$ENV{'HCAT_INSTALL_DIR'}/etc/hcatalog"
+    , 'hivehome'          => $ENV{'HIVE_HOME'}
+    , 'hivelib'           => "$ENV{'HIVE_HOME'}/lib"
+    , 'hivebin'           => "$ENV{'HIVE_HOME'}/bin/hive"
+    , 'hiveconf'          => "$ENV{'HIVE_HOME'}/conf"
+  
+    #HBASE
+    , 'hbaseconf'         => "$ENV{'HBASE_HOME'}/conf"
+    , 'hbaselibdir'       => "$ENV{'HBASE_HOME'}/"
+    , 'zklibdir'          => "$ENV{'HBASE_HOME'}/lib"
 
 };

Added: incubator/hcatalog/branches/branch-0.4/src/test/e2e/hcatalog/conf/envbased.conf
URL: http://svn.apache.org/viewvc/incubator/hcatalog/branches/branch-0.4/src/test/e2e/hcatalog/conf/envbased.conf?rev=1325183&view=auto
==============================================================================
--- incubator/hcatalog/branches/branch-0.4/src/test/e2e/hcatalog/conf/envbased.conf (added)
+++ incubator/hcatalog/branches/branch-0.4/src/test/e2e/hcatalog/conf/envbased.conf Thu Apr 12 10:08:45 2012
@@ -0,0 +1,78 @@
+############################################################################
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+
+my $me = `whoami`;
+chomp $me;
+
+# The contents of this file can be rewritten to fit your installation.
+# Alternatively, leave it as-is and define the environment variables it
+# references below: HADOOP_HOME, HIVE_HOME, HCAT_HOME, PIG_HOME, HBASE_HOME,
+# the HADOOP_CONF_DIR/HIVE_CMD/HCAT_* overrides, and the optional PH_* settings.
+
+my $hdfsBase = $ENV{'PH_HDFS_BASE'} || "/user/hcat";
+
+$cfg = {
+    #HDFS
+    'inpathbase'     => "$hdfsBase/tests/data"
+    , 'outpathbase'    => "$hdfsBase/out"
+
+   #LOCAL
+    , 'localinpathbase'   => "$ENV{PH_LOCAL}/in" 
+    , 'localoutpathbase'  => "$ENV{PH_LOCAL}/out/log" 
+    , 'localxmlpathbase'  => "$ENV{PH_LOCAL}/out/xml" 
+    , 'localpathbase'     => "$ENV{PH_LOCAL}/out/pigtest/$me" 
+
+    #TEST
+    , 'benchmarkPath'    => "$ENV{HARNESS_ROOT}/benchmarks"
+    , 'scriptPath'       => "$ENV{HARNESS_ROOT}/libexec"
+    , 'tmpPath'          => "/tmp/pigtest"
+    , 'jythonjar'        => "$ENV{PH_JYTHON_JAR}"
+    , 'propertiesFile'     => "./conf/testpropertiesfile.conf"
+    , 'funcjarPath'      => "$ENV{HARNESS_ROOT}/lib/java"
+
+    #TESTDB
+    , 'dbuser'         => "$ENV{'PH_DBUSER'}" || 'hcattest'
+    , 'dbhost'         => "$ENV{'PH_DBHOST'}" || 'localhost'
+    , 'dbpasswd'       => "$ENV{'PH_DBPASSWD'}" || 'hcattest'
+    , 'dbdb'           => "$ENV{'PH_DBDB'}" || 'hcattestdb'
+
+    #HCAT
+    , 'hcathome'          => $ENV{'HCAT_HOME'}
+    , 'hcatshare'         => $ENV{'HCAT_SHARE_DIR'}
+    , 'hcatlib'           => $ENV{'HCAT_LIB_DIR'}
+    , 'hcatconf'          => $ENV{'HCAT_CONF_DIR'}
+    , 'hcatbin'           => $ENV{'HCAT_CMD'}
+
+    #PIG
+    , 'pighome'          => $ENV{'PIG_HOME'}
+    , 'pigbin'           => "$ENV{'PIG_HOME'}/bin/pig"
+
+    #HADOOP
+    , 'hadoopconfdir'    => $ENV{'HADOOP_CONF_DIR'}
+    , 'hadoopbin'        => "$ENV{'HADOOP_HOME'}/bin/hadoop"
+
+    #HIVE
+    , 'hivehome'          => $ENV{'HIVE_HOME'}
+    , 'hivelib'           => "$ENV{'HIVE_HOME'}/lib"
+    , 'hivebin'           => $ENV{'HIVE_CMD'}
+    , 'hiveconf'          => "$ENV{'HIVE_HOME'}/conf"
+
+    #HBASE
+    , 'hbaseconf'         => "$ENV{'HBASE_HOME'}/conf"
+    , 'hbaselibdir'       => "$ENV{'HBASE_HOME'}/"
+    , 'zklibdir'          => "$ENV{'HBASE_HOME'}/lib"
+
+};
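
A minimal sketch of driving envbased.conf from the environment, assuming the
/usr/lib install locations that rpm.conf below also uses (all paths are
illustrative):

    export HADOOP_HOME=/usr/lib/hadoop
    export HADOOP_CONF_DIR=$HADOOP_HOME/conf
    export HIVE_HOME=/usr/lib/hive
    export HIVE_CMD=$HIVE_HOME/bin/hive
    export HCAT_HOME=/usr/lib/hcatalog
    export HCAT_SHARE_DIR=$HCAT_HOME/share/hcatalog
    export HCAT_LIB_DIR=$HCAT_HOME/share/hcatalog/lib
    export HCAT_CONF_DIR=$HCAT_HOME/etc/hcatalog
    export HCAT_CMD=$HCAT_HOME/bin/hcat
    export PIG_HOME=/usr/lib/pig
    export HBASE_HOME=/usr/lib/hbase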

Modified: incubator/hcatalog/branches/branch-0.4/src/test/e2e/hcatalog/conf/existing_deployer.conf
URL: http://svn.apache.org/viewvc/incubator/hcatalog/branches/branch-0.4/src/test/e2e/hcatalog/conf/existing_deployer.conf?rev=1325183&r1=1325182&r2=1325183&view=diff
==============================================================================
--- incubator/hcatalog/branches/branch-0.4/src/test/e2e/hcatalog/conf/existing_deployer.conf (original)
+++ incubator/hcatalog/branches/branch-0.4/src/test/e2e/hcatalog/conf/existing_deployer.conf Thu Apr 12 10:08:45 2012
@@ -26,7 +26,7 @@ $cfg = {
 	
 	# hadoop values
 	'hadoopdir'   => $ENV{'PH_CLUSTER'},
-        'hcat_data_dir'  => ("$ENV{'PH_HDFS_BASE'}" || '/user/hcat').'/test/data',
+        'hcat_data_dir'  => ("$ENV{'PH_HDFS_BASE'}" || '/user/hcat').'/tests/data',
 
 	# db values
 # 	'dbuser' => 'pigtester',

Added: incubator/hcatalog/branches/branch-0.4/src/test/e2e/hcatalog/conf/rpm.conf
URL: http://svn.apache.org/viewvc/incubator/hcatalog/branches/branch-0.4/src/test/e2e/hcatalog/conf/rpm.conf?rev=1325183&view=auto
==============================================================================
--- incubator/hcatalog/branches/branch-0.4/src/test/e2e/hcatalog/conf/rpm.conf (added)
+++ incubator/hcatalog/branches/branch-0.4/src/test/e2e/hcatalog/conf/rpm.conf Thu Apr 12 10:08:45 2012
@@ -0,0 +1,84 @@
+############################################################################
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+
+my $me = `whoami`;
+chomp $me;
+
+# This file assumes the standard RPM install locations under /usr/lib and
+# /usr/bin; the HCAT_HOME, HADOOP_HOME, HIVE_HOME, HBASE_HOME, and PIG_HOME
+# settings below are hardcoded accordingly.  Rewrite the paths to fit your
+# installation if it differs.
+
+my $hdfsBase = $ENV{'PH_HDFS_BASE'} || "/user/hcat";
+
+$ENV{'HCAT_HOME'} = "/usr/lib/hcatalog";
+$ENV{'HADOOP_HOME'} = "/usr/lib/hadoop";
+$ENV{'HIVE_HOME'} = "/usr/lib/hive";
+$ENV{'HBASE_HOME'} = "/usr/lib/hbase";
+$ENV{'PIG_HOME'} = "/usr/lib/pig";
+
+$cfg = {
+    #HDFS
+    'inpathbase'     => "$hdfsBase/tests/data"
+    , 'outpathbase'    => "$hdfsBase/out"
+
+   #LOCAL
+    , 'localinpathbase'   => "$ENV{PH_LOCAL}/in" 
+    , 'localoutpathbase'  => "$ENV{PH_LOCAL}/out/log" 
+    , 'localxmlpathbase'  => "$ENV{PH_LOCAL}/out/xml" 
+    , 'localpathbase'     => "$ENV{PH_LOCAL}/out/pigtest/$me" 
+
+    #TEST
+    , 'benchmarkPath'    => "$ENV{HARNESS_ROOT}/benchmarks"
+    , 'scriptPath'       => "$ENV{HARNESS_ROOT}/libexec"
+    , 'tmpPath'          => "/tmp/pigtest"
+    , 'jythonjar'        => "$ENV{PH_JYTHON_JAR}"
+    , 'propertiesFile'     => "./conf/testpropertiesfile.conf"
+    , 'funcjarPath'      => "$ENV{HARNESS_ROOT}/lib/java"
+
+    #TESTDB
+    , 'dbuser'         => "$ENV{'PH_DBUSER'}" || 'hcattest'
+    , 'dbhost'         => "$ENV{'PH_DBHOST'}" || 'localhost'
+    , 'dbpasswd'       => "$ENV{'PH_DBPASSWD'}" || 'hcattest'
+    , 'dbdb'           => "$ENV{'PH_DBDB'}" || 'hcattestdb'
+
+    #HCAT
+    , 'hcathome'          => $ENV{'HCAT_HOME'}
+    , 'hcatshare'         => "$ENV{'HCAT_HOME'}/share/hcatalog"
+    , 'hcatlib'           => "$ENV{'HCAT_HOME'}/lib"
+    , 'hcatconf'          => "$ENV{'HCAT_HOME'}/conf"
+    , 'hcatbin'           => "$ENV{'HCAT_HOME'}/bin/hcat"
+
+    #PIG
+    , 'pighome'          => $ENV{'PIG_HOME'}
+    , 'pigbin'           => "/usr/bin/pig"
+
+    #HADOOP
+    , 'hadoopconfdir'    => "$ENV{'HADOOP_HOME'}/conf"
+    , 'hadoopbin'        => "/usr/bin/hadoop"
+
+    #HIVE
+    , 'hivehome'          => $ENV{'HIVE_HOME'}
+    , 'hivelib'           => "$ENV{'HIVE_HOME'}/lib"
+    , 'hivebin'           => "/usr/bin/hive"
+    , 'hiveconf'          => "$ENV{'HIVE_HOME'}/conf"
+
+    #HBASE
+    , 'hbaseconf'         => "$ENV{'HBASE_HOME'}/conf"
+    , 'hbaselibdir'       => "$ENV{'HBASE_HOME'}/"
+    , 'zklibdir'          => "$ENV{'HBASE_HOME'}/lib"
+
+};
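
Because rpm.conf hardcodes the RPM install locations and property-check skips
the *.home property checks when harness.conf contains "rpm.conf", an RPM-based
run reduces to overriding harness.conf; run from src/test/e2e/hcatalog, this
might look like:

    ant test -Dharness.conf=$(pwd)/testdist/conf/rpm.conf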

Modified: incubator/hcatalog/branches/branch-0.4/src/test/e2e/hcatalog/deployers/HCatExistingClusterDeployer.pm
URL: http://svn.apache.org/viewvc/incubator/hcatalog/branches/branch-0.4/src/test/e2e/hcatalog/deployers/HCatExistingClusterDeployer.pm?rev=1325183&r1=1325182&r2=1325183&view=diff
==============================================================================
--- incubator/hcatalog/branches/branch-0.4/src/test/e2e/hcatalog/deployers/HCatExistingClusterDeployer.pm (original)
+++ incubator/hcatalog/branches/branch-0.4/src/test/e2e/hcatalog/deployers/HCatExistingClusterDeployer.pm Thu Apr 12 10:08:45 2012
@@ -70,6 +70,14 @@ sub checkPrerequisites
         print $log "You must set the environment variable HADOOP_HOME";
         die "HADOOP_HOME not defined";
     }
+    if (! defined $ENV{'HCAT_HOME'} || $ENV{'HCAT_HOME'} eq "") {
+        print $log "You must set the environment variable HCAT_HOME";
+        die "HCAT_HOME not defined";
+    }
+    if (! defined $ENV{'HIVE_HOME'} || $ENV{'HIVE_HOME'} eq "") {
+        print $log "You must set the environment variable HIVEOP_HOME";
+        die "HIVE_HOME not defined";
+    }
 
     # Run a quick and easy Hadoop command to make sure we can
     Util::runHadoopCmd($cfg, $log, "fs -ls /");
@@ -171,9 +179,9 @@ sub generateData
     );
 
     
-    if (defined($cfg->{'load_hive_only'}) && $cfg->{'load_hive_only'} == 1) {
-        return $self->hiveMetaOnly($cfg, $log, \@tables);
-    }
+#   if (defined($cfg->{'load_hive_only'}) && $cfg->{'load_hive_only'} == 1) {
+#       return $self->hiveMetaOnly($cfg, $log, \@tables);
+#   }
 
     # Create the HDFS directories
     Util::runHadoopCmd($cfg, $log, "fs -mkdir $cfg->{'hcat_data_dir'}");

Modified: incubator/hcatalog/branches/branch-0.4/src/test/e2e/hcatalog/drivers/TestDriverHCat.pm
URL: http://svn.apache.org/viewvc/incubator/hcatalog/branches/branch-0.4/src/test/e2e/hcatalog/drivers/TestDriverHCat.pm?rev=1325183&r1=1325182&r2=1325183&view=diff
==============================================================================
--- incubator/hcatalog/branches/branch-0.4/src/test/e2e/hcatalog/drivers/TestDriverHCat.pm (original)
+++ incubator/hcatalog/branches/branch-0.4/src/test/e2e/hcatalog/drivers/TestDriverHCat.pm Thu Apr 12 10:08:45 2012
@@ -53,47 +53,7 @@ sub new
     return $self;
 }
 
-sub replaceParameters
-{
-##!!! Move this to Util.pm
-
-    my ($self, $cmd, $outfile, $testCmd, $log) = @_;
-
-    # $self
-    $cmd =~ s/:LATESTOUTPUTPATH:/$self->{'latestoutputpath'}/g;
-
-    # $outfile
-    $cmd =~ s/:OUTPATH:/$outfile/g;
-
-    # $ENV
-    $cmd =~ s/:PIGHARNESS:/$ENV{HARNESS_ROOT}/g;
-
-    # $testCmd
-    $cmd =~ s/:INPATH:/$testCmd->{'inpathbase'}/g;
-    $cmd =~ s/:OUTPATH:/$outfile/g;
-    $cmd =~ s/:FUNCPATH:/$testCmd->{'funcjarPath'}/g;
-    $cmd =~ s/:PIGPATH:/$testCmd->{'pigpath'}/g;
-    $cmd =~ s/:RUNID:/$testCmd->{'UID'}/g;
-    $cmd =~ s/:USRHOMEPATH:/$testCmd->{'userhomePath'}/g;
-    $cmd =~ s/:MAPREDJARS:/$testCmd->{'mapredjars'}/g;
-    $cmd =~ s/:SCRIPTHOMEPATH:/$testCmd->{'scriptPath'}/g;
-    $cmd =~ s/:DBUSER:/$testCmd->{'dbuser'}/g;
-    $cmd =~ s/:DBNAME:/$testCmd->{'dbdb'}/g;
-#    $cmd =~ s/:LOCALINPATH:/$testCmd->{'localinpathbase'}/g;
-#    $cmd =~ s/:LOCALOUTPATH:/$testCmd->{'localoutpathbase'}/g;
-#    $cmd =~ s/:LOCALTESTPATH:/$testCmd->{'localpathbase'}/g;
-    $cmd =~ s/:BMPATH:/$testCmd->{'benchmarkPath'}/g;
-    $cmd =~ s/:TMP:/$testCmd->{'tmpPath'}/g;
-    $cmd =~ s/:HDFSTMP:/tmp\/$testCmd->{'runid'}/g;
 
-    if ( $testCmd->{'hadoopSecurity'} eq "secure" ) { 
-      $cmd =~ s/:REMOTECLUSTER:/$testCmd->{'remoteSecureCluster'}/g;
-    } else {
-      $cmd =~ s/:REMOTECLUSTER:/$testCmd->{'remoteNotSecureCluster'}/g;
-    }
-
-    return $cmd;
-}
 
 sub globalSetup
 {
@@ -152,7 +112,7 @@ sub runHCatCmdLine
     my $subName = (caller(0))[3];
     my %result;
     my $outfile = $testCmd->{'outpath'} . $testCmd->{'group'} . "_" . $testCmd->{'num'} . ".out";
-    my $hcatCmd = $self->replaceParameters( $testCmd->{'hcat'}, $outfile, $testCmd, $log);
+    my $hcatCmd = Util::replaceParameters( $testCmd->{'hcat'}, $outfile, $testCmd, $log);
     my $outdir  = $testCmd->{'localpath'} . $testCmd->{'group'} . "_" . $testCmd->{'num'} . ".out";
     my ($stdoutfile, $stderrfile);