Posted to hcatalog-commits@incubator.apache.org by ha...@apache.org on 2011/12/06 20:05:39 UTC

svn commit: r1211077 [7/7] - in /incubator/hcatalog/trunk: ./ conf/ src/test/e2e/hcatalog/ src/test/e2e/hcatalog/conf/ src/test/e2e/hcatalog/deployers/ src/test/e2e/hcatalog/drivers/ src/test/e2e/hcatalog/tests/ src/test/e2e/hcatalog/tools/generate/ sr...

Added: incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/ReadWrite.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/ReadWrite.java?rev=1211077&view=auto
==============================================================================
--- incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/ReadWrite.java (added)
+++ incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/ReadWrite.java Tue Dec  6 20:05:37 2011
@@ -0,0 +1,111 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hcatalog.utils;
+
+import java.io.IOException;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.conf.Configured;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.WritableComparable;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.Mapper;
+import org.apache.hadoop.util.GenericOptionsParser;
+import org.apache.hadoop.util.Tool;
+import org.apache.hadoop.util.ToolRunner;
+import org.apache.hcatalog.common.HCatConstants;
+import org.apache.hcatalog.data.DefaultHCatRecord;
+import org.apache.hcatalog.data.HCatRecord;
+import org.apache.hcatalog.data.schema.HCatSchema;
+import org.apache.hcatalog.mapreduce.HCatInputFormat;
+import org.apache.hcatalog.mapreduce.HCatOutputFormat;
+import org.apache.hcatalog.mapreduce.InputJobInfo;
+import org.apache.hcatalog.mapreduce.OutputJobInfo;
+
+/**
+ * A map reduce test for HCatalog that exercises both reading and writing.
+ * It reads each record of the input table, emits it keyed by its first
+ * (name) column, and writes it unchanged to the output table, to verify
+ * that hcat hands the right data to a map reduce program and stores what
+ * the program writes back.
+ *
+ * Usage: hadoop jar <test jar> org.apache.hcatalog.utils.ReadWrite
+ * -libjars <hcat jar> <serveruri> <input table> <output table>
+ * The hcat jar location should be specified as file://<full path to jar>
+ */
+public class ReadWrite extends Configured implements Tool {
+
+    public static class Map extends
+            Mapper<WritableComparable, HCatRecord, Text, HCatRecord> {
+
+        String name;
+        int age;
+        double gpa;
+
+        @Override
+        protected void map(
+                WritableComparable key,
+                HCatRecord value,
+                org.apache.hadoop.mapreduce.Mapper<WritableComparable, HCatRecord, Text, HCatRecord>.Context context)
+                throws IOException, InterruptedException {
+            // The casts double as assertions on the Java types hcat hands
+            // back for each column.
+            name = (String) value.get(0);
+            age = (Integer) value.get(1);
+            gpa = (Double) value.get(2);
+            context.write(new Text(name), value);
+        }
+    }
+
+    public int run(String[] args) throws Exception {
+        Configuration conf = getConf();
+        args = new GenericOptionsParser(conf, args).getRemainingArgs();
+
+        String serverUri = args[0];
+        String inputTableName = args[1];
+        String outputTableName = args[2];
+        String dbName = null;
+
+        String principalID = System
+                .getProperty(HCatConstants.HCAT_METASTORE_PRINCIPAL);
+        if (principalID != null)
+            conf.set(HCatConstants.HCAT_METASTORE_PRINCIPAL, principalID);
+        Job job = new Job(conf, "ReadWrite");
+        HCatInputFormat.setInput(job, InputJobInfo.create(dbName,
+                inputTableName, null, serverUri, principalID));
+
+        // Wire up formats, mapper, and key/value types, then point
+        // HCatOutputFormat at the output table with the input table's schema.
+        job.setInputFormatClass(HCatInputFormat.class);
+        job.setJarByClass(ReadWrite.class);
+        job.setMapperClass(Map.class);
+        job.setOutputKeyClass(Text.class);
+        job.setOutputValueClass(DefaultHCatRecord.class);
+        HCatOutputFormat.setOutput(job, OutputJobInfo.create(dbName,
+                outputTableName, null, serverUri, principalID));
+        HCatSchema s = HCatInputFormat.getTableSchema(job);
+        System.err.println("INFO: output schema explicitly set for writing: "
+                + s);
+        HCatOutputFormat.setSchema(job, s);
+        job.setOutputFormatClass(HCatOutputFormat.class);
+        return (job.waitForCompletion(true) ? 0 : 1);
+    }
+
+    public static void main(String[] args) throws Exception {
+        int exitCode = ToolRunner.run(new ReadWrite(), args);
+        System.exit(exitCode);
+    }
+}
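
For context, a hypothetical invocation of ReadWrite (the jar name, metastore
URI, and table names below are illustrative, not taken from this commit;
generic options such as -libjars must come before the positional arguments so
that GenericOptionsParser can strip them):

    hadoop jar hcat-e2e-tests.jar org.apache.hcatalog.utils.ReadWrite \
        -libjars file:///path/to/hcatalog.jar \
        thrift://localhost:9083 studenttab10k studenttab10k_copy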

Added: incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/SimpleRead.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/SimpleRead.java?rev=1211077&view=auto
==============================================================================
--- incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/SimpleRead.java (added)
+++ incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/SimpleRead.java Tue Dec  6 20:05:37 2011
@@ -0,0 +1,107 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hcatalog.utils;
+
+import java.io.IOException;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.conf.Configured;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.DoubleWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.WritableComparable;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.Mapper;
+import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
+import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
+import org.apache.hadoop.util.GenericOptionsParser;
+import org.apache.hadoop.util.Tool;
+import org.apache.hadoop.util.ToolRunner;
+import org.apache.hcatalog.common.HCatConstants;
+import org.apache.hcatalog.data.HCatRecord;
+import org.apache.hcatalog.mapreduce.HCatInputFormat;
+import org.apache.hcatalog.mapreduce.InputJobInfo;
+
+/**
+ * A map reduce test for HCatalog reads. It goes against a table with
+ * (name, age, gpa) columns such as "studenttab10k", emits one (name, gpa)
+ * pair per record, and writes the pairs as tab-separated text to the output
+ * directory, to verify that hcat hands the right data to a map reduce
+ * program.
+ *
+ * Usage: hadoop jar <test jar> org.apache.hcatalog.utils.SimpleRead
+ * -libjars <hcat jar> <serveruri> <table name> <output dir>
+ * The hcat jar location should be specified as file://<full path to jar>
+ */
+public class SimpleRead extends Configured implements Tool {
+
+    public static class Map extends
+            Mapper<WritableComparable, HCatRecord, Text, DoubleWritable> {
+
+        String name;
+        int age;
+        double gpa;
+
+        @Override
+        protected void map(
+                WritableComparable key,
+                HCatRecord value,
+                org.apache.hadoop.mapreduce.Mapper<WritableComparable, HCatRecord, Text, DoubleWritable>.Context context)
+                throws IOException, InterruptedException {
+            // Emit name -> gpa; the casts double as assertions on the Java
+            // types hcat hands back for each column.
+            name = (String) value.get(0);
+            age = (Integer) value.get(1);
+            gpa = (Double) value.get(2);
+            context.write(new Text(name), new DoubleWritable(gpa));
+        }
+    }
+
+    public int run(String[] args) throws Exception {
+        Configuration conf = getConf();
+        args = new GenericOptionsParser(conf, args).getRemainingArgs();
+
+        String serverUri = args[0];
+        String tableName = args[1];
+        String outputDir = args[2];
+        String dbName = null;
+
+        String principalID = System
+                .getProperty(HCatConstants.HCAT_METASTORE_PRINCIPAL);
+        if (principalID != null)
+            conf.set(HCatConstants.HCAT_METASTORE_PRINCIPAL, principalID);
+        Job job = new Job(conf, "SimpleRead");
+        HCatInputFormat.setInput(job, InputJobInfo.create(dbName,
+                tableName, null, serverUri, principalID));
+
+        // Read through HCat, write plain tab-separated text to outputDir.
+        job.setInputFormatClass(HCatInputFormat.class);
+        job.setOutputFormatClass(TextOutputFormat.class);
+        job.setJarByClass(SimpleRead.class);
+        job.setMapperClass(Map.class);
+        job.setOutputKeyClass(Text.class);
+        job.setOutputValueClass(DoubleWritable.class);
+        FileOutputFormat.setOutputPath(job, new Path(outputDir));
+        return (job.waitForCompletion(true) ? 0 : 1);
+    }
+
+    public static void main(String[] args) throws Exception {
+        int exitCode = ToolRunner.run(new SimpleRead(), args);
+        System.exit(exitCode);
+    }
+}
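
A note on SimpleRead's output: TextOutputFormat joins each key and value with
a tab by default, so with a studenttab10k-style input the part files contain
lines of the form name<TAB>gpa. For illustration only (these rows are made
up, not drawn from any real dataset):

    alice allen	2.27
    bob brown	3.08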

Added: incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/WriteJson.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/WriteJson.java?rev=1211077&view=auto
==============================================================================
--- incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/WriteJson.java (added)
+++ incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/WriteJson.java Tue Dec  6 20:05:37 2011
@@ -0,0 +1,119 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hcatalog.utils;
+
+import java.io.IOException;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.conf.Configured;
+import org.apache.hadoop.io.WritableComparable;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.Mapper;
+import org.apache.hadoop.util.GenericOptionsParser;
+import org.apache.hadoop.util.Tool;
+import org.apache.hadoop.util.ToolRunner;
+import org.apache.hcatalog.common.HCatConstants;
+import org.apache.hcatalog.data.DefaultHCatRecord;
+import org.apache.hcatalog.data.HCatRecord;
+import org.apache.hcatalog.data.schema.HCatSchema;
+import org.apache.hcatalog.mapreduce.HCatInputFormat;
+import org.apache.hcatalog.mapreduce.HCatOutputFormat;
+import org.apache.hcatalog.mapreduce.InputJobInfo;
+import org.apache.hcatalog.mapreduce.OutputJobInfo;
+
+/**
+ * A map reduce test for HCatalog writes. It reads a string, an integer, and
+ * a double column from the input table, copies them null-safely into the
+ * first three fields of a five-column record, and writes that record to the
+ * output table, which is expected to use JSON storage (the format comes
+ * from the table's metadata, not from this code).
+ *
+ * Usage: hadoop jar <test jar> org.apache.hcatalog.utils.WriteJson
+ * -libjars <hcat jar> <serveruri> <input table> <output table>
+ * The hcat jar location should be specified as file://<full path to jar>
+ */
+public class WriteJson extends Configured implements Tool {
+
+    public static class Map extends
+            Mapper<WritableComparable, HCatRecord, WritableComparable, HCatRecord> {
+
+        String s;
+        Integer i;
+        Double d;
+
+        @Override
+        protected void map(
+                WritableComparable key,
+                HCatRecord value,
+                org.apache.hadoop.mapreduce.Mapper<WritableComparable, HCatRecord, WritableComparable, HCatRecord>.Context context)
+                throws IOException, InterruptedException {
+            // A null input field stays null: the cast is a no-op on null.
+            s = (String) value.get(0);
+            i = (Integer) value.get(1);
+            d = (Double) value.get(2);
+
+            // Copy the three fields into a five-column record; the trailing
+            // two columns are left null.
+            HCatRecord record = new DefaultHCatRecord(5);
+            record.set(0, s);
+            record.set(1, i);
+            record.set(2, d);
+
+            context.write(null, record);
+        }
+    }
+
+    public int run(String[] args) throws Exception {
+        Configuration conf = getConf();
+        args = new GenericOptionsParser(conf, args).getRemainingArgs();
+
+        String serverUri = args[0];
+        String inputTableName = args[1];
+        String outputTableName = args[2];
+        String dbName = null;
+
+        String principalID = System
+                .getProperty(HCatConstants.HCAT_METASTORE_PRINCIPAL);
+        if (principalID != null)
+            conf.set(HCatConstants.HCAT_METASTORE_PRINCIPAL, principalID);
+        Job job = new Job(conf, "WriteJson");
+        HCatInputFormat.setInput(job, InputJobInfo.create(dbName,
+                inputTableName, null, serverUri, principalID));
+
+        // Map-only job: wire up formats and key/value types, then point
+        // HCatOutputFormat at the output table with the input table's schema.
+        job.setInputFormatClass(HCatInputFormat.class);
+        job.setJarByClass(WriteJson.class);
+        job.setMapperClass(Map.class);
+        job.setOutputKeyClass(WritableComparable.class);
+        job.setOutputValueClass(DefaultHCatRecord.class);
+        job.setNumReduceTasks(0);
+        HCatOutputFormat.setOutput(job, OutputJobInfo.create(dbName,
+                outputTableName, null, serverUri, principalID));
+        HCatSchema s = HCatInputFormat.getTableSchema(job);
+        System.err.println("INFO: output schema explicitly set for writing: "
+                + s);
+        HCatOutputFormat.setSchema(job, s);
+        job.setOutputFormatClass(HCatOutputFormat.class);
+        return (job.waitForCompletion(true) ? 0 : 1);
+    }
+
+    public static void main(String[] args) throws Exception {
+        int exitCode = ToolRunner.run(new WriteJson(), args);
+        System.exit(exitCode);
+    }
+}
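
The null-guarding pattern in WriteJson's mapper generalizes. A hypothetical
helper (not part of this commit) that copies the first n fields of a record
into a wider output record, preserving nulls, could look like:

    // Copy fields [0, n) of 'in' into a new record with 'size' columns;
    // columns [n, size) stay null. HCatRecord.get() returns null for null
    // fields, so no explicit null check is needed before set().
    static HCatRecord copyPrefix(HCatRecord in, int n, int size) {
        HCatRecord out = new DefaultHCatRecord(size);
        for (int col = 0; col < n; col++) {
            out.set(col, in.get(col));
        }
        return out;
    }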

Added: incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/WriteRC.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/WriteRC.java?rev=1211077&view=auto
==============================================================================
--- incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/WriteRC.java (added)
+++ incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/WriteRC.java Tue Dec  6 20:05:37 2011
@@ -0,0 +1,119 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hcatalog.utils;
+
+import java.io.IOException;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.conf.Configured;
+import org.apache.hadoop.io.WritableComparable;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.Mapper;
+import org.apache.hadoop.util.GenericOptionsParser;
+import org.apache.hadoop.util.Tool;
+import org.apache.hadoop.util.ToolRunner;
+import org.apache.hcatalog.common.HCatConstants;
+import org.apache.hcatalog.data.DefaultHCatRecord;
+import org.apache.hcatalog.data.HCatRecord;
+import org.apache.hcatalog.data.schema.HCatSchema;
+import org.apache.hcatalog.mapreduce.HCatInputFormat;
+import org.apache.hcatalog.mapreduce.HCatOutputFormat;
+import org.apache.hcatalog.mapreduce.InputJobInfo;
+import org.apache.hcatalog.mapreduce.OutputJobInfo;
+
+/**
+ * A map reduce test for HCatalog writes. It reads the (name, age, gpa)
+ * columns of the input table, copies them null-safely into the first three
+ * fields of a five-column record, and writes that record to the output
+ * table, which is expected to use RCFile storage (the format comes from the
+ * table's metadata, not from this code).
+ *
+ * Usage: hadoop jar <test jar> org.apache.hcatalog.utils.WriteRC
+ * -libjars <hcat jar> <serveruri> <input table> <output table>
+ * The hcat jar location should be specified as file://<full path to jar>
+ */
+public class WriteRC extends Configured implements Tool {
+
+    public static class Map extends
+            Mapper<WritableComparable, HCatRecord, WritableComparable, HCatRecord> {
+
+        String name;
+        Integer age;
+        Double gpa;
+        
+        @Override
+        protected void map(
+                WritableComparable key,
+                HCatRecord value,
+                org.apache.hadoop.mapreduce.Mapper<WritableComparable, HCatRecord, WritableComparable, HCatRecord>.Context context)
+                throws IOException, InterruptedException {
+            // A null input field stays null: the cast is a no-op on null.
+            name = (String) value.get(0);
+            age = (Integer) value.get(1);
+            gpa = (Double) value.get(2);
+
+            // Copy the three fields into a five-column record; the trailing
+            // two columns are left null.
+            HCatRecord record = new DefaultHCatRecord(5);
+            record.set(0, name);
+            record.set(1, age);
+            record.set(2, gpa);
+
+            context.write(null, record);
+        }
+    }
+
+    public int run(String[] args) throws Exception {
+        Configuration conf = getConf();
+        args = new GenericOptionsParser(conf, args).getRemainingArgs();
+
+        String serverUri = args[0];
+        String inputTableName = args[1];
+        String outputTableName = args[2];
+        String dbName = null;
+
+        String principalID = System
+                .getProperty(HCatConstants.HCAT_METASTORE_PRINCIPAL);
+        if (principalID != null)
+            conf.set(HCatConstants.HCAT_METASTORE_PRINCIPAL, principalID);
+        Job job = new Job(conf, "WriteRC");
+        HCatInputFormat.setInput(job, InputJobInfo.create(dbName,
+                inputTableName, null, serverUri, principalID));
+
+        // Map-only job: wire up formats and key/value types, then point
+        // HCatOutputFormat at the output table with the input table's schema.
+        job.setInputFormatClass(HCatInputFormat.class);
+        job.setJarByClass(WriteRC.class);
+        job.setMapperClass(Map.class);
+        job.setOutputKeyClass(WritableComparable.class);
+        job.setOutputValueClass(DefaultHCatRecord.class);
+        job.setNumReduceTasks(0);
+        HCatOutputFormat.setOutput(job, OutputJobInfo.create(dbName,
+                outputTableName, null, serverUri, principalID));
+        HCatSchema s = HCatInputFormat.getTableSchema(job);
+        System.err.println("INFO: output schema explicitly set for writing: "
+                + s);
+        HCatOutputFormat.setSchema(job, s);
+        job.setOutputFormatClass(HCatOutputFormat.class);
+        return (job.waitForCompletion(true) ? 0 : 1);
+    }
+
+    public static void main(String[] args) throws Exception {
+        int exitCode = ToolRunner.run(new WriteRC(), args);
+        System.exit(exitCode);
+    }
+}
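
Apart from the job name, WriteRC is identical to WriteJson: nothing in the
MapReduce code is RCFile- or JSON-specific, because HCatOutputFormat resolves
the on-disk format from the output table's metadata. The essential map-only
wiring shared by both writers (a sketch of the calls used above, not a
verbatim excerpt):

    Job job = new Job(conf, "copy-table");
    job.setNumReduceTasks(0);  // map-only: records go straight to the output format
    job.setInputFormatClass(HCatInputFormat.class);
    job.setOutputFormatClass(HCatOutputFormat.class);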

Added: incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/WriteText.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/WriteText.java?rev=1211077&view=auto
==============================================================================
--- incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/WriteText.java (added)
+++ incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/WriteText.java Tue Dec  6 20:05:37 2011
@@ -0,0 +1,130 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hcatalog.utils;
+
+import java.io.IOException;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.conf.Configured;
+import org.apache.hadoop.io.WritableComparable;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.Mapper;
+import org.apache.hadoop.util.GenericOptionsParser;
+import org.apache.hadoop.util.Tool;
+import org.apache.hadoop.util.ToolRunner;
+import org.apache.hcatalog.common.HCatConstants;
+import org.apache.hcatalog.data.DefaultHCatRecord;
+import org.apache.hcatalog.data.HCatRecord;
+import org.apache.hcatalog.data.schema.HCatSchema;
+import org.apache.hcatalog.mapreduce.HCatInputFormat;
+import org.apache.hcatalog.mapreduce.HCatOutputFormat;
+import org.apache.hcatalog.mapreduce.InputJobInfo;
+import org.apache.hcatalog.mapreduce.OutputJobInfo;
+
+/**
+ * A map reduce test for HCatalog writes. It reads all seven columns of the
+ * input table (two small integer types, an int, a bigint, a float, a
+ * double, and a string), copies each record column for column, and writes
+ * it to the output table, which is expected to use text storage.
+ *
+ * Usage: hadoop jar <test jar> org.apache.hcatalog.utils.WriteText
+ * -libjars <hcat jar> <serveruri> <input table> <output table>
+ * The hcat jar location should be specified as file://<full path to jar>
+ */
+public class WriteText extends Configured implements Tool {
+
+    public static class Map extends
+            Mapper<WritableComparable, HCatRecord, WritableComparable, HCatRecord> {
+
+        int t;
+        int si;
+        int i;
+        long b;
+        float f;
+        double d;
+        String s;
+
+        @Override
+        protected void map(
+                WritableComparable key,
+                HCatRecord value,
+                org.apache.hadoop.mapreduce.Mapper<WritableComparable, HCatRecord, WritableComparable, HCatRecord>.Context context)
+                throws IOException, InterruptedException {
+            // Pull all seven typed columns; the casts double as assertions
+            // on the Java types hcat hands back for each Hive column type.
+            t = (Integer) value.get(0);
+            si = (Integer) value.get(1);
+            i = (Integer) value.get(2);
+            b = (Long) value.get(3);
+            f = (Float) value.get(4);
+            d = (Double) value.get(5);
+            s = (String) value.get(6);
+
+            // Write the record back out unchanged, column for column.
+            HCatRecord record = new DefaultHCatRecord(7);
+            record.set(0, t);
+            record.set(1, si);
+            record.set(2, i);
+            record.set(3, b);
+            record.set(4, f);
+            record.set(5, d);
+            record.set(6, s);
+
+            context.write(null, record);
+        }
+    }
+
+    public int run(String[] args) throws Exception {
+        Configuration conf = getConf();
+        args = new GenericOptionsParser(conf, args).getRemainingArgs();
+
+        String serverUri = args[0];
+        String inputTableName = args[1];
+        String outputTableName = args[2];
+        String dbName = null;
+
+        String principalID = System
+                .getProperty(HCatConstants.HCAT_METASTORE_PRINCIPAL);
+        if (principalID != null)
+            conf.set(HCatConstants.HCAT_METASTORE_PRINCIPAL, principalID);
+        Job job = new Job(conf, "WriteText");
+        HCatInputFormat.setInput(job, InputJobInfo.create(dbName,
+                inputTableName, null, serverUri, principalID));
+
+        // Map-only job: wire up formats and key/value types, then point
+        // HCatOutputFormat at the output table with the input table's schema.
+        job.setInputFormatClass(HCatInputFormat.class);
+        job.setJarByClass(WriteText.class);
+        job.setMapperClass(Map.class);
+        job.setOutputKeyClass(WritableComparable.class);
+        job.setOutputValueClass(DefaultHCatRecord.class);
+        job.setNumReduceTasks(0);
+        HCatOutputFormat.setOutput(job, OutputJobInfo.create(dbName,
+                outputTableName, null, serverUri, principalID));
+        HCatSchema s = HCatInputFormat.getTableSchema(job);
+        System.err.println("INFO: output schema explicitly set for writing:"
+                + s);
+        HCatOutputFormat.setSchema(job, s);
+        job.setOutputFormatClass(HCatOutputFormat.class);
+        return (job.waitForCompletion(true) ? 0 : 1);
+    }
+
+    public static void main(String[] args) throws Exception {
+        int exitCode = ToolRunner.run(new WriteText(), args);
+        System.exit(exitCode);
+    }
+}
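
WriteText's casts encode an assumption about HCat's type mapping: the first
two columns (t and si, presumably tinyint and smallint in the Hive schema)
are expected to arrive as Integer. A more defensive variant (hypothetical,
not in this commit) would widen through java.lang.Number rather than assume
the exact boxed type:

    // Tolerate whatever numeric boxing the storage layer chooses.
    Object raw = value.get(3);
    long b = (raw == null) ? 0L : ((Number) raw).longValue();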