Posted to commits@hive.apache.org by kh...@apache.org on 2013/09/06 02:49:17 UTC

svn commit: r1520466 [9/18] - in /hive/trunk/hcatalog: core/src/main/java/org/apache/hcatalog/cli/ core/src/main/java/org/apache/hcatalog/cli/SemanticAnalysis/ core/src/main/java/org/apache/hcatalog/common/ core/src/main/java/org/apache/hcatalog/data/ ...

Added: hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/cli/TestPermsGrp.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/cli/TestPermsGrp.java?rev=1520466&view=auto
==============================================================================
--- hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/cli/TestPermsGrp.java (added)
+++ hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/cli/TestPermsGrp.java Fri Sep  6 00:49:14 2013
@@ -0,0 +1,231 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.hcatalog.cli;
+
+import java.io.FileNotFoundException;
+import java.util.ArrayList;
+
+import junit.framework.TestCase;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
+import org.apache.hadoop.hive.metastore.MetaStoreUtils;
+import org.apache.hadoop.hive.metastore.Warehouse;
+import org.apache.hadoop.hive.metastore.api.AlreadyExistsException;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.InvalidObjectException;
+import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
+import org.apache.hadoop.hive.metastore.api.SerDeInfo;
+import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.metastore.api.Type;
+import org.apache.hadoop.hive.ql.metadata.Hive;
+import org.apache.hadoop.hive.serde.serdeConstants;
+import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hcatalog.ExitException;
+import org.apache.hcatalog.NoExitSecurityManager;
+
+import org.apache.hcatalog.cli.SemanticAnalysis.HCatSemanticAnalyzer;
+import org.apache.hcatalog.common.HCatConstants;
+import org.apache.thrift.TException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class TestPermsGrp extends TestCase {
+
+    private boolean isServerRunning = false;
+    private static final int msPort = 20101;
+    private HiveConf hcatConf;
+    private Warehouse clientWH;
+    private HiveMetaStoreClient msc;
+    private static final Logger LOG = LoggerFactory.getLogger(TestPermsGrp.class);
+
+    @Override
+    protected void tearDown() throws Exception {
+        System.setSecurityManager(securityManager);
+    }
+
+    @Override
+    protected void setUp() throws Exception {
+
+        if (isServerRunning) {
+            return;
+        }
+
+        MetaStoreUtils.startMetaStore(msPort, ShimLoader.getHadoopThriftAuthBridge());
+
+        isServerRunning = true;
+
+        securityManager = System.getSecurityManager();
+        System.setSecurityManager(new NoExitSecurityManager());
+
+        hcatConf = new HiveConf(this.getClass());
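+        // Point the client at the standalone Thrift metastore started above, rather than an embedded one.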
+        hcatConf.set("hive.metastore.local", "false");
+        hcatConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://127.0.0.1:" + msPort);
+        hcatConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3);
+        hcatConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTFAILURERETRIES, 3);
+
+        hcatConf.set(HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK.varname, HCatSemanticAnalyzer.class.getName());
+        hcatConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
+        hcatConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
+        hcatConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
+        clientWH = new Warehouse(hcatConf);
+        msc = new HiveMetaStoreClient(hcatConf, null);
+        System.setProperty(HiveConf.ConfVars.PREEXECHOOKS.varname, " ");
+        System.setProperty(HiveConf.ConfVars.POSTEXECHOOKS.varname, " ");
+    }
+
+
+    public void testCustomPerms() throws Exception {
+
+        String dbName = MetaStoreUtils.DEFAULT_DATABASE_NAME;
+        String tblName = "simptbl";
+        String typeName = "Person";
+
+        try {
+
+            // Let's first test default permissions, i.e. the case where the user specified nothing.
+            Table tbl = getTable(dbName, tblName, typeName);
+            msc.createTable(tbl);
+            Database db = Hive.get(hcatConf).getDatabase(dbName);
+            Path dfsPath = clientWH.getTablePath(db, tblName);
+            cleanupTbl(dbName, tblName, typeName);
+
+            // Next, test the case where the user did specify perms.
+            try {
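+                // Roughly what the hcat CLI would run (illustrative): hcat -e "create table ..." -p rwx-wx---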
+                HCatCli.main(new String[]{"-e", "create table simptbl (name string) stored as RCFILE", "-p", "rwx-wx---"});
+            } catch (Exception e) {
+                assertTrue(e instanceof ExitException);
+                assertEquals(0, ((ExitException) e).getStatus());
+            }
+            dfsPath = clientWH.getTablePath(db, tblName);
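+            // FsPermission.valueOf() parses the 10-character unix symbolic form; the leading 'd' (file type) carries no permission bits.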
+            assertEquals(FsPermission.valueOf("drwx-wx---"), dfsPath.getFileSystem(hcatConf).getFileStatus(dfsPath).getPermission());
+
+            cleanupTbl(dbName, tblName, typeName);
+
+            // User specified perms in invalid format.
+            hcatConf.set(HCatConstants.HCAT_PERMS, "rwx");
+            // make sure create table fails.
+            try {
+                HCatCli.main(new String[]{"-e", "create table simptbl (name string) stored as RCFILE", "-p", "rwx"});
+                fail("Create table should have failed for invalid perms format.");
+            } catch (Exception me) {
+                assertTrue(me instanceof ExitException);
+            }
+            // No physical dir gets created.
+            dfsPath = clientWH.getTablePath(db, tblName);
+            try {
+                dfsPath.getFileSystem(hcatConf).getFileStatus(dfsPath);
+                fail("Table directory should not exist.");
+            } catch (Exception fnfe) {
+                assertTrue(fnfe instanceof FileNotFoundException);
+            }
+
+            // And no metadata gets created.
+            try {
+                msc.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tblName);
+                fail("Table metadata should not exist.");
+            } catch (Exception e) {
+                assertTrue(e instanceof NoSuchObjectException);
+                assertEquals("default.simptbl table not found", e.getMessage());
+            }
+
+            // test for invalid group name
+            hcatConf.set(HCatConstants.HCAT_PERMS, "drw-rw-rw-");
+            hcatConf.set(HCatConstants.HCAT_GROUP, "THIS_CANNOT_BE_A_VALID_GRP_NAME_EVER");
+
+            try {
+                // create table must fail.
+                HCatCli.main(new String[]{"-e", "create table simptbl (name string) stored as RCFILE", "-p", "rw-rw-rw-", "-g", "THIS_CANNOT_BE_A_VALID_GRP_NAME_EVER"});
+                fail("Create table should have failed for an invalid group name.");
+            } catch (Exception me) {
+                assertTrue(me instanceof SecurityException);
+            }
+
+            try {
+                // no metadata should get created.
+                msc.getTable(dbName, tblName);
+                fail("Table metadata should not have been created.");
+            } catch (Exception e) {
+                assertTrue(e instanceof NoSuchObjectException);
+                assertEquals("default.simptbl table not found", e.getMessage());
+            }
+            try {
+                // Nor should the dir get created.
+                dfsPath.getFileSystem(hcatConf).getFileStatus(dfsPath);
+                fail("Table directory should not have been created.");
+            } catch (Exception e) {
+                assertTrue(e instanceof FileNotFoundException);
+            }
+
+        } catch (Exception e) {
+            LOG.error("testCustomPerms failed.", e);
+            throw e;
+        }
+    }
+
+    private void silentDropDatabase(String dbName) throws MetaException, TException {
+        try {
+            for (String tableName : msc.getTables(dbName, "*")) {
+                msc.dropTable(dbName, tableName);
+            }
+
+        } catch (NoSuchObjectException e) {
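+            // Ignore: the database does not exist, so there is nothing to drop.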
+        }
+    }
+
+    private void cleanupTbl(String dbName, String tblName, String typeName) throws NoSuchObjectException, MetaException, TException, InvalidOperationException {
+
+        msc.dropTable(dbName, tblName);
+        msc.dropType(typeName);
+    }
+
+    private Table getTable(String dbName, String tblName, String typeName) throws NoSuchObjectException, MetaException, TException, AlreadyExistsException, InvalidObjectException {
+
+        msc.dropTable(dbName, tblName);
+        silentDropDatabase(dbName);
+
+
+        msc.dropType(typeName);
+        Type typ1 = new Type();
+        typ1.setName(typeName);
+        typ1.setFields(new ArrayList<FieldSchema>(1));
+        typ1.getFields().add(new FieldSchema("name", serdeConstants.STRING_TYPE_NAME, ""));
+        msc.createType(typ1);
+
+        Table tbl = new Table();
+        tbl.setDbName(dbName);
+        tbl.setTableName(tblName);
+        StorageDescriptor sd = new StorageDescriptor();
+        tbl.setSd(sd);
+        sd.setCols(typ1.getFields());
+
+        sd.setSerdeInfo(new SerDeInfo());
+        return tbl;
+    }
+
+
+    private SecurityManager securityManager;
+
+}

Added: hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/cli/TestSemanticAnalysis.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/cli/TestSemanticAnalysis.java?rev=1520466&view=auto
==============================================================================
--- hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/cli/TestSemanticAnalysis.java (added)
+++ hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/cli/TestSemanticAnalysis.java Fri Sep  6 00:49:14 2013
@@ -0,0 +1,419 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.hcatalog.cli;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.hive.cli.CliSessionState;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.MetaStoreUtils;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
+import org.apache.hadoop.hive.metastore.api.Partition;
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.ql.CommandNeedRetryException;
+import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat;
+import org.apache.hadoop.hive.ql.io.RCFileInputFormat;
+import org.apache.hadoop.hive.ql.io.RCFileOutputFormat;
+import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.mapred.TextInputFormat;
+import org.apache.hcatalog.cli.SemanticAnalysis.HCatSemanticAnalyzer;
+import org.apache.hcatalog.mapreduce.HCatBaseTest;
+import org.apache.thrift.TException;
+import org.junit.Before;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class TestSemanticAnalysis extends HCatBaseTest {
+
+    private static final Logger LOG = LoggerFactory.getLogger(TestSemanticAnalysis.class);
+    private static final String TBL_NAME = "junit_sem_analysis";
+
+    private Driver hcatDriver = null;
+    private String query;
+
+    @Before
+    public void setUpHCatDriver() throws IOException {
+        if (hcatDriver == null) {
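+            // Clone the base test conf and attach the HCat semantic analyzer so DDL is validated by HCatalog.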
+            HiveConf hcatConf = new HiveConf(hiveConf);
+            hcatConf.set(HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK.varname,
+                    HCatSemanticAnalyzer.class.getName());
+            hcatDriver = new Driver(hcatConf);
+            SessionState.start(new CliSessionState(hcatConf));
+        }
+    }
+
+    @Test
+    public void testDescDB() throws CommandNeedRetryException, IOException {
+        hcatDriver.run("drop database mydb cascade");
+        assertEquals(0, hcatDriver.run("create database mydb").getResponseCode());
+        CommandProcessorResponse resp = hcatDriver.run("describe database mydb");
+        assertEquals(0, resp.getResponseCode());
+        ArrayList<String> result = new ArrayList<String>();
+        hcatDriver.getResults(result);
+        assertTrue(result.get(0).contains("mydb.db"));
+        hcatDriver.run("drop database mydb cascade");
+    }
+
+    @Test
+    public void testCreateTblWithLowerCasePartNames() throws CommandNeedRetryException, MetaException, TException, NoSuchObjectException {
+        driver.run("drop table junit_sem_analysis");
+        CommandProcessorResponse resp = driver.run("create table junit_sem_analysis (a int) partitioned by (B string) stored as TEXTFILE");
+        assertEquals(0, resp.getResponseCode());
+        assertNull(resp.getErrorMessage());
+        Table tbl = client.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, TBL_NAME);
+        assertEquals("Partition key name case problem", "b", tbl.getPartitionKeys().get(0).getName());
+        driver.run("drop table junit_sem_analysis");
+    }
+
+    @Test
+    public void testAlterTblFFpart() throws MetaException, TException, NoSuchObjectException, CommandNeedRetryException {
+
+        driver.run("drop table junit_sem_analysis");
+        driver.run("create table junit_sem_analysis (a int) partitioned by (b string) stored as TEXTFILE");
+        driver.run("alter table junit_sem_analysis add partition (b='2010-10-10')");
+        hcatDriver.run("alter table junit_sem_analysis partition (b='2010-10-10') set fileformat RCFILE");
+
+        Table tbl = client.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, TBL_NAME);
+        assertEquals(TextInputFormat.class.getName(), tbl.getSd().getInputFormat());
+        assertEquals(HiveIgnoreKeyTextOutputFormat.class.getName(), tbl.getSd().getOutputFormat());
+
+        List<String> partVals = new ArrayList<String>(1);
+        partVals.add("2010-10-10");
+        Partition part = client.getPartition(MetaStoreUtils.DEFAULT_DATABASE_NAME, TBL_NAME, partVals);
+
+        assertEquals(RCFileInputFormat.class.getName(), part.getSd().getInputFormat());
+        assertEquals(RCFileOutputFormat.class.getName(), part.getSd().getOutputFormat());
+
+        hcatDriver.run("drop table junit_sem_analysis");
+    }
+
+    @Test
+    public void testUseNonExistentDB() throws CommandNeedRetryException {
+        CommandProcessorResponse resp = hcatDriver.run("use no_such_db");
+        assertEquals(1, resp.getResponseCode());
+    }
+
+    @Test
+    public void testDatabaseOperations() throws MetaException, CommandNeedRetryException {
+
+        List<String> dbs = client.getAllDatabases();
+        String testDb1 = "testdatabaseoperatons1";
+        String testDb2 = "testdatabaseoperatons2";
+
+        if (dbs.contains(testDb1.toLowerCase())) {
+            assertEquals(0, hcatDriver.run("drop database " + testDb1).getResponseCode());
+        }
+
+        if (dbs.contains(testDb2.toLowerCase())) {
+            assertEquals(0, hcatDriver.run("drop database " + testDb2).getResponseCode());
+        }
+
+        assertEquals(0, hcatDriver.run("create database " + testDb1).getResponseCode());
+        assertTrue(client.getAllDatabases().contains(testDb1));
+        assertEquals(0, hcatDriver.run("create database if not exists " + testDb1).getResponseCode());
+        assertTrue(client.getAllDatabases().contains(testDb1));
+        assertEquals(0, hcatDriver.run("create database if not exists " + testDb2).getResponseCode());
+        assertTrue(client.getAllDatabases().contains(testDb2));
+
+        assertEquals(0, hcatDriver.run("drop database " + testDb1).getResponseCode());
+        assertEquals(0, hcatDriver.run("drop database " + testDb2).getResponseCode());
+        assertFalse(client.getAllDatabases().contains(testDb1));
+        assertFalse(client.getAllDatabases().contains(testDb2));
+    }
+
+    @Test
+    public void testCreateTableIfNotExists() throws MetaException, TException, NoSuchObjectException, CommandNeedRetryException {
+
+        hcatDriver.run("drop table " + TBL_NAME);
+        hcatDriver.run("create table junit_sem_analysis (a int) stored as RCFILE");
+        Table tbl = client.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, TBL_NAME);
+        List<FieldSchema> cols = tbl.getSd().getCols();
+        assertEquals(1, cols.size());
+        assertEquals(new FieldSchema("a", "int", null), cols.get(0));
+        assertEquals(RCFileInputFormat.class.getName(), tbl.getSd().getInputFormat());
+        assertEquals(RCFileOutputFormat.class.getName(), tbl.getSd().getOutputFormat());
+
+        CommandProcessorResponse resp = hcatDriver.run("create table if not exists junit_sem_analysis (a int) stored as RCFILE");
+        assertEquals(0, resp.getResponseCode());
+        assertNull(resp.getErrorMessage());
+        tbl = client.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, TBL_NAME);
+        cols = tbl.getSd().getCols();
+        assertEquals(1, cols.size());
+        assertEquals(new FieldSchema("a", "int", null), cols.get(0));
+        assertEquals(RCFileInputFormat.class.getName(), tbl.getSd().getInputFormat());
+        assertEquals(RCFileOutputFormat.class.getName(), tbl.getSd().getOutputFormat());
+
+        hcatDriver.run("drop table junit_sem_analysis");
+    }
+
+    @Test
+    public void testAlterTblTouch() throws CommandNeedRetryException {
+
+        hcatDriver.run("drop table junit_sem_analysis");
+        hcatDriver.run("create table junit_sem_analysis (a int) partitioned by (b string) stored as RCFILE");
+        CommandProcessorResponse response = hcatDriver.run("alter table junit_sem_analysis touch");
+        assertEquals(0, response.getResponseCode());
+
+        hcatDriver.run("alter table junit_sem_analysis touch partition (b='12')");
+        assertEquals(0, response.getResponseCode());
+
+        hcatDriver.run("drop table junit_sem_analysis");
+    }
+
+    @Test
+    public void testChangeColumns() throws CommandNeedRetryException {
+        hcatDriver.run("drop table junit_sem_analysis");
+        hcatDriver.run("create table junit_sem_analysis (a int, c string) partitioned by (b string) stored as RCFILE");
+        CommandProcessorResponse response = hcatDriver.run("alter table junit_sem_analysis change a a1 int");
+        assertEquals(0, response.getResponseCode());
+
+        response = hcatDriver.run("alter table junit_sem_analysis change a1 a string");
+        assertEquals(0, response.getResponseCode());
+
+        response = hcatDriver.run("alter table junit_sem_analysis change a a int after c");
+        assertEquals(0, response.getResponseCode());
+        hcatDriver.run("drop table junit_sem_analysis");
+    }
+
+    @Test
+    public void testAddReplaceCols() throws IOException, MetaException, TException, NoSuchObjectException, CommandNeedRetryException {
+
+        hcatDriver.run("drop table junit_sem_analysis");
+        hcatDriver.run("create table junit_sem_analysis (a int, c string) partitioned by (b string) stored as RCFILE");
+        CommandProcessorResponse response = hcatDriver.run("alter table junit_sem_analysis replace columns (a1 tinyint)");
+        assertEquals(0, response.getResponseCode());
+
+        response = hcatDriver.run("alter table junit_sem_analysis add columns (d tinyint)");
+        assertEquals(0, response.getResponseCode());
+        assertNull(response.getErrorMessage());
+
+        response = hcatDriver.run("describe extended junit_sem_analysis");
+        assertEquals(0, response.getResponseCode());
+        Table tbl = client.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, TBL_NAME);
+        List<FieldSchema> cols = tbl.getSd().getCols();
+        assertEquals(2, cols.size());
+        assertEquals(new FieldSchema("a1", "tinyint", null), cols.get(0));
+        assertEquals(new FieldSchema("d", "tinyint", null), cols.get(1));
+        hcatDriver.run("drop table junit_sem_analysis");
+    }
+
+    @Test
+    public void testAlterTblClusteredBy() throws CommandNeedRetryException {
+
+        hcatDriver.run("drop table junit_sem_analysis");
+        hcatDriver.run("create table junit_sem_analysis (a int) partitioned by (b string) stored as RCFILE");
+        CommandProcessorResponse response = hcatDriver.run("alter table junit_sem_analysis clustered by (a) into 7 buckets");
+        assertEquals(0, response.getResponseCode());
+        hcatDriver.run("drop table junit_sem_analysis");
+    }
+
+    @Test
+    public void testAlterTableSetFF() throws IOException, MetaException, TException, NoSuchObjectException, CommandNeedRetryException {
+
+        hcatDriver.run("drop table junit_sem_analysis");
+        hcatDriver.run("create table junit_sem_analysis (a int) partitioned by (b string) stored as RCFILE");
+
+        Table tbl = client.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, TBL_NAME);
+        assertEquals(RCFileInputFormat.class.getName(), tbl.getSd().getInputFormat());
+        assertEquals(RCFileOutputFormat.class.getName(), tbl.getSd().getOutputFormat());
+
+        hcatDriver.run("alter table junit_sem_analysis set fileformat INPUTFORMAT 'org.apache.hadoop.hive.ql.io.RCFileInputFormat' OUTPUTFORMAT " +
+                "'org.apache.hadoop.hive.ql.io.RCFileOutputFormat' inputdriver 'mydriver' outputdriver 'yourdriver'");
+        hcatDriver.run("desc extended junit_sem_analysis");
+
+        tbl = client.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, TBL_NAME);
+        assertEquals(RCFileInputFormat.class.getName(), tbl.getSd().getInputFormat());
+        assertEquals(RCFileOutputFormat.class.getName(), tbl.getSd().getOutputFormat());
+
+        hcatDriver.run("drop table junit_sem_analysis");
+    }
+
+    @Test
+    public void testAddPartFail() throws CommandNeedRetryException {
+
+        driver.run("drop table junit_sem_analysis");
+        driver.run("create table junit_sem_analysis (a int) partitioned by (b string) stored as RCFILE");
+        CommandProcessorResponse response = hcatDriver.run("alter table junit_sem_analysis add partition (b='2') location 'README.txt'");
+        assertEquals(0, response.getResponseCode());
+        driver.run("drop table junit_sem_analysis");
+    }
+
+    @Test
+    public void testAddPartPass() throws IOException, CommandNeedRetryException {
+
+        hcatDriver.run("drop table junit_sem_analysis");
+        hcatDriver.run("create table junit_sem_analysis (a int) partitioned by (b string) stored as RCFILE");
+        CommandProcessorResponse response = hcatDriver.run("alter table junit_sem_analysis add partition (b='2') location '" + TEST_DATA_DIR + "'");
+        assertEquals(0, response.getResponseCode());
+        assertNull(response.getErrorMessage());
+        hcatDriver.run("drop table junit_sem_analysis");
+    }
+
+    @Test
+    public void testCTAS() throws CommandNeedRetryException {
+        hcatDriver.run("drop table junit_sem_analysis");
+        query = "create table junit_sem_analysis (a int) as select * from tbl2";
+        CommandProcessorResponse response = hcatDriver.run(query);
+        assertEquals(40000, response.getResponseCode());
+        assertTrue(response.getErrorMessage().contains("FAILED: SemanticException Operation not supported. Create table as Select is not a valid operation."));
+        hcatDriver.run("drop table junit_sem_analysis");
+    }
+
+    @Test
+    public void testStoredAs() throws CommandNeedRetryException {
+        hcatDriver.run("drop table junit_sem_analysis");
+        query = "create table junit_sem_analysis (a int)";
+        CommandProcessorResponse response = hcatDriver.run(query);
+        assertEquals(0, response.getResponseCode());
+        hcatDriver.run("drop table junit_sem_analysis");
+    }
+
+    @Test
+    public void testAddDriverInfo() throws IOException, MetaException, TException, NoSuchObjectException, CommandNeedRetryException {
+
+        hcatDriver.run("drop table junit_sem_analysis");
+        query = "create table junit_sem_analysis (a int) partitioned by (b string)  stored as " +
+                "INPUTFORMAT 'org.apache.hadoop.hive.ql.io.RCFileInputFormat' OUTPUTFORMAT " +
+                "'org.apache.hadoop.hive.ql.io.RCFileOutputFormat' inputdriver 'mydriver' outputdriver 'yourdriver' ";
+        assertEquals(0, hcatDriver.run(query).getResponseCode());
+
+        Table tbl = client.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, TBL_NAME);
+        assertEquals(RCFileInputFormat.class.getName(), tbl.getSd().getInputFormat());
+        assertEquals(RCFileOutputFormat.class.getName(), tbl.getSd().getOutputFormat());
+
+        hcatDriver.run("drop table junit_sem_analysis");
+    }
+
+    @Test
+    public void testInvalidateNonStringPartition() throws IOException, CommandNeedRetryException {
+
+        hcatDriver.run("drop table junit_sem_analysis");
+        query = "create table junit_sem_analysis (a int) partitioned by (b int)  stored as RCFILE";
+
+        CommandProcessorResponse response = hcatDriver.run(query);
+        assertEquals(40000, response.getResponseCode());
+        assertEquals("FAILED: SemanticException Operation not supported. HCatalog only supports partition columns of type string. For column: b Found type: int",
+                response.getErrorMessage());
+
+    }
+
+    @Test
+    public void testInvalidateSeqFileStoredAs() throws IOException, CommandNeedRetryException {
+
+        hcatDriver.run("drop table junit_sem_analysis");
+        query = "create table junit_sem_analysis (a int) partitioned by (b string)  stored as SEQUENCEFILE";
+
+        CommandProcessorResponse response = hcatDriver.run(query);
+        assertEquals(0, response.getResponseCode());
+
+    }
+
+    @Test
+    public void testInvalidateTextFileStoredAs() throws IOException, CommandNeedRetryException {
+
+        hcatDriver.run("drop table junit_sem_analysis");
+        query = "create table junit_sem_analysis (a int) partitioned by (b string)  stored as TEXTFILE";
+
+        CommandProcessorResponse response = hcatDriver.run(query);
+        assertEquals(0, response.getResponseCode());
+
+    }
+
+    @Test
+    public void testInvalidateClusteredBy() throws IOException, CommandNeedRetryException {
+
+        hcatDriver.run("drop table junit_sem_analysis");
+        query = "create table junit_sem_analysis (a int) partitioned by (b string) clustered by (a) into 10 buckets stored as TEXTFILE";
+
+        CommandProcessorResponse response = hcatDriver.run(query);
+        assertEquals(0, response.getResponseCode());
+    }
+
+    @Test
+    public void testCTLFail() throws IOException, CommandNeedRetryException {
+
+        driver.run("drop table junit_sem_analysis");
+        driver.run("drop table like_table");
+        query = "create table junit_sem_analysis (a int) partitioned by (b string) stored as RCFILE";
+
+        driver.run(query);
+        query = "create table like_table like junit_sem_analysis";
+        CommandProcessorResponse response = hcatDriver.run(query);
+        assertEquals(0, response.getResponseCode());
+    }
+
+    @Test
+    public void testCTLPass() throws IOException, MetaException, TException, NoSuchObjectException, CommandNeedRetryException {
+
+        try {
+            hcatDriver.run("drop table junit_sem_analysis");
+        } catch (Exception e) {
+            LOG.error("Error in drop table.", e);
+        }
+        query = "create table junit_sem_analysis (a int) partitioned by (b string) stored as RCFILE";
+
+        hcatDriver.run(query);
+        String likeTbl = "like_table";
+        hcatDriver.run("drop table " + likeTbl);
+        query = "create table like_table like junit_sem_analysis";
+        CommandProcessorResponse resp = hcatDriver.run(query);
+        assertEquals(0, resp.getResponseCode());
+//    Table tbl = client.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, likeTbl);
+//    assertEquals(likeTbl,tbl.getTableName());
+//    List<FieldSchema> cols = tbl.getSd().getCols();
+//    assertEquals(1, cols.size());
+//    assertEquals(new FieldSchema("a", "int", null), cols.get(0));
+//    assertEquals("org.apache.hadoop.hive.ql.io.RCFileInputFormat",tbl.getSd().getInputFormat());
+//    assertEquals("org.apache.hadoop.hive.ql.io.RCFileOutputFormat",tbl.getSd().getOutputFormat());
+//    Map<String, String> tblParams = tbl.getParameters();
+//    assertEquals("org.apache.hadoop.hive.hcat.rcfile.RCFileInputStorageDriver", tblParams.get("hcat.isd"));
+//    assertEquals("org.apache.hadoop.hive.hcat.rcfile.RCFileOutputStorageDriver", tblParams.get("hcat.osd"));
+//
+//    hcatDriver.run("drop table junit_sem_analysis");
+//    hcatDriver.run("drop table "+likeTbl);
+    }
+
+// This test case currently fails, since added partitions don't inherit anything from tables.
+
+//  public void testAddPartInheritDrivers() throws MetaException, TException, NoSuchObjectException{
+//
+//    hcatDriver.run("drop table "+TBL_NAME);
+//    hcatDriver.run("create table junit_sem_analysis (a int) partitioned by (b string) stored as RCFILE");
+//    hcatDriver.run("alter table "+TBL_NAME+" add partition (b='2010-10-10')");
+//
+//    List<String> partVals = new ArrayList<String>(1);
+//    partVals.add("2010-10-10");
+//
+//    Map<String,String> map = client.getPartition(MetaStoreUtils.DEFAULT_DATABASE_NAME, TBL_NAME, partVals).getParameters();
+//    assertEquals(map.get(InitializeInput.HOWL_ISD_CLASS), RCFileInputStorageDriver.class.getName());
+//    assertEquals(map.get(InitializeInput.HOWL_OSD_CLASS), RCFileOutputStorageDriver.class.getName());
+//  }
+}

Added: hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/cli/TestStorageHandlerProperties.java.broken
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/cli/TestStorageHandlerProperties.java.broken?rev=1520466&view=auto
==============================================================================
--- hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/cli/TestStorageHandlerProperties.java.broken (added)
+++ hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/cli/TestStorageHandlerProperties.java.broken Fri Sep  6 00:49:14 2013
@@ -0,0 +1,86 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.hcatalog.cli;
+
+import static org.junit.Assert.assertEquals;
+
+import org.apache.hadoop.hive.cli.CliSessionState;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
+import org.apache.hadoop.hive.metastore.MetaStoreUtils;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.ql.CommandNeedRetryException;
+import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hcatalog.cli.SemanticAnalysis.HCatSemanticAnalyzer;
+import org.apache.hcatalog.common.HCatConstants;
+import org.apache.thrift.TException;
+
+import junit.framework.TestCase;
+
+public class TestStorageHandlerProperties extends TestCase {
+
+    private Driver hcatDriver;
+    private Driver hiveDriver;
+    private HiveMetaStoreClient msc;
+
+    protected void setUp() throws Exception {
+        HiveConf hcatConf = new HiveConf(this.getClass());
+        hcatConf.set(ConfVars.PREEXECHOOKS.varname, "");
+        hcatConf.set(ConfVars.POSTEXECHOOKS.varname, "");
+        hcatConf.set(ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
+
+        HiveConf hiveConf = new HiveConf(hcatConf, this.getClass());
+        hiveDriver = new Driver(hiveConf);
+
+        hcatConf.set(ConfVars.SEMANTIC_ANALYZER_HOOK.varname, HCatSemanticAnalyzer.class.getName());
+        hcatDriver = new Driver(hcatConf);
+
+        msc = new HiveMetaStoreClient(hcatConf);
+        SessionState.start(new CliSessionState(hcatConf));
+    }
+
+    public void testTableProperties() throws CommandNeedRetryException, MetaException, TException, NoSuchObjectException {
+        hcatDriver.run("drop table test_table");
+        CommandProcessorResponse response = hcatDriver
+                .run("create table test_table(key int, value string) STORED BY " +
+                     "'org.apache.hcatalog.cli.DummyStorageHandler' ");
+
+        assertEquals(0, response.getResponseCode());
+        Table tbl = msc.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, "test_table");
+        DummyStorageHandler dsh = new DummyStorageHandler();
+        assertTrue(tbl.getParameters().containsKey(HCatConstants.HCAT_ISD_CLASS));
+        assertTrue(tbl.getParameters().containsKey(HCatConstants.HCAT_OSD_CLASS));
+        assertEquals(dsh.getInputStorageDriver().getName(), tbl.getParameters().get(HCatConstants.HCAT_ISD_CLASS));
+        assertEquals(dsh.getOutputStorageDriver().getName(), tbl.getParameters().get(HCatConstants.HCAT_OSD_CLASS));
+    }
+
+    /**
+     * @throws java.lang.Exception
+     * @see junit.framework.TestCase#tearDown()
+     */
+    protected void tearDown() throws Exception {
+        super.tearDown();
+    }
+
+}

Added: hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/cli/TestUseDatabase.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/cli/TestUseDatabase.java?rev=1520466&view=auto
==============================================================================
--- hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/cli/TestUseDatabase.java (added)
+++ hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/cli/TestUseDatabase.java Fri Sep  6 00:49:14 2013
@@ -0,0 +1,77 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.hcatalog.cli;
+
+import java.io.IOException;
+
+import junit.framework.TestCase;
+
+import org.apache.hadoop.hive.cli.CliSessionState;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.ql.CommandNeedRetryException;
+import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hcatalog.cli.SemanticAnalysis.HCatSemanticAnalyzer;
+
+/* Unit test for GitHub Howl issue #3 */
+public class TestUseDatabase extends TestCase {
+
+    private Driver hcatDriver;
+
+    @Override
+    protected void setUp() throws Exception {
+
+        HiveConf hcatConf = new HiveConf(this.getClass());
+        hcatConf.set(ConfVars.PREEXECHOOKS.varname, "");
+        hcatConf.set(ConfVars.POSTEXECHOOKS.varname, "");
+        hcatConf.set(ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
+
+        hcatConf.set(ConfVars.SEMANTIC_ANALYZER_HOOK.varname, HCatSemanticAnalyzer.class.getName());
+        hcatDriver = new Driver(hcatConf);
+        SessionState.start(new CliSessionState(hcatConf));
+    }
+
+    String query;
+    private final String dbName = "testUseDatabase_db";
+    private final String tblName = "testUseDatabase_tbl";
+
+    public void testAlterTablePass() throws IOException, CommandNeedRetryException {
+
+        hcatDriver.run("create database " + dbName);
+        hcatDriver.run("use " + dbName);
+        hcatDriver.run("create table " + tblName + " (a int) partitioned by (b string) stored as RCFILE");
+
+        CommandProcessorResponse response;
+
+        response = hcatDriver.run("alter table " + tblName + " add partition (b='2') location '/tmp'");
+        assertEquals(0, response.getResponseCode());
+        assertNull(response.getErrorMessage());
+
+        response = hcatDriver.run("alter table " + tblName + " set fileformat INPUTFORMAT 'org.apache.hadoop.hive.ql.io.RCFileInputFormat' OUTPUTFORMAT " +
+                "'org.apache.hadoop.hive.ql.io.RCFileOutputFormat' inputdriver 'mydriver' outputdriver 'yourdriver'");
+        assertEquals(0, response.getResponseCode());
+        assertNull(response.getErrorMessage());
+
+        hcatDriver.run("drop table " + tblName);
+        hcatDriver.run("drop database " + dbName);
+    }
+
+}

Added: hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/common/TestHCatUtil.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/common/TestHCatUtil.java?rev=1520466&view=auto
==============================================================================
--- hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/common/TestHCatUtil.java (added)
+++ hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/common/TestHCatUtil.java Fri Sep  6 00:49:14 2013
@@ -0,0 +1,183 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.hcatalog.common;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+import org.apache.hadoop.fs.permission.FsAction;
+import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.hive.metastore.TableType;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.Order;
+import org.apache.hadoop.hive.metastore.api.SerDeInfo;
+import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
+import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.serde.serdeConstants;
+import org.apache.hcatalog.data.schema.HCatFieldSchema;
+import org.apache.hcatalog.data.schema.HCatSchema;
+import org.junit.Assert;
+import org.junit.Test;
+
+public class TestHCatUtil {
+
+    @Test
+    public void testFsPermissionOperation() {
+
+        HashMap<String, Integer> permsCode = new HashMap<String, Integer>();
+
+        for (int i = 0; i < 8; i++) {
+            for (int j = 0; j < 8; j++) {
+                for (int k = 0; k < 8; k++) {
+                    StringBuilder sb = new StringBuilder();
+                    sb.append("0");
+                    sb.append(i);
+                    sb.append(j);
+                    sb.append(k);
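+                    // ((i * 8) + j) * 8 + k is the decimal value of the three octal digits i, j, k.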
+                    Integer code = (((i * 8) + j) * 8) + k;
+                    String perms = (new FsPermission(Short.decode(sb.toString()))).toString();
+                    if (permsCode.containsKey(perms)) {
+                        Assert.assertEquals("permissions(" + perms + ") mapped to multiple codes", code, permsCode.get(perms));
+                    }
+                    permsCode.put(perms, code);
+                    assertFsPermissionTransformationIsGood(perms);
+                }
+            }
+        }
+    }
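+    // A worked example of the mapping above (illustrative, not asserted here): octal 0750 ->
+    // ((7 * 8) + 5) * 8 + 0 = 488 decimal, which FsPermission renders as "rwxr-x---".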
+
+    private void assertFsPermissionTransformationIsGood(String perms) {
+        Assert.assertEquals(perms, FsPermission.valueOf("-" + perms).toString());
+    }
+
+    @Test
+    public void testValidateMorePermissive() {
+        assertConsistentFsPermissionBehaviour(FsAction.ALL, true, true, true, true, true, true, true, true);
+        assertConsistentFsPermissionBehaviour(FsAction.READ, false, true, false, true, false, false, false, false);
+        assertConsistentFsPermissionBehaviour(FsAction.WRITE, false, true, false, false, true, false, false, false);
+        assertConsistentFsPermissionBehaviour(FsAction.EXECUTE, false, true, true, false, false, false, false, false);
+        assertConsistentFsPermissionBehaviour(FsAction.READ_EXECUTE, false, true, true, true, false, true, false, false);
+        assertConsistentFsPermissionBehaviour(FsAction.READ_WRITE, false, true, false, true, true, false, true, false);
+        assertConsistentFsPermissionBehaviour(FsAction.WRITE_EXECUTE, false, true, true, false, true, false, false, true);
+        assertConsistentFsPermissionBehaviour(FsAction.NONE, false, true, false, false, false, false, false, false);
+    }
+
+
+    private void assertConsistentFsPermissionBehaviour(
+            FsAction base, boolean versusAll, boolean versusNone,
+            boolean versusX, boolean versusR, boolean versusW,
+            boolean versusRX, boolean versusRW, boolean versusWX) {
+
+        Assert.assertTrue(versusAll == HCatUtil.validateMorePermissive(base, FsAction.ALL));
+        Assert.assertTrue(versusX == HCatUtil.validateMorePermissive(base, FsAction.EXECUTE));
+        Assert.assertTrue(versusNone == HCatUtil.validateMorePermissive(base, FsAction.NONE));
+        Assert.assertTrue(versusR == HCatUtil.validateMorePermissive(base, FsAction.READ));
+        Assert.assertTrue(versusRX == HCatUtil.validateMorePermissive(base, FsAction.READ_EXECUTE));
+        Assert.assertTrue(versusRW == HCatUtil.validateMorePermissive(base, FsAction.READ_WRITE));
+        Assert.assertTrue(versusW == HCatUtil.validateMorePermissive(base, FsAction.WRITE));
+        Assert.assertTrue(versusWX == HCatUtil.validateMorePermissive(base, FsAction.WRITE_EXECUTE));
+    }
+
+    @Test
+    public void testExecutePermissionsCheck() {
+        Assert.assertTrue(HCatUtil.validateExecuteBitPresentIfReadOrWrite(FsAction.ALL));
+        Assert.assertTrue(HCatUtil.validateExecuteBitPresentIfReadOrWrite(FsAction.NONE));
+        Assert.assertTrue(HCatUtil.validateExecuteBitPresentIfReadOrWrite(FsAction.EXECUTE));
+        Assert.assertTrue(HCatUtil.validateExecuteBitPresentIfReadOrWrite(FsAction.READ_EXECUTE));
+        Assert.assertTrue(HCatUtil.validateExecuteBitPresentIfReadOrWrite(FsAction.WRITE_EXECUTE));
+
+        Assert.assertFalse(HCatUtil.validateExecuteBitPresentIfReadOrWrite(FsAction.READ));
+        Assert.assertFalse(HCatUtil.validateExecuteBitPresentIfReadOrWrite(FsAction.WRITE));
+        Assert.assertFalse(HCatUtil.validateExecuteBitPresentIfReadOrWrite(FsAction.READ_WRITE));
+
+    }
+
+    @Test
+    public void testGetTableSchemaWithPtnColsApi() throws IOException {
+        // Check the schema of a table with one field & no partition keys.
+        StorageDescriptor sd = new StorageDescriptor(
+                Lists.newArrayList(new FieldSchema("username", serdeConstants.STRING_TYPE_NAME, null)),
+                "location", "org.apache.hadoop.mapred.TextInputFormat",
+                "org.apache.hadoop.mapred.TextOutputFormat", false, -1, new SerDeInfo(),
+                new ArrayList<String>(), new ArrayList<Order>(), new HashMap<String, String>());
+        org.apache.hadoop.hive.metastore.api.Table apiTable =
+                new org.apache.hadoop.hive.metastore.api.Table("test_tblname", "test_dbname", "test_owner",
+                        0, 0, 0, sd, new ArrayList<FieldSchema>(), new HashMap<String, String>(),
+                        "viewOriginalText", "viewExpandedText", TableType.EXTERNAL_TABLE.name());
+        Table table = new Table(apiTable);
+
+        List<HCatFieldSchema> expectedHCatSchema =
+                Lists.newArrayList(new HCatFieldSchema("username", HCatFieldSchema.Type.STRING, null));
+
+        Assert.assertEquals(new HCatSchema(expectedHCatSchema),
+                HCatUtil.getTableSchemaWithPtnCols(table));
+
+        // Add a partition key & ensure its reflected in the schema.
+        List<FieldSchema> partitionKeys =
+                Lists.newArrayList(new FieldSchema("dt", serdeConstants.STRING_TYPE_NAME, null));
+        table.getTTable().setPartitionKeys(partitionKeys);
+        expectedHCatSchema.add(new HCatFieldSchema("dt", HCatFieldSchema.Type.STRING, null));
+        Assert.assertEquals(new HCatSchema(expectedHCatSchema),
+                HCatUtil.getTableSchemaWithPtnCols(table));
+    }
+
+    /**
+     * Hive represents tables in two ways:
+     * <ul>
+     *   <li>org.apache.hadoop.hive.metastore.api.Table - exactly whats stored in the metastore</li>
+     *   <li>org.apache.hadoop.hive.ql.metadata.Table - adds business logic over api.Table</li>
+     * </ul>
+     * Here we check that SerDe-reported fields are included in the table schema.
+     */
+    @Test
+    public void testGetTableSchemaWithPtnColsSerDeReportedFields() throws IOException {
+        Map<String, String> parameters = Maps.newHashMap();
+        parameters.put(serdeConstants.SERIALIZATION_CLASS,
+                "org.apache.hadoop.hive.serde2.thrift.test.IntString");
+        parameters.put(serdeConstants.SERIALIZATION_FORMAT, "org.apache.thrift.protocol.TBinaryProtocol");
+
+        SerDeInfo serDeInfo = new SerDeInfo(null,
+                "org.apache.hadoop.hive.serde2.thrift.ThriftDeserializer", parameters);
+
+        // StorageDescriptor has an empty list of fields - SerDe will report them.
+        StorageDescriptor sd = new StorageDescriptor(new ArrayList<FieldSchema>(), "location",
+                "org.apache.hadoop.mapred.TextInputFormat", "org.apache.hadoop.mapred.TextOutputFormat",
+                false, -1, serDeInfo, new ArrayList<String>(), new ArrayList<Order>(),
+                new HashMap<String, String>());
+
+        org.apache.hadoop.hive.metastore.api.Table apiTable =
+                new org.apache.hadoop.hive.metastore.api.Table("test_tblname", "test_dbname", "test_owner",
+                        0, 0, 0, sd, new ArrayList<FieldSchema>(), new HashMap<String, String>(),
+                        "viewOriginalText", "viewExpandedText", TableType.EXTERNAL_TABLE.name());
+        Table table = new Table(apiTable);
+
+        List<HCatFieldSchema> expectedHCatSchema = Lists.newArrayList(
+                new HCatFieldSchema("myint", HCatFieldSchema.Type.INT, null),
+                new HCatFieldSchema("mystring", HCatFieldSchema.Type.STRING, null),
+                new HCatFieldSchema("underscore_int", HCatFieldSchema.Type.INT, null));
+
+        Assert.assertEquals(new HCatSchema(expectedHCatSchema),
+                HCatUtil.getTableSchemaWithPtnCols(table));
+    }
+}

Added: hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/common/TestHiveClientCache.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/common/TestHiveClientCache.java?rev=1520466&view=auto
==============================================================================
--- hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/common/TestHiveClientCache.java (added)
+++ hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/common/TestHiveClientCache.java Fri Sep  6 00:49:14 2013
@@ -0,0 +1,267 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.hcatalog.common;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.HiveMetaStore;
+import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
+import org.apache.hadoop.hive.metastore.api.AlreadyExistsException;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.InvalidObjectException;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
+import org.apache.hadoop.hive.metastore.api.SerDeInfo;
+import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.serde.serdeConstants;
+import org.apache.hcatalog.NoExitSecurityManager;
+import org.apache.hcatalog.cli.SemanticAnalysis.HCatSemanticAnalyzer;
+import org.apache.thrift.TException;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNotSame;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import org.junit.Ignore;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.security.auth.login.LoginException;
+import java.io.IOException;
+import java.math.BigInteger;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Random;
+import java.util.concurrent.Callable;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.Future;
+
+public class TestHiveClientCache {
+
+    private static final Logger LOG = LoggerFactory.getLogger(TestHiveClientCache.class);
+    final HiveConf hiveConf = new HiveConf();
+
+    @BeforeClass
+    public static void setUp() throws Exception {
+    }
+
+    @AfterClass
+    public static void tearDown() throws Exception {
+    }
+
+    @Test
+    public void testCacheHit() throws IOException, MetaException, LoginException {
+
+        HiveClientCache cache = new HiveClientCache(1000);
+        HiveMetaStoreClient client = cache.get(hiveConf);
+        assertNotNull(client);
+        client.close(); // close shouldn't matter
+
+        // Setting an unimportant configuration value should still return the same cached client
+        hiveConf.setIntVar(HiveConf.ConfVars.DYNAMICPARTITIONMAXPARTS, 10);
+        HiveMetaStoreClient client2 = cache.get(hiveConf);
+        assertNotNull(client2);
+        assertEquals(client, client2);
+        client2.close();
+    }
+
+    @Test
+    public void testCacheMiss() throws IOException, MetaException, LoginException {
+        HiveClientCache cache = new HiveClientCache(1000);
+        HiveMetaStoreClient client = cache.get(hiveConf);
+        assertNotNull(client);
+
+        // Set a different URI, since it is one of the criteria that decide whether the same client is returned
+        hiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, " "); // URIs are compared as strings, so even a space makes them different
+        HiveMetaStoreClient client2 = cache.get(hiveConf);
+        assertNotNull(client2);
+        assertNotSame(client, client2);
+    }
+
+    /**
+     * Check that a new client is returned for the same configuration after the expiry time.
+     * Also verify that the expiry time configuration is honoured
+     */
+    @Test
+    public void testCacheExpiry() throws IOException, MetaException, LoginException, InterruptedException {
+        HiveClientCache cache = new HiveClientCache(1);
+        HiveClientCache.CacheableHiveMetaStoreClient client = (HiveClientCache.CacheableHiveMetaStoreClient) cache.get(hiveConf);
+        assertNotNull(client);
+
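+        // Sleep well past the configured expiry (1, presumably in seconds) so the next get() must create a fresh client.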
+        Thread.sleep(2500);
+        HiveMetaStoreClient client2 = cache.get(hiveConf);
+        client.close();
+        assertTrue(client.isClosed()); // close() after *expiry time* and *a cache access* should have torn down the client
+
+        assertNotNull(client2);
+        assertNotSame(client, client2);
+    }
+
+    /**
+     * Check that a *new* client is created if asked from different threads even with
+     * the same hive configuration
+     * @throws ExecutionException
+     * @throws InterruptedException
+     */
+    @Test
+    public void testMultipleThreadAccess() throws ExecutionException, InterruptedException {
+        final HiveClientCache cache = new HiveClientCache(1000);
+
+        class GetHiveClient implements Callable<HiveMetaStoreClient> {
+            @Override
+            public HiveMetaStoreClient call() throws IOException, MetaException, LoginException {
+                return cache.get(hiveConf);
+            }
+        }
+
+        ExecutorService executor = Executors.newFixedThreadPool(2);
+
+        Callable<HiveMetaStoreClient> worker1 = new GetHiveClient();
+        Callable<HiveMetaStoreClient> worker2 = new GetHiveClient();
+        Future<HiveMetaStoreClient> clientFuture1 = executor.submit(worker1);
+        Future<HiveMetaStoreClient> clientFuture2 = executor.submit(worker2);
+        HiveMetaStoreClient client1 = clientFuture1.get();
+        HiveMetaStoreClient client2 = clientFuture2.get();
+        assertNotNull(client1);
+        assertNotNull(client2);
+        assertNotSame(client1, client2);
+    }
+
+    @Test
+    public void testCloseAllClients() throws IOException, MetaException, LoginException {
+        final HiveClientCache cache = new HiveClientCache(1000);
+        HiveClientCache.CacheableHiveMetaStoreClient client1 = (HiveClientCache.CacheableHiveMetaStoreClient) cache.get(hiveConf);
+        hiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, " "); // URIs are checked for string equivalence, even spaces make them different
+        HiveClientCache.CacheableHiveMetaStoreClient client2 = (HiveClientCache.CacheableHiveMetaStoreClient) cache.get(hiveConf);
+        cache.closeAllClientsQuietly();
+        assertTrue(client1.isClosed());
+        assertTrue(client2.isClosed());
+    }
+
+    /**
+     * Verify that an overly long table name breaks the HiveMetaStoreClient, and that
+     * isOpen() subsequently reports the client as broken.
+     */
+    @Ignore("hangs indefinitely")
+    @Test
+    public void testHMSCBreakability() throws IOException, MetaException, LoginException, TException, AlreadyExistsException,
+            InvalidObjectException, NoSuchObjectException, InterruptedException {
+        // Setup
+        LocalMetaServer metaServer = new LocalMetaServer();
+        metaServer.start();
+
+        final HiveClientCache cache = new HiveClientCache(1000);
+        HiveClientCache.CacheableHiveMetaStoreClient client =
+                (HiveClientCache.CacheableHiveMetaStoreClient) cache.get(metaServer.getHiveConf());
+
+        assertTrue(client.isOpen());
+
+        final String DB_NAME = "test_db";
+        final String LONG_TABLE_NAME = "long_table_name_" + new BigInteger(200, new Random()).toString(2);
+
+        try {
+            client.dropTable(DB_NAME, LONG_TABLE_NAME);
+        } catch (Exception e) {
+            // ignore: the table may not exist yet
+        }
+        try {
+            client.dropDatabase(DB_NAME);
+        } catch (Exception e) {
+            // ignore: the database may not exist yet
+        }
+
+        client.createDatabase(new Database(DB_NAME, "", null, null));
+
+        List<FieldSchema> fields = new ArrayList<FieldSchema>();
+        fields.add(new FieldSchema("colname", serdeConstants.STRING_TYPE_NAME, ""));
+        Table tbl = new Table();
+        tbl.setDbName(DB_NAME);
+        tbl.setTableName(LONG_TABLE_NAME);
+        StorageDescriptor sd = new StorageDescriptor();
+        sd.setCols(fields);
+        tbl.setSd(sd);
+        sd.setSerdeInfo(new SerDeInfo());
+
+        // Break the client
+        try {
+            client.createTable(tbl);
+            fail("Exception was expected while creating table with long name");
+        } catch (Exception e) {
+            // expected: the create should fail and leave the client broken
+        }
+
+        assertFalse(client.isOpen());
+        metaServer.shutDown();
+    }
+
+    /** Runs an in-process metastore Thrift server on MS_PORT for the tests above. */
+    private static class LocalMetaServer implements Runnable {
+        public final int MS_PORT = 20101;
+        private final HiveConf hiveConf;
+        private final SecurityManager securityManager;
+        public final static int WAIT_TIME_FOR_BOOTUP = 30000;
+
+        public LocalMetaServer() {
+            securityManager = System.getSecurityManager();
+            System.setSecurityManager(new NoExitSecurityManager());
+            hiveConf = new HiveConf(TestHiveClientCache.class);
+            hiveConf.set("hive.metastore.local", "false");
+            hiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:"
+                    + MS_PORT);
+            hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3);
+            hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTFAILURERETRIES, 3);
+            hiveConf.set(HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK.varname,
+                    HCatSemanticAnalyzer.class.getName());
+            hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
+            hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
+            hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname,
+                    "false");
+            System.setProperty(HiveConf.ConfVars.PREEXECHOOKS.varname, " ");
+            System.setProperty(HiveConf.ConfVars.POSTEXECHOOKS.varname, " ");
+        }
+
+        public void start() throws InterruptedException {
+            Thread thread = new Thread(this);
+            thread.start();
+            Thread.sleep(WAIT_TIME_FOR_BOOTUP); // Wait for the server to boot up
+        }
+
+        @Override
+        public void run() {
+            try {
+                HiveMetaStore.main(new String[]{"-v", "-p", String.valueOf(MS_PORT)});
+            } catch (Throwable t) {
+                LOG.error("Exiting. Got exception from metastore: ", t);
+            }
+        }
+
+        public HiveConf getHiveConf() {
+            return hiveConf;
+        }
+
+        public void shutDown() {
+            // Restore the original security manager; the metastore thread exits with the JVM.
+            System.setSecurityManager(securityManager);
+        }
+    }
+}
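
The tests above pin down the client cache's contract: a client is reused for the
same metastore URI on the same thread, a changed URI or a different thread yields
a new client, and entries go stale after the configured timeout (taken here in
seconds, matching how testCacheExpiry sleeps 2.5 s against a timeout of 1). A
minimal sketch of that contract, using a hypothetical ExpiringClientCache and
plain java.util.concurrent primitives; the real HiveClientCache may be
implemented quite differently:

    import java.util.concurrent.ConcurrentHashMap;
    import java.util.concurrent.ConcurrentMap;

    // Illustrative only: one cached client per (thread, metastore URI) pair,
    // with an entry treated as stale once the expiry timeout elapses.
    class ExpiringClientCache<C> {

        interface ClientFactory<C> {
            C create() throws Exception;
        }

        private static final class Key {
            final long threadId;        // different threads never share a client
            final String metaStoreUris; // compared as a raw string, spaces included
            Key(long threadId, String metaStoreUris) {
                this.threadId = threadId;
                this.metaStoreUris = metaStoreUris;
            }
            @Override
            public boolean equals(Object o) {
                if (!(o instanceof Key)) {
                    return false;
                }
                Key k = (Key) o;
                return threadId == k.threadId && metaStoreUris.equals(k.metaStoreUris);
            }
            @Override
            public int hashCode() {
                return 31 * (int) (threadId ^ (threadId >>> 32)) + metaStoreUris.hashCode();
            }
        }

        private static final class Entry<C> {
            final C client;
            final long createdAt;
            Entry(C client, long createdAt) {
                this.client = client;
                this.createdAt = createdAt;
            }
        }

        private final ConcurrentMap<Key, Entry<C>> cache = new ConcurrentHashMap<Key, Entry<C>>();
        private final long timeoutMillis;

        ExpiringClientCache(int timeoutSeconds) {
            this.timeoutMillis = timeoutSeconds * 1000L;
        }

        C get(String metaStoreUris, ClientFactory<C> factory) throws Exception {
            Key key = new Key(Thread.currentThread().getId(), metaStoreUris);
            Entry<C> e = cache.get(key);
            if (e == null || System.currentTimeMillis() - e.createdAt > timeoutMillis) {
                e = new Entry<C>(factory.create(), System.currentTimeMillis());
                cache.put(key, e); // stale entry replaced; a brand-new client is returned
            }
            return e.client;
        }
    }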

Added: hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/data/HCatDataCheckUtil.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/data/HCatDataCheckUtil.java?rev=1520466&view=auto
==============================================================================
--- hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/data/HCatDataCheckUtil.java (added)
+++ hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/data/HCatDataCheckUtil.java Fri Sep  6 00:49:14 2013
@@ -0,0 +1,114 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.hcatalog.data;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map.Entry;
+
+import org.apache.hadoop.hive.cli.CliSessionState;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.CommandNeedRetryException;
+import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hcatalog.MiniCluster;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Helper methods shared by the data tests: driver setup, test-data generation,
+ * table create/drop, and record comparison.
+ */
+public class HCatDataCheckUtil {
+
+    private static final Logger LOG = LoggerFactory.getLogger(HCatDataCheckUtil.class);
+
+    public static Driver instantiateDriver(MiniCluster cluster) {
+        HiveConf hiveConf = new HiveConf(HCatDataCheckUtil.class);
+        for (Entry<Object, Object> e : cluster.getProperties().entrySet()) {
+            hiveConf.set(e.getKey().toString(), e.getValue().toString());
+        }
+        hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
+        hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
+        hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
+
+        LOG.debug("Hive conf : {}", hiveConf.getAllProperties());
+        Driver driver = new Driver(hiveConf);
+        SessionState.start(new CliSessionState(hiveConf));
+        return driver;
+    }
+
+    public static void generateDataFile(MiniCluster cluster, String fileName) throws IOException {
+        MiniCluster.deleteFile(cluster, fileName);
+        String[] input = new String[50];
+        for (int i = 0; i < 50; i++) {
+            input[i] = (i % 5) + "\t" + i + "\t" + "_S" + i + "S_";
+        }
+        MiniCluster.createInputFile(cluster, fileName, input);
+    }
+
+    public static void createTable(Driver driver, String tableName, String createTableArgs)
+        throws CommandNeedRetryException, IOException {
+        String createTable = "create table " + tableName + createTableArgs;
+        int retCode = driver.run(createTable).getResponseCode();
+        if (retCode != 0) {
+            throw new IOException("Failed to create table. [" + createTable + "], return code from hive driver : [" + retCode + "]");
+        }
+    }
+
+    public static void dropTable(Driver driver, String tablename) throws IOException, CommandNeedRetryException {
+        driver.run("drop table if exists " + tablename);
+    }
+
+    public static ArrayList<String> formattedRun(Driver driver, String name, String selectCmd)
+        throws CommandNeedRetryException, IOException {
+        driver.run(selectCmd);
+        ArrayList<String> srcValues = new ArrayList<String>();
+        driver.getResults(srcValues);
+        LOG.info("{} : {}", name, srcValues);
+        return srcValues;
+    }
+
+    public static boolean recordsEqual(HCatRecord first, HCatRecord second) {
+        return (compareRecords(first, second) == 0);
+    }
+
+    public static int compareRecords(HCatRecord first, HCatRecord second) {
+        return compareRecordContents(first.getAll(), second.getAll());
+    }
+
+    public static int compareRecordContents(List<Object> first, List<Object> second) {
+        int mySz = first.size();
+        int urSz = second.size();
+        if (mySz != urSz) {
+            return mySz - urSz;
+        } else {
+            for (int i = 0; i < first.size(); i++) {
+                int c = DataType.compare(first.get(i), second.get(i));
+                if (c != 0) {
+                    return c;
+                }
+            }
+            return 0;
+        }
+    }
+}
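
For context, a typical caller would wire these helpers together roughly as
follows; the cluster factory, table name, and file path are illustrative
placeholders, not part of the committed code:

    // Hypothetical usage of HCatDataCheckUtil; assumes MiniCluster exposes a
    // buildCluster() factory (names below are illustrative only).
    MiniCluster cluster = MiniCluster.buildCluster();
    Driver driver = HCatDataCheckUtil.instantiateDriver(cluster);

    // 50 rows shaped "(i % 5) \t i \t _SiS_", as produced by generateDataFile
    HCatDataCheckUtil.generateDataFile(cluster, "/tmp/hcat_check_input");

    HCatDataCheckUtil.dropTable(driver, "junit_hcat_check");
    HCatDataCheckUtil.createTable(driver, "junit_hcat_check",
        " (a int, b int, c string) row format delimited fields terminated by '\\t'");

    ArrayList<String> rows = HCatDataCheckUtil.formattedRun(driver,
        "sample select", "select * from junit_hcat_check");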

Added: hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/data/TestDefaultHCatRecord.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/data/TestDefaultHCatRecord.java?rev=1520466&view=auto
==============================================================================
--- hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/data/TestDefaultHCatRecord.java (added)
+++ hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/data/TestDefaultHCatRecord.java Fri Sep  6 00:49:14 2013
@@ -0,0 +1,260 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.hcatalog.data;
+
+import java.io.DataInput;
+import java.io.DataInputStream;
+import java.io.DataOutput;
+import java.io.DataOutputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.hcatalog.common.HCatException;
+import org.apache.hcatalog.data.schema.HCatSchema;
+import org.apache.hcatalog.data.schema.HCatSchemaUtils;
+
+import junit.framework.Assert;
+import junit.framework.TestCase;
+
+public class TestDefaultHCatRecord extends TestCase {
+
+    /**
+     * Round-trip test: write records out and read them back (read-your-writes).
+     */
+    public void testRYW() throws IOException {
+
+        File f = new File("binary.dat");
+        f.delete();
+        f.createNewFile();
+        f.deleteOnExit();
+
+        OutputStream fileOutStream = new FileOutputStream(f);
+        DataOutput outStream = new DataOutputStream(fileOutStream);
+
+        HCatRecord[] recs = getHCatRecords();
+        for (int i = 0; i < recs.length; i++) {
+            recs[i].write(outStream);
+        }
+        fileOutStream.flush();
+        fileOutStream.close();
+
+        InputStream fInStream = new FileInputStream(f);
+        DataInput inpStream = new DataInputStream(fInStream);
+
+        for (int i = 0; i < recs.length; i++) {
+            HCatRecord rec = new DefaultHCatRecord();
+            rec.readFields(inpStream);
+            Assert.assertTrue(HCatDataCheckUtil.recordsEqual(recs[i], rec));
+        }
+
+        Assert.assertEquals(fInStream.available(), 0);
+        fInStream.close();
+
+    }
+
+    public void testCompareTo() {
+        HCatRecord[] recs = getHCatRecords();
+        Assert.assertTrue(HCatDataCheckUtil.compareRecords(recs[0], recs[1]) == 0);
+        Assert.assertTrue(HCatDataCheckUtil.compareRecords(recs[4], recs[5]) == 0);
+    }
+
+    public void testEqualsObject() {
+
+        HCatRecord[] recs = getHCatRecords();
+        Assert.assertTrue(HCatDataCheckUtil.recordsEqual(recs[0], recs[1]));
+        Assert.assertTrue(HCatDataCheckUtil.recordsEqual(recs[4], recs[5]));
+    }
+
+    /**
+     * Test the typed get and set calls for primitive, binary, and string fields.
+     * @throws HCatException
+     */
+    public void testGetSetByType1() throws HCatException {
+        HCatRecord inpRec = getHCatRecords()[0];
+        HCatRecord newRec = new DefaultHCatRecord(inpRec.size());
+        HCatSchema hsch =
+                HCatSchemaUtils.getHCatSchema(
+                        "a:tinyint,b:smallint,c:int,d:bigint,e:float,f:double,g:boolean,h:string,i:binary,j:string");
+
+        newRec.setByte("a", hsch, inpRec.getByte("a", hsch));
+        newRec.setShort("b", hsch, inpRec.getShort("b", hsch));
+        newRec.setInteger("c", hsch, inpRec.getInteger("c", hsch));
+        newRec.setLong("d", hsch, inpRec.getLong("d", hsch));
+        newRec.setFloat("e", hsch, inpRec.getFloat("e", hsch));
+        newRec.setDouble("f", hsch, inpRec.getDouble("f", hsch));
+        newRec.setBoolean("g", hsch, inpRec.getBoolean("g", hsch));
+        newRec.setString("h", hsch, inpRec.getString("h", hsch));
+        newRec.setByteArray("i", hsch, inpRec.getByteArray("i", hsch));
+        newRec.setString("j", hsch, inpRec.getString("j", hsch));
+
+        Assert.assertTrue(HCatDataCheckUtil.recordsEqual(newRec, inpRec));
+    }
+
+    /**
+     * Test the typed get and set calls for complex fields (binary, map, list, struct).
+     * @throws HCatException
+     */
+    public void testGetSetByType2() throws HCatException {
+        HCatRecord inpRec = getGetSet2InpRec();
+
+        HCatRecord newRec = new DefaultHCatRecord(inpRec.size());
+        HCatSchema hsch =
+                HCatSchemaUtils.getHCatSchema("a:binary,b:map<string,string>,c:array<int>,d:struct<i:int>");
+
+        newRec.setByteArray("a", hsch, inpRec.getByteArray("a", hsch));
+        newRec.setMap("b", hsch, inpRec.getMap("b", hsch));
+        newRec.setList("c", hsch, inpRec.getList("c", hsch));
+        newRec.setStruct("d", hsch, inpRec.getStruct("d", hsch));
+
+        Assert.assertTrue(HCatDataCheckUtil.recordsEqual(newRec, inpRec));
+    }
+
+    private HCatRecord getGetSet2InpRec() {
+        List<Object> rlist = new ArrayList<Object>();
+
+        rlist.add(new byte[]{1, 2, 3});
+
+        Map<Short, String> mapcol = new HashMap<Short, String>(3);
+        mapcol.put(new Short("2"), "hcat is cool");
+        mapcol.put(new Short("3"), "is it?");
+        mapcol.put(new Short("4"), "or is it not?");
+        rlist.add(mapcol);
+
+        List<Integer> listcol = new ArrayList<Integer>();
+        listcol.add(314);
+        listcol.add(007);
+        rlist.add(listcol);//list
+        rlist.add(listcol);//struct
+        return new DefaultHCatRecord(rlist);
+    }
+
+    private HCatRecord[] getHCatRecords() {
+
+        List<Object> rec_1 = new ArrayList<Object>(8);
+        rec_1.add(new Byte("123"));
+        rec_1.add(new Short("456"));
+        rec_1.add(new Integer(789));
+        rec_1.add(new Long(1000L));
+        rec_1.add(new Float(5.3F));
+        rec_1.add(new Double(5.3D));
+        rec_1.add(new Boolean(true));
+        rec_1.add(new String("hcat and hadoop"));
+        rec_1.add(null);
+        rec_1.add("null");
+
+        HCatRecord tup_1 = new DefaultHCatRecord(rec_1);
+
+        List<Object> rec_2 = new ArrayList<Object>(8);
+        rec_2.add(new Byte("123"));
+        rec_2.add(new Short("456"));
+        rec_2.add(new Integer(789));
+        rec_2.add(new Long(1000L));
+        rec_2.add(new Float(5.3F));
+        rec_2.add(new Double(5.3D));
+        rec_2.add(new Boolean(true));
+        rec_2.add(new String("hcat and hadoop"));
+        rec_2.add(null);
+        rec_2.add("null");
+        HCatRecord tup_2 = new DefaultHCatRecord(rec_2);
+
+        List<Object> rec_3 = new ArrayList<Object>(10);
+        rec_3.add(new Byte("123"));
+        rec_3.add(new Short("456"));
+        rec_3.add(new Integer(789));
+        rec_3.add(new Long(1000L));
+        rec_3.add(new Double(5.3D));
+        rec_3.add(new String("hcat and hadoop"));
+        rec_3.add(null);
+        List<Integer> innerList = new ArrayList<Integer>();
+        innerList.add(314);
+        innerList.add(007);
+        rec_3.add(innerList);
+        Map<Short, String> map = new HashMap<Short, String>(3);
+        map.put(new Short("2"), "hcat is cool");
+        map.put(new Short("3"), "is it?");
+        map.put(new Short("4"), "or is it not?");
+        rec_3.add(map);
+
+        HCatRecord tup_3 = new DefaultHCatRecord(rec_3);
+
+        List<Object> rec_4 = new ArrayList<Object>(8);
+        rec_4.add(new Byte("123"));
+        rec_4.add(new Short("456"));
+        rec_4.add(new Integer(789));
+        rec_4.add(new Long(1000L));
+        rec_4.add(new Double(5.3D));
+        rec_4.add(new String("hcat and hadoop"));
+        rec_4.add(null);
+        rec_4.add("null");
+
+        Map<Short, String> map2 = new HashMap<Short, String>(3);
+        map2.put(new Short("2"), "hcat is cool");
+        map2.put(new Short("3"), "is it?");
+        map2.put(new Short("4"), "or is it not?");
+        rec_4.add(map2);
+        List<Integer> innerList2 = new ArrayList<Integer>();
+        innerList2.add(314);
+        innerList2.add(007);
+        rec_4.add(innerList2);
+        HCatRecord tup_4 = new DefaultHCatRecord(rec_4);
+
+        List<Object> rec_5 = new ArrayList<Object>(3);
+        rec_5.add(getByteArray());
+        rec_5.add(getStruct());
+        rec_5.add(getList());
+        HCatRecord tup_5 = new DefaultHCatRecord(rec_5);
+
+        List<Object> rec_6 = new ArrayList<Object>(3);
+        rec_6.add(getByteArray());
+        rec_6.add(getStruct());
+        rec_6.add(getList());
+        HCatRecord tup_6 = new DefaultHCatRecord(rec_6);
+
+        return new HCatRecord[]{tup_1, tup_2, tup_3, tup_4, tup_5, tup_6};
+
+    }
+
+    private Object getList() {
+        return getStruct();
+    }
+
+    private Object getByteArray() {
+        return new byte[]{1, 2, 3, 4};
+    }
+
+    private List<?> getStruct() {
+        List<Object> struct = new ArrayList<Object>();
+        struct.add(new Integer(1));
+        struct.add(new String("x"));
+        return struct;
+    }
+}
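
The file-based round trip in testRYW generalizes to any DataOutput/DataInput
pair, since DefaultHCatRecord relies only on the Writable-style write() and
readFields() calls. A compact in-memory variant (a sketch using only those two
calls; the helper name is illustrative):

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;

    // Serialize an HCatRecord to a byte[] and back without touching the filesystem.
    public static HCatRecord roundTrip(HCatRecord rec) throws IOException {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(bytes);
        rec.write(out); // same contract exercised by testRYW
        out.flush();

        HCatRecord copy = new DefaultHCatRecord();
        copy.readFields(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));
        return copy;
    }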

Added: hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/data/TestHCatRecordSerDe.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/data/TestHCatRecordSerDe.java?rev=1520466&view=auto
==============================================================================
--- hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/data/TestHCatRecordSerDe.java (added)
+++ hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/data/TestHCatRecordSerDe.java Fri Sep  6 00:49:14 2013
@@ -0,0 +1,169 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.hcatalog.data;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Properties;
+
+import junit.framework.Assert;
+import junit.framework.TestCase;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.serde.serdeConstants;
+import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
+import org.apache.hadoop.io.Writable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/** Round-trip tests for HCatRecordSerDe, against itself and against LazySimpleSerDe. */
+public class TestHCatRecordSerDe extends TestCase {
+
+    private static final Logger LOG = LoggerFactory.getLogger(TestHCatRecordSerDe.class);
+
+    public Map<Properties, HCatRecord> getData() {
+        Map<Properties, HCatRecord> data = new HashMap<Properties, HCatRecord>();
+
+        List<Object> rlist = new ArrayList<Object>(11);
+        rlist.add(new Byte("123"));
+        rlist.add(new Short("456"));
+        rlist.add(new Integer(789));
+        rlist.add(new Long(1000L));
+        rlist.add(new Double(5.3D));
+        rlist.add(new Float(2.39F));
+        rlist.add(new String("hcat and hadoop"));
+        rlist.add(null);
+
+        List<Object> innerStruct = new ArrayList<Object>(2);
+        innerStruct.add(new String("abc"));
+        innerStruct.add(new String("def"));
+        rlist.add(innerStruct);
+
+        List<Integer> innerList = new ArrayList<Integer>();
+        innerList.add(314);
+        innerList.add(007);
+        rlist.add(innerList);
+
+        Map<Short, String> map = new HashMap<Short, String>(3);
+        map.put(new Short("2"), "hcat is cool");
+        map.put(new Short("3"), "is it?");
+        map.put(new Short("4"), "or is it not?");
+        rlist.add(map);
+
+        rlist.add(new Boolean(true));
+
+        List<Object> c1 = new ArrayList<Object>();
+        List<Object> c1_1 = new ArrayList<Object>();
+        c1_1.add(new Integer(12));
+        List<Object> i2 = new ArrayList<Object>();
+        List<Integer> ii1 = new ArrayList<Integer>();
+        ii1.add(new Integer(13));
+        ii1.add(new Integer(14));
+        i2.add(ii1);
+        Map<String, List<?>> ii2 = new HashMap<String, List<?>>();
+        List<Integer> iii1 = new ArrayList<Integer>();
+        iii1.add(new Integer(15));
+        ii2.put("phew", iii1);
+        i2.add(ii2);
+        c1_1.add(i2);
+        c1.add(c1_1);
+        rlist.add(c1);
+        List<Object> am = new ArrayList<Object>();
+        Map<String, String> am_1 = new HashMap<String, String>();
+        am_1.put("noo", "haha");
+        am.add(am_1);
+        rlist.add(am);
+        List<Object> aa = new ArrayList<Object>();
+        List<String> aa_1 = new ArrayList<String>();
+        aa_1.add("bloo");
+        aa_1.add("bwahaha");
+        aa.add(aa_1);
+        rlist.add(aa);
+
+        String typeString =
+                "tinyint,smallint,int,bigint,double,float,string,string,"
+                        + "struct<a:string,b:string>,array<int>,map<smallint,string>,boolean,"
+                        + "array<struct<i1:int,i2:struct<ii1:array<int>,ii2:map<string,struct<iii1:int>>>>>,"
+                        + "array<map<string,string>>,array<array<string>>";
+        Properties props = new Properties();
+
+        props.put(serdeConstants.LIST_COLUMNS, "ti,si,i,bi,d,f,s,n,r,l,m,b,c1,am,aa");
+        props.put(serdeConstants.LIST_COLUMN_TYPES, typeString);
+//    props.put(Constants.SERIALIZATION_NULL_FORMAT, "\\N");
+//    props.put(Constants.SERIALIZATION_FORMAT, "1");
+
+        data.put(props, new DefaultHCatRecord(rlist));
+        return data;
+    }
+
+    public void testRW() throws Exception {
+
+        Configuration conf = new Configuration();
+
+        for (Entry<Properties, HCatRecord> e : getData().entrySet()) {
+            Properties tblProps = e.getKey();
+            HCatRecord r = e.getValue();
+
+            HCatRecordSerDe hrsd = new HCatRecordSerDe();
+            hrsd.initialize(conf, tblProps);
+
+            LOG.info("ORIG: {}", r);
+
+            Writable s = hrsd.serialize(r, hrsd.getObjectInspector());
+            LOG.info("ONE: {}", s);
+
+            HCatRecord r2 = (HCatRecord) hrsd.deserialize(s);
+            Assert.assertTrue(HCatDataCheckUtil.recordsEqual(r, r2));
+
+            // If serialization went through correctly, then s is itself an HCatRecord:
+            // a deep copy equal to the original. This holds through further levels of
+            // serialization as well.
+
+            Writable s2 = hrsd.serialize(s, hrsd.getObjectInspector());
+            LOG.info("TWO: {}", s2);
+            Assert.assertTrue(HCatDataCheckUtil.recordsEqual(r, (HCatRecord) s));
+            Assert.assertTrue(HCatDataCheckUtil.recordsEqual(r, (HCatRecord) s2));
+
+            // Serialize using another SerDe and read back that object representation.
+            LazySimpleSerDe testSD = new LazySimpleSerDe();
+            testSD.initialize(conf, tblProps);
+
+            Writable s3 = testSD.serialize(s, hrsd.getObjectInspector());
+            LOG.info("THREE: {}", s3);
+            Object o3 = testSD.deserialize(s3);
+            Assert.assertFalse(r.getClass().equals(o3.getClass()));
+
+            // Then serialize again using hrsd and compare the results.
+            HCatRecord s4 = (HCatRecord) hrsd.serialize(o3, testSD.getObjectInspector());
+            LOG.info("FOUR: {}", s4);
+
+            // Test LazyHCatRecord init and read
+            LazyHCatRecord s5 = new LazyHCatRecord(o3, testSD.getObjectInspector());
+            LOG.info("FIVE: {}", s5);
+
+            LazyHCatRecord s6 = new LazyHCatRecord(s4, hrsd.getObjectInspector());
+            LOG.info("SIX: {}", s6);
+
+        }
+
+    }
+
+}
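
Outside of JUnit, the round trip exercised by testRW reduces to a few calls.
A sketch assuming only the initialize/serialize/deserialize signatures used
above; the column names and values are placeholders:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.Properties;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.serde.serdeConstants;
    import org.apache.hadoop.io.Writable;

    // Minimal HCatRecordSerDe round trip, mirroring the first leg of testRW.
    public static HCatRecord serDeRoundTrip() throws Exception {
        Properties tblProps = new Properties();
        tblProps.put(serdeConstants.LIST_COLUMNS, "i,s");
        tblProps.put(serdeConstants.LIST_COLUMN_TYPES, "int,string");

        HCatRecordSerDe serde = new HCatRecordSerDe();
        serde.initialize(new Configuration(), tblProps);

        HCatRecord rec = new DefaultHCatRecord(
            new ArrayList<Object>(Arrays.<Object>asList(42, "forty-two")));
        Writable onWire = serde.serialize(rec, serde.getObjectInspector());
        return (HCatRecord) serde.deserialize(onWire);
    }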