Posted to hcatalog-commits@incubator.apache.org by to...@apache.org on 2012/05/01 16:26:31 UTC
svn commit: r1332740 [1/2] - in /incubator/hcatalog/branches/branch-0.4: ./ src/java/org/apache/hcatalog/cli/ src/java/org/apache/hcatalog/cli/SemanticAnalysis/ src/java/org/apache/hcatalog/common/ src/java/org/apache/hcatalog/data/ src/java/org/apache...
Author: toffer
Date: Tue May 1 16:26:30 2012
New Revision: 1332740
URL: http://svn.apache.org/viewvc?rev=1332740&view=rev
Log:
merged from trunk: HCATALOG-68 Logging from HCat (avandana via toffer)
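
HCATALOG-68 replaces commons-logging with SLF4J throughout HCat. As a minimal before/after sketch of the pattern the hunks below apply (class names are illustrative, not from the patch):

    import org.apache.commons.logging.Log;
    import org.apache.commons.logging.LogFactory;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    // Before: commons-logging builds the message string eagerly, so call
    // sites guard expensive messages with isDebugEnabled().
    class BeforeSlf4j {
        private static final Log LOG = LogFactory.getLog(BeforeSlf4j.class);
        void work(String table) {
            if (LOG.isDebugEnabled()) {
                LOG.debug("processing table " + table);
            }
        }
    }

    // After: SLF4J's {} placeholder defers message formatting until the
    // level is known to be enabled, so the guard can be dropped.
    class AfterSlf4j {
        private static final Logger LOG = LoggerFactory.getLogger(AfterSlf4j.class);
        void work(String table) {
            LOG.debug("processing table {}", table);
        }
    }
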
Modified:
incubator/hcatalog/branches/branch-0.4/ (props changed)
incubator/hcatalog/branches/branch-0.4/CHANGES.txt
incubator/hcatalog/branches/branch-0.4/ivy.xml
incubator/hcatalog/branches/branch-0.4/src/java/org/apache/hcatalog/cli/HCatCli.java
incubator/hcatalog/branches/branch-0.4/src/java/org/apache/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java
incubator/hcatalog/branches/branch-0.4/src/java/org/apache/hcatalog/common/HCatUtil.java
incubator/hcatalog/branches/branch-0.4/src/java/org/apache/hcatalog/data/HCatRecordObjectInspector.java
incubator/hcatalog/branches/branch-0.4/src/java/org/apache/hcatalog/data/HCatRecordObjectInspectorFactory.java
incubator/hcatalog/branches/branch-0.4/src/java/org/apache/hcatalog/data/HCatRecordSerDe.java
incubator/hcatalog/branches/branch-0.4/src/java/org/apache/hcatalog/data/JsonSerDe.java
incubator/hcatalog/branches/branch-0.4/src/java/org/apache/hcatalog/mapreduce/DefaultOutputCommitterContainer.java
incubator/hcatalog/branches/branch-0.4/src/java/org/apache/hcatalog/mapreduce/HCatRecordReader.java
incubator/hcatalog/branches/branch-0.4/src/java/org/apache/hcatalog/mapreduce/HCatSplit.java
incubator/hcatalog/branches/branch-0.4/src/java/org/apache/hcatalog/mapreduce/InitializeInput.java
incubator/hcatalog/branches/branch-0.4/src/test/org/apache/hcatalog/cli/TestEximSemanticAnalysis.java
incubator/hcatalog/branches/branch-0.4/src/test/org/apache/hcatalog/cli/TestPermsGrp.java
incubator/hcatalog/branches/branch-0.4/src/test/org/apache/hcatalog/cli/TestSemanticAnalysis.java
incubator/hcatalog/branches/branch-0.4/src/test/org/apache/hcatalog/data/HCatDataCheckUtil.java
incubator/hcatalog/branches/branch-0.4/src/test/org/apache/hcatalog/data/TestHCatRecordSerDe.java
incubator/hcatalog/branches/branch-0.4/src/test/org/apache/hcatalog/data/TestJsonSerDe.java
incubator/hcatalog/branches/branch-0.4/src/test/org/apache/hcatalog/data/schema/TestHCatSchemaUtils.java
incubator/hcatalog/branches/branch-0.4/src/test/org/apache/hcatalog/mapreduce/HCatMapReduceTest.java
incubator/hcatalog/branches/branch-0.4/src/test/org/apache/hcatalog/mapreduce/TestHCatDynamicPartitioned.java
incubator/hcatalog/branches/branch-0.4/src/test/org/apache/hcatalog/mapreduce/TestHCatOutputFormat.java
incubator/hcatalog/branches/branch-0.4/src/test/org/apache/hcatalog/pig/TestHCatLoaderComplexSchema.java
incubator/hcatalog/branches/branch-0.4/src/test/org/apache/hcatalog/rcfile/TestRCFileMapReduceInputFormat.java
Propchange: incubator/hcatalog/branches/branch-0.4/
------------------------------------------------------------------------------
Merged /incubator/hcatalog/trunk:r1332736
Modified: incubator/hcatalog/branches/branch-0.4/CHANGES.txt
URL: http://svn.apache.org/viewvc/incubator/hcatalog/branches/branch-0.4/CHANGES.txt?rev=1332740&r1=1332739&r2=1332740&view=diff
==============================================================================
--- incubator/hcatalog/branches/branch-0.4/CHANGES.txt (original)
+++ incubator/hcatalog/branches/branch-0.4/CHANGES.txt Tue May 1 16:26:30 2012
@@ -80,6 +80,8 @@ Release 0.4.0 - Unreleased
HCAT-2 Support nested schema conversion between Hive and Pig (julienledem via hashutosh)
IMPROVEMENTS
+ HCAT-68 Logging from HCat (avandana via toffer)
+
HCAT-383 Add clover to build.xml (gates)
HCAT-372 Add filter information to Load/Store and Input/Output docs (lefty via gates)
Modified: incubator/hcatalog/branches/branch-0.4/ivy.xml
URL: http://svn.apache.org/viewvc/incubator/hcatalog/branches/branch-0.4/ivy.xml?rev=1332740&r1=1332739&r2=1332740&view=diff
==============================================================================
--- incubator/hcatalog/branches/branch-0.4/ivy.xml (original)
+++ incubator/hcatalog/branches/branch-0.4/ivy.xml Tue May 1 16:26:30 2012
@@ -60,7 +60,9 @@
rev="${hadoop-test.version}" conf="common->master" />
<dependency org="javax.jms" name="jms" rev="${jms.version}"
conf="common->master" />
- <dependency org="org.apache.activemq" name="activemq-all"
+ <dependency org="org.apache.activemq" name="activemq-core"
+ rev="${activemq.version}" conf="common->master" />
+ <dependency org="org.apache.activemq" name="kahadb"
rev="${activemq.version}" conf="common->master" />
<dependency org="javax.management.j2ee" name="management-api"
rev="${javax-mgmt.version}" conf="common->master" />
Modified: incubator/hcatalog/branches/branch-0.4/src/java/org/apache/hcatalog/cli/HCatCli.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/branches/branch-0.4/src/java/org/apache/hcatalog/cli/HCatCli.java?rev=1332740&r1=1332739&r2=1332740&view=diff
==============================================================================
--- incubator/hcatalog/branches/branch-0.4/src/java/org/apache/hcatalog/cli/HCatCli.java (original)
+++ incubator/hcatalog/branches/branch-0.4/src/java/org/apache/hcatalog/cli/HCatCli.java Tue May 1 16:26:30 2012
@@ -139,7 +139,7 @@ public class HCatCli {
}
if (execString != null && fileName != null) {
- System.err.println("The '-e' and '-f' options cannot be specified simultaneously");
+ ss.err.println("The '-e' and '-f' options cannot be specified simultaneously");
printUsage(options,ss.err);
System.exit(1);
}
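
The HCatCli hunk above routes the error message through the session's error stream rather than the process-wide System.err, so it follows any per-session redirection. A small sketch of the idea, assuming Hive's CliSessionState (the log file name is illustrative):

    import java.io.PrintStream;
    import org.apache.hadoop.hive.cli.CliSessionState;
    import org.apache.hadoop.hive.conf.HiveConf;

    class SessionErrSketch {
        public static void main(String[] args) throws Exception {
            CliSessionState ss = new CliSessionState(new HiveConf());
            // ss.err defaults to System.err, but a caller or test can point it
            // elsewhere without touching the global stream.
            ss.err = new PrintStream("hcat-errors.log");
            ss.err.println("The '-e' and '-f' options cannot be specified simultaneously");
        }
    }
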
Modified: incubator/hcatalog/branches/branch-0.4/src/java/org/apache/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/branches/branch-0.4/src/java/org/apache/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java?rev=1332740&r1=1332739&r2=1332740&view=diff
==============================================================================
--- incubator/hcatalog/branches/branch-0.4/src/java/org/apache/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java (original)
+++ incubator/hcatalog/branches/branch-0.4/src/java/org/apache/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java Tue May 1 16:26:30 2012
@@ -20,8 +20,6 @@ package org.apache.hcatalog.cli.Semantic
import java.io.Serializable;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.metadata.Hive;
@@ -55,7 +53,7 @@ public class HCatSemanticAnalyzer extend
private AbstractSemanticAnalyzerHook hook;
private ASTNode ast;
- private static final Log LOG = LogFactory.getLog(HCatSemanticAnalyzer.class);
+
@Override
public ASTNode preAnalyze(HiveSemanticAnalyzerHookContext context, ASTNode ast)
@@ -72,7 +70,7 @@ public class HCatSemanticAnalyzer extend
case HiveParser.TOK_CREATEDATABASE:
hook = new CreateDatabaseHook();
return hook.preAnalyze(context, ast);
-
+
case HiveParser.TOK_ALTERTABLE_PARTITION:
if (((ASTNode)ast.getChild(1)).getToken().getType() == HiveParser.TOK_ALTERTABLE_FILEFORMAT) {
return ast;
@@ -82,7 +80,7 @@ public class HCatSemanticAnalyzer extend
} else {
return ast;
}
-
+
// HCat will allow these operations to be performed.
// Database DDL
case HiveParser.TOK_SHOWDATABASES:
@@ -96,19 +94,19 @@ public class HCatSemanticAnalyzer extend
case HiveParser.TOK_CREATEINDEX:
case HiveParser.TOK_DROPINDEX:
case HiveParser.TOK_SHOWINDEXES:
-
+
// View DDL
// "alter view add partition" does not work because of the nature of implementation
- // of the DDL in hive. Hive will internally invoke another Driver on the select statement,
+ // of the DDL in hive. Hive will internally invoke another Driver on the select statement,
// and HCat does not let "select" statement through. I cannot find a way to get around it
- // without modifying hive code. So just leave it unsupported.
+ // without modifying hive code. So just leave it unsupported.
//case HiveParser.TOK_ALTERVIEW_ADDPARTS:
case HiveParser.TOK_ALTERVIEW_DROPPARTS:
case HiveParser.TOK_ALTERVIEW_PROPERTIES:
case HiveParser.TOK_ALTERVIEW_RENAME:
case HiveParser.TOK_CREATEVIEW:
case HiveParser.TOK_DROPVIEW:
-
+
// Authorization DDL
case HiveParser.TOK_CREATEROLE:
case HiveParser.TOK_DROPROLE:
@@ -119,7 +117,7 @@ public class HCatSemanticAnalyzer extend
case HiveParser.TOK_REVOKE:
case HiveParser.TOK_SHOW_GRANT:
case HiveParser.TOK_SHOW_ROLE_GRANT:
-
+
// Misc DDL
case HiveParser.TOK_LOCKTABLE:
case HiveParser.TOK_UNLOCKTABLE:
@@ -127,7 +125,7 @@ public class HCatSemanticAnalyzer extend
case HiveParser.TOK_DESCFUNCTION:
case HiveParser.TOK_SHOWFUNCTIONS:
case HiveParser.TOK_EXPLAIN:
-
+
// Table DDL
case HiveParser.TOK_ALTERTABLE_ADDPARTS:
case HiveParser.TOK_ALTERTABLE_ADDCOLS:
@@ -166,7 +164,7 @@ public class HCatSemanticAnalyzer extend
case HiveParser.TOK_CREATETABLE:
case HiveParser.TOK_CREATEDATABASE:
case HiveParser.TOK_ALTERTABLE_PARTITION:
-
+
// HCat will allow these operations to be performed.
// Database DDL
case HiveParser.TOK_SHOWDATABASES:
@@ -180,7 +178,7 @@ public class HCatSemanticAnalyzer extend
case HiveParser.TOK_CREATEINDEX:
case HiveParser.TOK_DROPINDEX:
case HiveParser.TOK_SHOWINDEXES:
-
+
// View DDL
//case HiveParser.TOK_ALTERVIEW_ADDPARTS:
case HiveParser.TOK_ALTERVIEW_DROPPARTS:
@@ -188,7 +186,7 @@ public class HCatSemanticAnalyzer extend
case HiveParser.TOK_ALTERVIEW_RENAME:
case HiveParser.TOK_CREATEVIEW:
case HiveParser.TOK_DROPVIEW:
-
+
// Authorization DDL
case HiveParser.TOK_CREATEROLE:
case HiveParser.TOK_DROPROLE:
@@ -199,7 +197,7 @@ public class HCatSemanticAnalyzer extend
case HiveParser.TOK_REVOKE:
case HiveParser.TOK_SHOW_GRANT:
case HiveParser.TOK_SHOW_ROLE_GRANT:
-
+
// Misc DDL
case HiveParser.TOK_LOCKTABLE:
case HiveParser.TOK_UNLOCKTABLE:
@@ -207,7 +205,7 @@ public class HCatSemanticAnalyzer extend
case HiveParser.TOK_DESCFUNCTION:
case HiveParser.TOK_SHOWFUNCTIONS:
case HiveParser.TOK_EXPLAIN:
-
+
// Table DDL
case HiveParser.TOK_ALTERTABLE_ADDPARTS:
case HiveParser.TOK_ALTERTABLE_ADDCOLS:
@@ -231,9 +229,9 @@ public class HCatSemanticAnalyzer extend
default:
throw new HCatException(ErrorType.ERROR_INTERNAL_EXCEPTION, "Unexpected token: "+ast.getToken());
}
-
+
authorizeDDL(context, rootTasks);
-
+
} catch(HCatException e){
throw new SemanticException(e);
} catch (HiveException e) {
@@ -295,7 +293,7 @@ public class HCatSemanticAnalyzer extend
DropTableDesc dropTable = work.getDropTblDesc();
if (dropTable != null) {
if (dropTable.getPartSpecs() == null) {
- // drop table is already enforced by Hive. We only check for table level location even if the
+ // drop table is already enforced by Hive. We only check for table level location even if the
// table is partitioned.
} else {
//this is actually a ALTER TABLE DROP PARITITION statement
@@ -326,9 +324,9 @@ public class HCatSemanticAnalyzer extend
}
String newLocation = alterTable.getNewLocation();
-
- /* Hcat requires ALTER_DATA privileges for ALTER TABLE LOCATION statements
- * for the old table/partition location and the new location.
+
+ /* Hcat requires ALTER_DATA privileges for ALTER TABLE LOCATION statements
+ * for the old table/partition location and the new location.
*/
if (alterTable.getOp() == AlterTableDesc.AlterTableTypes.ALTERLOCATION) {
if (part != null) {
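
Aside from the removed LOG field, this file's hunks are trailing-whitespace cleanup around the analyzer's whitelist, which leans on Java case fall-through: every permitted DDL token stacks onto one branch, and anything reaching default is rejected. A compressed sketch of that pattern (only three of the tokens shown):

    import org.apache.hadoop.hive.ql.parse.HiveParser;
    import org.apache.hcatalog.common.ErrorType;
    import org.apache.hcatalog.common.HCatException;

    class WhitelistSketch {
        static void check(int tokenType) throws HCatException {
            switch (tokenType) {
                // Stacked labels: all whitelisted operations fall through to
                // the same empty branch; authorizeDDL(...) would run afterwards.
                case HiveParser.TOK_SHOWDATABASES:
                case HiveParser.TOK_SHOWTABLES:
                case HiveParser.TOK_EXPLAIN:
                    break;
                default:
                    throw new HCatException(ErrorType.ERROR_INTERNAL_EXCEPTION,
                            "Unexpected token type: " + tokenType);
            }
        }
    }
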
Modified: incubator/hcatalog/branches/branch-0.4/src/java/org/apache/hcatalog/common/HCatUtil.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/branches/branch-0.4/src/java/org/apache/hcatalog/common/HCatUtil.java?rev=1332740&r1=1332739&r2=1332740&view=diff
==============================================================================
--- incubator/hcatalog/branches/branch-0.4/src/java/org/apache/hcatalog/common/HCatUtil.java (original)
+++ incubator/hcatalog/branches/branch-0.4/src/java/org/apache/hcatalog/common/HCatUtil.java Tue May 1 16:26:30 2012
@@ -26,21 +26,15 @@ import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.HashMap;
-import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
-import java.util.Map.Entry;
import java.util.Properties;
-import java.util.Set;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.hive.common.JavaUtils;
import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.MetaStoreUtils;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
@@ -49,8 +43,6 @@ import org.apache.hadoop.hive.metastore.
import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat;
import org.apache.hadoop.hive.ql.plan.TableDesc;
-import org.apache.hadoop.hive.serde2.SerDe;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.hive.thrift.DelegationTokenIdentifier;
@@ -59,12 +51,8 @@ import org.apache.hadoop.mapred.JobClien
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.security.token.Token;
-import org.apache.hadoop.security.token.TokenIdentifier;
import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenIdentifier;
import org.apache.hadoop.util.ReflectionUtils;
-import org.apache.hcatalog.data.DataType;
-import org.apache.hcatalog.data.HCatRecord;
-import org.apache.hcatalog.data.HCatRecordSerDe;
import org.apache.hcatalog.data.Pair;
import org.apache.hcatalog.data.schema.HCatFieldSchema;
import org.apache.hcatalog.data.schema.HCatSchema;
@@ -77,10 +65,12 @@ import org.apache.hcatalog.mapreduce.Out
import org.apache.hcatalog.mapreduce.PartInfo;
import org.apache.hcatalog.mapreduce.StorerInfo;
import org.apache.thrift.TException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
public class HCatUtil {
- static final private Log LOG = LogFactory.getLog(HCatUtil.class);
+ private static final Logger LOG = LoggerFactory.getLogger(HCatUtil.class);
public static boolean checkJobContextIfRunningFromBackend(JobContext j) {
if (j.getConfiguration().get("mapred.task.id", "").equals("")) {
@@ -370,72 +360,6 @@ public class HCatUtil {
}
/**
- * Logging stack trace
- *
- * @param logger
- */
- public static void logStackTrace(Log logger) {
- StackTraceElement[] stackTrace = new Exception().getStackTrace();
- for (int i = 1; i < stackTrace.length; i++) {
- logger.debug("\t" + stackTrace[i].toString());
- }
- }
-
- /**
- * debug log the hive conf
- *
- * @param logger
- * @param hc
- */
- public static void logHiveConf(Log logger, HiveConf hc) {
- logEntrySet(logger, "logging hiveconf:", hc.getAllProperties()
- .entrySet());
- }
-
- public static void logList(Log logger, String itemName,
- List<? extends Object> list) {
- logger.debug(itemName + ":");
- for (Object item : list) {
- logger.debug("\t[" + item + "]");
- }
- }
-
- public static void logMap(Log logger, String itemName,
- Map<? extends Object, ? extends Object> map) {
- logEntrySet(logger, itemName, map.entrySet());
- }
-
- public static void logEntrySet(Log logger, String itemName,
- Set<? extends Entry> entrySet) {
- logIterableSet(logger,itemName,entrySet.iterator());
- }
-
- public static void logIterableSet(Log logger, String itemName, Iterator<? extends Entry> iterator){
- logger.info(itemName + ":");
- while (iterator.hasNext()){
- Entry e = iterator.next();
- logger.debug("\t[" + e.getKey() + "]=>[" + e.getValue() + "]");
- }
- }
-
- public static void logAllTokens(Log logger, JobContext context)
- throws IOException {
- for (Token<? extends TokenIdentifier> t : context.getCredentials()
- .getAllTokens()) {
- logToken(logger, "token", t);
- }
- }
-
- public static void logToken(Log logger, String itemName,
- Token<? extends TokenIdentifier> t) throws IOException {
- logger.info(itemName + ":");
- logger.info("\tencodeToUrlString : " + t.encodeToUrlString());
- logger.info("\ttoString : " + t.toString());
- logger.info("\tkind : " + t.getKind());
- logger.info("\tservice : " + t.getService());
- }
-
- /**
* Create an instance of a storage handler defined in storerInfo. If one cannot be found
* then FosterStorageHandler is used to encapsulate the InputFormat, OutputFormat and SerDe.
* This StorageHandler assumes the other supplied storage artifacts are for a file-based storage system.
@@ -451,12 +375,12 @@ public class HCatUtil {
storerInfo.getIfClass(),
storerInfo.getOfClass());
}
-
+
public static HCatStorageHandler getStorageHandler(Configuration conf, PartInfo partitionInfo) throws IOException {
return HCatUtil.getStorageHandler(
- conf,
- partitionInfo.getStorageHandlerClassName(),
- partitionInfo.getSerdeClassName(),
+ conf,
+ partitionInfo.getStorageHandlerClassName(),
+ partitionInfo.getSerdeClassName(),
partitionInfo.getInputFormatClassName(),
partitionInfo.getOutputFormatClassName());
}
@@ -477,7 +401,7 @@ public class HCatUtil {
String storageHandler,
String serDe,
String inputFormat,
- String outputFormat)
+ String outputFormat)
throws IOException {
if ((storageHandler == null) || (storageHandler.equals(FosterStorageHandler.class.getName()))){
@@ -493,7 +417,7 @@ public class HCatUtil {
}
try {
- Class<? extends HCatStorageHandler> handlerClass =
+ Class<? extends HCatStorageHandler> handlerClass =
(Class<? extends HCatStorageHandler>) Class
.forName(storageHandler, true, JavaUtils.getClassLoader());
return (HCatStorageHandler)ReflectionUtils.newInstance(
@@ -530,26 +454,26 @@ public class HCatUtil {
Map<String,String> jobProperties = new HashMap<String,String>();
try {
tableDesc.getJobProperties().put(
- HCatConstants.HCAT_KEY_JOB_INFO,
+ HCatConstants.HCAT_KEY_JOB_INFO,
HCatUtil.serialize(inputJobInfo));
storageHandler.configureInputJobProperties(tableDesc,
- jobProperties);
+ jobProperties);
} catch (IOException e) {
throw new IllegalStateException(
"Failed to configure StorageHandler",e);
}
-
+
return jobProperties;
}
- public static void
+ public static void
configureOutputStorageHandler(HCatStorageHandler storageHandler,
JobContext context,
OutputJobInfo outputJobInfo) {
- //TODO replace IgnoreKeyTextOutputFormat with a
+ //TODO replace IgnoreKeyTextOutputFormat with a
//HiveOutputFormatWrapper in StorageHandler
TableDesc tableDesc = new TableDesc(storageHandler.getSerDeClass(),
storageHandler.getInputFormatClass(),
@@ -564,7 +488,7 @@ public class HCatUtil {
Map<String,String> jobProperties = new HashMap<String,String>();
try {
tableDesc.getJobProperties().put(
- HCatConstants.HCAT_KEY_OUTPUT_INFO,
+ HCatConstants.HCAT_KEY_OUTPUT_INFO,
HCatUtil.serialize(outputJobInfo));
storageHandler.configureOutputJobProperties(tableDesc,
@@ -606,7 +530,7 @@ public class HCatUtil {
}
- public static HiveConf getHiveConf(Configuration conf)
+ public static HiveConf getHiveConf(Configuration conf)
throws IOException {
HiveConf hiveConf = new HiveConf(conf, HCatUtil.class);
@@ -614,7 +538,7 @@ public class HCatUtil {
//copy the hive conf into the job conf and restore it
//in the backend context
if( conf.get(HCatConstants.HCAT_KEY_HIVE_CONF) == null ) {
- conf.set(HCatConstants.HCAT_KEY_HIVE_CONF,
+ conf.set(HCatConstants.HCAT_KEY_HIVE_CONF,
HCatUtil.serialize(hiveConf.getAllProperties()));
} else {
//Copy configuration properties into the hive conf
@@ -625,22 +549,22 @@ public class HCatUtil {
if( prop.getValue() instanceof String ) {
hiveConf.set((String) prop.getKey(), (String) prop.getValue());
} else if( prop.getValue() instanceof Integer ) {
- hiveConf.setInt((String) prop.getKey(),
+ hiveConf.setInt((String) prop.getKey(),
(Integer) prop.getValue());
} else if( prop.getValue() instanceof Boolean ) {
- hiveConf.setBoolean((String) prop.getKey(),
+ hiveConf.setBoolean((String) prop.getKey(),
(Boolean) prop.getValue());
} else if( prop.getValue() instanceof Long ) {
hiveConf.setLong((String) prop.getKey(), (Long) prop.getValue());
} else if( prop.getValue() instanceof Float ) {
- hiveConf.setFloat((String) prop.getKey(),
+ hiveConf.setFloat((String) prop.getKey(),
(Float) prop.getValue());
}
}
}
if(conf.get(HCatConstants.HCAT_KEY_TOKEN_SIGNATURE) != null) {
- hiveConf.set("hive.metastore.token.signature",
+ hiveConf.set("hive.metastore.token.signature",
conf.get(HCatConstants.HCAT_KEY_TOKEN_SIGNATURE));
}
@@ -648,7 +572,7 @@ public class HCatUtil {
}
- public static JobConf getJobConfFromContext(JobContext jobContext)
+ public static JobConf getJobConfFromContext(JobContext jobContext)
{
JobConf jobConf;
// we need to convert the jobContext into a jobConf
@@ -657,7 +581,7 @@ public class HCatUtil {
jobConf = new JobConf(jobContext.getConfiguration());
// ..end of conversion
-
+
return jobConf;
}
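
Most of the HCatUtil diff deletes the hand-rolled logging helpers (logEntrySet, logList, logToken, and friends): with SLF4J, their call sites collapse into single parameterized statements, as the SerDe hunks below show. A sketch of the substitution:

    import java.util.Properties;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class HelperReplacementSketch {
        private static final Logger LOG = LoggerFactory.getLogger(HelperReplacementSketch.class);

        static void dumpProps(Properties tbl) {
            // Previously: HCatUtil.logEntrySet(LOG, "props to serde", tbl.entrySet())
            // wrapped in an if (LOG.isDebugEnabled()) guard, one line per entry.
            // Now: the entrySet() argument is still evaluated, but rendering it
            // to a string and assembling the message are skipped unless DEBUG
            // is actually enabled.
            LOG.debug("props to serde: {}", tbl.entrySet());
        }
    }
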
Modified: incubator/hcatalog/branches/branch-0.4/src/java/org/apache/hcatalog/data/HCatRecordObjectInspector.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/branches/branch-0.4/src/java/org/apache/hcatalog/data/HCatRecordObjectInspector.java?rev=1332740&r1=1332739&r2=1332740&view=diff
==============================================================================
--- incubator/hcatalog/branches/branch-0.4/src/java/org/apache/hcatalog/data/HCatRecordObjectInspector.java (original)
+++ incubator/hcatalog/branches/branch-0.4/src/java/org/apache/hcatalog/data/HCatRecordObjectInspector.java Tue May 1 16:26:30 2012
@@ -19,38 +19,31 @@ package org.apache.hcatalog.data;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
-import org.apache.hcatalog.common.HCatException;
-import org.apache.hcatalog.common.HCatUtil;
public class HCatRecordObjectInspector extends StandardStructObjectInspector {
- public static final Log LOG = LogFactory
- .getLog(HCatRecordObjectInspector.class.getName());
-
protected HCatRecordObjectInspector(List<String> structFieldNames,
List<ObjectInspector> structFieldObjectInspectors) {
super(structFieldNames, structFieldObjectInspectors);
}
-
+
@Override
public Object getStructFieldData(Object data, StructField fieldRef) {
if (data == null){
throw new IllegalArgumentException("Data passed in to get field from was null!");
}
-
+
int fieldID = ((MyField) fieldRef).getFieldID();
if (!(fieldID >= 0 && fieldID < fields.size())){
throw new IllegalArgumentException("Invalid field index ["+fieldID+"]");
}
-
+
return ((HCatRecord) data).get(fieldID);
}
-
+
@Override
public List<Object> getStructFieldsDataAsList(Object o) {
return ((HCatRecord) o).getAll();
Modified: incubator/hcatalog/branches/branch-0.4/src/java/org/apache/hcatalog/data/HCatRecordObjectInspectorFactory.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/branches/branch-0.4/src/java/org/apache/hcatalog/data/HCatRecordObjectInspectorFactory.java?rev=1332740&r1=1332739&r2=1332740&view=diff
==============================================================================
--- incubator/hcatalog/branches/branch-0.4/src/java/org/apache/hcatalog/data/HCatRecordObjectInspectorFactory.java (original)
+++ incubator/hcatalog/branches/branch-0.4/src/java/org/apache/hcatalog/data/HCatRecordObjectInspectorFactory.java Tue May 1 16:26:30 2012
@@ -21,8 +21,6 @@ import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
@@ -32,23 +30,24 @@ import org.apache.hadoop.hive.serde2.typ
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* ObjectInspectorFactory for HCatRecordObjectInspectors (and associated helper inspectors)
*/
public class HCatRecordObjectInspectorFactory {
-
- public static final Log LOG = LogFactory
- .getLog(HCatRecordObjectInspectorFactory.class.getName());
- static HashMap<TypeInfo, HCatRecordObjectInspector> cachedHCatRecordObjectInspectors =
+ private final static Logger LOG = LoggerFactory.getLogger(HCatRecordObjectInspectorFactory.class);
+
+ static HashMap<TypeInfo, HCatRecordObjectInspector> cachedHCatRecordObjectInspectors =
new HashMap<TypeInfo, HCatRecordObjectInspector>();
- static HashMap<TypeInfo, ObjectInspector> cachedObjectInspectors =
+ static HashMap<TypeInfo, ObjectInspector> cachedObjectInspectors =
new HashMap<TypeInfo, ObjectInspector>();
/**
* Returns HCatRecordObjectInspector given a StructTypeInfo type definition for the record to look into
- * @param typeInfo Type definition for the record to look into
+ * @param typeInfo Type definition for the record to look into
* @return appropriate HCatRecordObjectInspector
* @throws SerDeException
*/
@@ -56,8 +55,8 @@ public class HCatRecordObjectInspectorFa
StructTypeInfo typeInfo) throws SerDeException {
HCatRecordObjectInspector oi = cachedHCatRecordObjectInspectors.get(typeInfo);
if (oi == null) {
- LOG.debug("Got asked for OI for "+typeInfo.getCategory()+"["+typeInfo.getTypeName()+"]");
+ LOG.debug("Got asked for OI for {} [{} ]",typeInfo.getCategory(),typeInfo.getTypeName());
switch (typeInfo.getCategory()) {
case STRUCT :
StructTypeInfo structTypeInfo = (StructTypeInfo) typeInfo;
@@ -70,9 +69,9 @@ public class HCatRecordObjectInspectorFa
oi = new HCatRecordObjectInspector(fieldNames,fieldObjectInspectors);
break;
- default:
- // Hmm.. not good,
- // the only type expected here is STRUCT, which maps to HCatRecord
+ default:
+ // Hmm.. not good,
+ // the only type expected here is STRUCT, which maps to HCatRecord
// - anything else is an error. Return null as the inspector.
throw new SerDeException("TypeInfo ["+typeInfo.getTypeName()
+ "] was not of struct type - HCatRecord expected struct type, got ["
@@ -84,12 +83,12 @@ public class HCatRecordObjectInspectorFa
}
public static ObjectInspector getStandardObjectInspectorFromTypeInfo(TypeInfo typeInfo) {
-
+
ObjectInspector oi = cachedObjectInspectors.get(typeInfo);
if (oi == null){
- LOG.debug("Got asked for OI for "+typeInfo.getCategory()+"["+typeInfo.getTypeName()+"]");
+ LOG.debug("Got asked for OI for {}, [{}]",typeInfo.getCategory(), typeInfo.getTypeName());
switch (typeInfo.getCategory()) {
case PRIMITIVE:
oi = PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(
@@ -99,7 +98,7 @@ public class HCatRecordObjectInspectorFa
StructTypeInfo structTypeInfo = (StructTypeInfo) typeInfo;
List<String> fieldNames = structTypeInfo.getAllStructFieldNames();
List<TypeInfo> fieldTypeInfos = structTypeInfo.getAllStructFieldTypeInfos();
- List<ObjectInspector> fieldObjectInspectors =
+ List<ObjectInspector> fieldObjectInspectors =
new ArrayList<ObjectInspector>(fieldTypeInfos.size());
for (int i = 0; i < fieldTypeInfos.size(); i++) {
fieldObjectInspectors.add(getStandardObjectInspectorFromTypeInfo(fieldTypeInfos.get(i)));
@@ -127,6 +126,6 @@ public class HCatRecordObjectInspectorFa
}
return oi;
}
-
-
+
+
}
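
For reference, the factory above memoizes inspectors in static HashMaps keyed by TypeInfo, so repeated requests for the same schema reuse a single ObjectInspector. A stripped-down, generic sketch of that lookup-or-create pattern (names are illustrative):

    import java.util.HashMap;
    import java.util.Map;

    class MemoCacheSketch<K, V> {
        interface Factory<K, V> { V create(K key); }

        private final Map<K, V> cache = new HashMap<K, V>();

        // Mirrors getHCatRecordObjectInspector: consult the map first, and
        // build and remember the value only on a miss.
        V get(K key, Factory<K, V> factory) {
            V value = cache.get(key);
            if (value == null) {
                value = factory.create(key);
                cache.put(key, value);
            }
            return value;
        }
    }
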
Modified: incubator/hcatalog/branches/branch-0.4/src/java/org/apache/hcatalog/data/HCatRecordSerDe.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/branches/branch-0.4/src/java/org/apache/hcatalog/data/HCatRecordSerDe.java?rev=1332740&r1=1332739&r2=1332740&view=diff
==============================================================================
--- incubator/hcatalog/branches/branch-0.4/src/java/org/apache/hcatalog/data/HCatRecordSerDe.java (original)
+++ incubator/hcatalog/branches/branch-0.4/src/java/org/apache/hcatalog/data/HCatRecordSerDe.java Tue May 1 16:26:30 2012
@@ -24,8 +24,6 @@ import java.util.Map;
import java.util.Properties;
import java.util.TreeMap;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.serde.Constants;
import org.apache.hadoop.hive.serde2.SerDe;
@@ -34,55 +32,53 @@ import org.apache.hadoop.hive.serde2.Ser
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.io.Writable;
-import org.apache.hcatalog.common.HCatUtil;
import org.apache.hcatalog.data.schema.HCatSchema;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* SerDe class for serializing to and from HCatRecord
*/
public class HCatRecordSerDe implements SerDe {
-
- public static final Log LOG = LogFactory
- .getLog(HCatRecordSerDe.class.getName());
+
+ private static final Logger LOG = LoggerFactory.getLogger(HCatRecordSerDe.class);
public HCatRecordSerDe() throws SerDeException{
}
-
+
private List<String> columnNames;
private List<TypeInfo> columnTypes;
private StructTypeInfo rowTypeInfo;
private HCatRecordObjectInspector cachedObjectInspector;
-
+
@Override
public void initialize(Configuration conf, Properties tbl)
throws SerDeException {
- if (LOG.isDebugEnabled()){
- LOG.debug("Initializing HCatRecordSerDe");
- HCatUtil.logEntrySet(LOG, "props to serde", tbl.entrySet());
- }
-
+ LOG.debug("Initializing HCatRecordSerDe");
+ LOG.debug("props to serde: {}",tbl.entrySet());
+
// Get column names and types
String columnNameProperty = tbl.getProperty(Constants.LIST_COLUMNS);
String columnTypeProperty = tbl.getProperty(Constants.LIST_COLUMN_TYPES);
-
+
// all table column names
if (columnNameProperty.length() == 0) {
columnNames = new ArrayList<String>();
} else {
columnNames = Arrays.asList(columnNameProperty.split(","));
}
-
+
// all column types
if (columnTypeProperty.length() == 0) {
columnTypes = new ArrayList<TypeInfo>();
@@ -90,40 +86,27 @@ public class HCatRecordSerDe implements
columnTypes = TypeInfoUtils.getTypeInfosFromTypeString(columnTypeProperty);
}
- if (LOG.isDebugEnabled()){
- LOG.debug("columns:" + columnNameProperty);
- for (String s : columnNames){
- LOG.debug("cn:"+s);
- }
- LOG.debug("types: " + columnTypeProperty);
- for (TypeInfo t : columnTypes){
- LOG.debug("ct:"+t.getTypeName()+",type:"+t.getCategory());
- }
- }
-
-
+
+ LOG.debug("columns: {}",columnNameProperty,columnNames);
+ LOG.debug("types: {}", columnTypeProperty, columnTypes);
assert (columnNames.size() == columnTypes.size());
-
+
rowTypeInfo = (StructTypeInfo) TypeInfoFactory.getStructTypeInfo(columnNames, columnTypes);
-
cachedObjectInspector = HCatRecordObjectInspectorFactory.getHCatRecordObjectInspector(rowTypeInfo);
-
}
-
+
public void initialize(HCatSchema hsch) throws SerDeException {
- if (LOG.isDebugEnabled()){
- LOG.debug("Initializing HCatRecordSerDe through HCatSchema" + hsch.toString());
- }
-
+ LOG.debug("Initializing HCatRecordSerDe through HCatSchema {}." ,hsch);
+
rowTypeInfo = (StructTypeInfo) TypeInfoUtils.getTypeInfoFromTypeString(hsch.toString());
cachedObjectInspector = HCatRecordObjectInspectorFactory.getHCatRecordObjectInspector(rowTypeInfo);
-
+
}
-
+
/**
- * The purpose of a deserialize method is to turn a data blob
+ * The purpose of a deserialize method is to turn a data blob
* which is a writable representation of the data into an
* object that can then be parsed using the appropriate
* ObjectInspector. In this case, since HCatRecord is directly
@@ -145,10 +128,10 @@ public class HCatRecordSerDe implements
* The purpose of the serialize method is to turn an object-representation
* with a provided ObjectInspector into a Writable format, which
* the underlying layer can then use to write out.
- *
+ *
* In this case, it means that Hive will call this method to convert
- * an object with appropriate objectinspectors that it knows about,
- * to write out a HCatRecord.
+ * an object with appropriate objectinspectors that it knows about,
+ * to write out a HCatRecord.
*/
@Override
public Writable serialize(Object obj, ObjectInspector objInspector)
@@ -161,12 +144,12 @@ public class HCatRecordSerDe implements
return new DefaultHCatRecord((List<Object>)serializeStruct(obj,(StructObjectInspector)objInspector));
}
-
+
/**
- * Return serialized HCatRecord from an underlying
+ * Return serialized HCatRecord from an underlying
* object-representation, and readable by an ObjectInspector
* @param obj : Underlying object-representation
- * @param soi : StructObjectInspector
+ * @param soi : StructObjectInspector
* @return HCatRecord
*/
private static List<?> serializeStruct(Object obj, StructObjectInspector soi)
@@ -174,7 +157,7 @@ public class HCatRecordSerDe implements
List<? extends StructField> fields = soi.getAllStructFieldRefs();
List<Object> list = soi.getStructFieldsDataAsList(obj);
-
+
if (list == null){
return null;
}
@@ -195,7 +178,7 @@ public class HCatRecordSerDe implements
}
/**
- * Return underlying Java Object from an object-representation
+ * Return underlying Java Object from an object-representation
* that is readable by a provided ObjectInspector.
*/
public static Object serializeField(Object field,
@@ -210,7 +193,7 @@ public class HCatRecordSerDe implements
} else if (fieldObjectInspector.getCategory() == Category.MAP){
res = serializeMap(field,(MapObjectInspector)fieldObjectInspector);
} else {
- throw new SerDeException(HCatRecordSerDe.class.toString()
+ throw new SerDeException(HCatRecordSerDe.class.toString()
+ " does not know what to do with fields of unknown category: "
+ fieldObjectInspector.getCategory() + " , type: " + fieldObjectInspector.getTypeName());
}
@@ -270,7 +253,7 @@ public class HCatRecordSerDe implements
}
return list;
} else {
- throw new SerDeException(HCatRecordSerDe.class.toString()
+ throw new SerDeException(HCatRecordSerDe.class.toString()
+ " does not know what to do with fields of unknown category: "
+ eloi.getCategory() + " , type: " + eloi.getTypeName());
}
@@ -278,8 +261,8 @@ public class HCatRecordSerDe implements
/**
- * Return an object inspector that can read through the object
- * that we return from deserialize(). To wit, that means we need
+ * Return an object inspector that can read through the object
+ * that we return from deserialize(). To wit, that means we need
* to return an ObjectInspector that can read HCatRecord, given
* the type info for it during initialize(). This also means
* that this method cannot and should not be called before initialize()
@@ -300,5 +283,5 @@ public class HCatRecordSerDe implements
return null;
}
-
+
}
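
One note on the parameterized calls introduced above: each {} in an SLF4J format string consumes exactly one argument, in order, and a trailing Throwable with no matching placeholder is logged as an exception rather than formatted into the message. A small self-contained demonstration:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class PlaceholderDemo {
        private static final Logger LOG = LoggerFactory.getLogger(PlaceholderDemo.class);

        public static void main(String[] args) {
            // Two placeholders, two arguments: both values reach the message.
            LOG.debug("columns: {} {}", "id,name", "[id, name]");
            // One placeholder, two arguments: the extra argument is silently
            // dropped, a mismatch that is easy to miss in review.
            LOG.debug("columns: {}", "id,name", "[id, name]");
            // A trailing Throwable is not matched to a placeholder; it is
            // rendered with its full stack trace instead.
            LOG.warn("Error generating json text from object.", new RuntimeException("boom"));
        }
    }
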
Modified: incubator/hcatalog/branches/branch-0.4/src/java/org/apache/hcatalog/data/JsonSerDe.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/branches/branch-0.4/src/java/org/apache/hcatalog/data/JsonSerDe.java?rev=1332740&r1=1332739&r2=1332740&view=diff
==============================================================================
--- incubator/hcatalog/branches/branch-0.4/src/java/org/apache/hcatalog/data/JsonSerDe.java (original)
+++ incubator/hcatalog/branches/branch-0.4/src/java/org/apache/hcatalog/data/JsonSerDe.java Tue May 1 16:26:30 2012
@@ -27,9 +27,6 @@ import java.util.List;
import java.util.Map;
import java.util.Properties;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.serde.Constants;
import org.apache.hadoop.hive.serde2.SerDe;
@@ -43,7 +40,6 @@ import org.apache.hadoop.hive.serde2.obj
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.UnionObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.BinaryObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.BooleanObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.ByteObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector;
@@ -57,56 +53,53 @@ import org.apache.hadoop.hive.serde2.typ
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
-import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hcatalog.common.HCatException;
-import org.apache.hcatalog.common.HCatUtil;
import org.apache.hcatalog.data.schema.HCatFieldSchema;
import org.apache.hcatalog.data.schema.HCatFieldSchema.Type;
import org.apache.hcatalog.data.schema.HCatSchema;
import org.apache.hcatalog.data.schema.HCatSchemaUtils;
-
import org.codehaus.jackson.JsonFactory;
import org.codehaus.jackson.JsonParseException;
import org.codehaus.jackson.JsonParser;
import org.codehaus.jackson.JsonToken;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
public class JsonSerDe implements SerDe {
- public static final Log LOG = LogFactory
- .getLog(JsonSerDe.class.getName());
-
+ private static final Logger LOG = LoggerFactory.getLogger(JsonSerDe.class);
private List<String> columnNames;
private List<TypeInfo> columnTypes;
-
+
private StructTypeInfo rowTypeInfo;
private HCatSchema schema;
private JsonFactory jsonFactory = null;
-
+
private HCatRecordObjectInspector cachedObjectInspector;
@Override
public void initialize(Configuration conf, Properties tbl)
throws SerDeException {
- if (LOG.isDebugEnabled()){
- LOG.debug("Initializing JsonSerDe");
- HCatUtil.logEntrySet(LOG, "props to serde", tbl.entrySet());
- }
-
+
+ LOG.debug("Initializing JsonSerDe");
+ LOG.debug("props to serde: {}",tbl.entrySet());
+
+
// Get column names and types
String columnNameProperty = tbl.getProperty(Constants.LIST_COLUMNS);
String columnTypeProperty = tbl.getProperty(Constants.LIST_COLUMN_TYPES);
-
+
// all table column names
if (columnNameProperty.length() == 0) {
columnNames = new ArrayList<String>();
} else {
columnNames = Arrays.asList(columnNameProperty.split(","));
}
-
+
// all column types
if (columnTypeProperty.length() == 0) {
columnTypes = new ArrayList<TypeInfo>();
@@ -114,28 +107,18 @@ public class JsonSerDe implements SerDe
columnTypes = TypeInfoUtils.getTypeInfosFromTypeString(columnTypeProperty);
}
- if (LOG.isDebugEnabled()){
- LOG.debug("columns:" + columnNameProperty);
- for (String s : columnNames){
- LOG.debug("cn:"+s);
- }
- LOG.debug("types: " + columnTypeProperty);
- for (TypeInfo t : columnTypes){
- LOG.debug("ct:"+t.getTypeName()+",type:"+t.getCategory());
- }
- }
-
+ LOG.debug("columns: {}, {}" , columnNameProperty, columnNames);
+ LOG.debug("types: {}, {} ", columnTypeProperty, columnTypes);
+
assert (columnNames.size() == columnTypes.size());
-
+
rowTypeInfo = (StructTypeInfo) TypeInfoFactory.getStructTypeInfo(columnNames, columnTypes);
cachedObjectInspector = HCatRecordObjectInspectorFactory.getHCatRecordObjectInspector(rowTypeInfo);
try {
schema = HCatSchemaUtils.getHCatSchema(rowTypeInfo).get(0).getStructSubSchema();
- if (LOG.isDebugEnabled()){
- LOG.debug("schema : "+ schema);
- LOG.debug("\tfields : "+schema.getFieldNames());
- }
+ LOG.debug("schema : {}", schema);
+ LOG.debug("fields : {}", schema.getFieldNames());
} catch (HCatException e) {
throw new SerDeException(e);
}
@@ -144,15 +127,15 @@ public class JsonSerDe implements SerDe
}
/**
- * Takes JSON string in Text form, and has to return an object representation above
+ * Takes JSON string in Text form, and has to return an object representation above
* it that's readable by the corresponding object inspector.
- *
+ *
* For this implementation, since we're using the jackson parser, we can construct
* our own object implementation, and we use HCatRecord for it
*/
@Override
public Object deserialize(Writable blob) throws SerDeException {
-
+
Text t = (Text)blob;
JsonParser p;
List<Object> r = new ArrayList<Object>(Collections.nCopies(columnNames.size(), null));
@@ -167,13 +150,13 @@ public class JsonSerDe implements SerDe
populateRecord(r,token,p,schema);
}
} catch (JsonParseException e) {
- LOG.warn("Error ["+ e.getMessage()+"] parsing json text ["+t+"]");
+ LOG.warn("Error [{}] parsing json location [{}].", e.getMessage(), e.getLocation());
throw new SerDeException(e);
} catch (IOException e) {
- LOG.warn("Error ["+ e.getMessage()+"] parsing json text ["+t+"]");
+ LOG.warn("Error [{}] parsing json text [{}].", e.getMessage(),t);
throw new SerDeException(e);
}
-
+
return new DefaultHCatRecord(r);
}
@@ -190,13 +173,13 @@ public class JsonSerDe implements SerDe
/**
* Utility method to extract current expected field from given JsonParser
- *
+ *
* To get the field, we need either a type or a hcatFieldSchema(necessary for complex types)
- * It is possible that one of them can be null, and so, if so, the other is instantiated
+ * It is possible that one of them can be null, and so, if so, the other is instantiated
* from the other
- *
- * isTokenCurrent is a boolean variable also passed in, which determines
- * if the JsonParser is already at the token we expect to read next, or
+ *
+ * isTokenCurrent is a boolean variable also passed in, which determines
+ * if the JsonParser is already at the token we expect to read next, or
* needs advancing to the next before we read.
*/
private Object extractCurrentField(JsonParser p, Type t,
@@ -247,7 +230,7 @@ public class JsonSerDe implements SerDe
throw new IOException("JsonSerDe does not support BINARY type");
case ARRAY:
if (valueToken == JsonToken.VALUE_NULL){
- val = null;
+ val = null;
break;
}
if (valueToken != JsonToken.START_ARRAY){
@@ -261,7 +244,7 @@ public class JsonSerDe implements SerDe
break;
case MAP:
if (valueToken == JsonToken.VALUE_NULL){
- val = null;
+ val = null;
break;
}
if (valueToken != JsonToken.START_OBJECT){
@@ -274,7 +257,7 @@ public class JsonSerDe implements SerDe
Object k = getObjectOfCorrespondingPrimitiveType(p.getCurrentName(),keyType);
Object v;
if (valueSchema.getType() == HCatFieldSchema.Type.STRUCT){
- v = extractCurrentField(p,null, valueSchema,false);
+ v = extractCurrentField(p,null, valueSchema,false);
} else {
v = extractCurrentField(p,null, valueSchema,true);
}
@@ -285,7 +268,7 @@ public class JsonSerDe implements SerDe
break;
case STRUCT:
if (valueToken == JsonToken.VALUE_NULL){
- val = null;
+ val = null;
break;
}
if (valueToken != JsonToken.START_OBJECT){
@@ -293,7 +276,7 @@ public class JsonSerDe implements SerDe
}
HCatSchema subSchema = hcatFieldSchema.getStructSubSchema();
int sz = subSchema.getFieldNames().size();
-
+
List<Object> struct = new ArrayList<Object>(Collections.nCopies(sz, null));
while ((valueToken = p.nextToken()) != JsonToken.END_OBJECT) {
populateRecord(struct, valueToken, p, subSchema);
@@ -360,7 +343,7 @@ public class JsonSerDe implements SerDe
}
} catch (IOException e) {
- LOG.warn("Error ["+ e.getMessage()+"] generating json text from object");
+ LOG.warn("Error generating json text from object.", e);
throw new SerDeException(e);
}
return new Text(sb.toString());
@@ -530,7 +513,7 @@ public class JsonSerDe implements SerDe
}
}
-
+
/**
* Returns an object inspector for the specified schema that
* is capable of reading in the object representation of the JSON string
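
JsonSerDe.deserialize walks the Jackson token stream directly and fills an HCatRecord as it goes. A minimal, self-contained walk in the same style, using the same org.codehaus.jackson streaming API (the sample JSON and the printing are illustrative; the real code dispatches into populateRecord):

    import java.io.IOException;
    import org.codehaus.jackson.JsonFactory;
    import org.codehaus.jackson.JsonParser;
    import org.codehaus.jackson.JsonToken;

    class JsonTokenWalkSketch {
        public static void main(String[] args) throws IOException {
            String json = "{\"id\":1,\"name\":\"alice\"}";
            JsonParser p = new JsonFactory().createJsonParser(json);
            if (p.nextToken() != JsonToken.START_OBJECT) {
                throw new IOException("expected a JSON object");
            }
            // Advance field by field until the object closes; each iteration
            // sees a FIELD_NAME token followed by its value token.
            while (p.nextToken() != JsonToken.END_OBJECT) {
                String field = p.getCurrentName();
                p.nextToken(); // move from the field name to its value
                System.out.println(field + " => " + p.getText());
            }
        }
    }
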
Modified: incubator/hcatalog/branches/branch-0.4/src/java/org/apache/hcatalog/mapreduce/DefaultOutputCommitterContainer.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/branches/branch-0.4/src/java/org/apache/hcatalog/mapreduce/DefaultOutputCommitterContainer.java?rev=1332740&r1=1332739&r2=1332740&view=diff
==============================================================================
--- incubator/hcatalog/branches/branch-0.4/src/java/org/apache/hcatalog/mapreduce/DefaultOutputCommitterContainer.java (original)
+++ incubator/hcatalog/branches/branch-0.4/src/java/org/apache/hcatalog/mapreduce/DefaultOutputCommitterContainer.java Tue May 1 16:26:30 2012
@@ -20,16 +20,16 @@ package org.apache.hcatalog.mapreduce;
import java.io.IOException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.mapred.HCatMapRedUtil;
import org.apache.hadoop.mapreduce.JobContext;
-import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.JobStatus.State;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hcatalog.common.HCatConstants;
import org.apache.hcatalog.common.HCatUtil;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Part of the DefaultOutput*Container classes
@@ -37,7 +37,7 @@ import org.apache.hcatalog.common.HCatUt
*/
class DefaultOutputCommitterContainer extends OutputCommitterContainer {
- private static final Log LOG = LogFactory.getLog(DefaultOutputCommitterContainer.class);
+ private static final Logger LOG = LoggerFactory.getLogger(DefaultOutputCommitterContainer.class);
/**
* @param context current JobContext
Modified: incubator/hcatalog/branches/branch-0.4/src/java/org/apache/hcatalog/mapreduce/HCatRecordReader.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/branches/branch-0.4/src/java/org/apache/hcatalog/mapreduce/HCatRecordReader.java?rev=1332740&r1=1332739&r2=1332740&view=diff
==============================================================================
--- incubator/hcatalog/branches/branch-0.4/src/java/org/apache/hcatalog/mapreduce/HCatRecordReader.java (original)
+++ incubator/hcatalog/branches/branch-0.4/src/java/org/apache/hcatalog/mapreduce/HCatRecordReader.java Tue May 1 16:26:30 2012
@@ -19,39 +19,30 @@ package org.apache.hcatalog.mapreduce;
import java.io.IOException;
import java.util.Map;
-import java.util.Map.Entry;
import java.util.Properties;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.serde2.SerDe;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparable;
-import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapreduce.InputSplit;
-import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
-import org.apache.hadoop.hive.serde2.SerDe;
-
import org.apache.hcatalog.common.HCatConstants;
import org.apache.hcatalog.common.HCatUtil;
import org.apache.hcatalog.data.DefaultHCatRecord;
import org.apache.hcatalog.data.HCatRecord;
import org.apache.hcatalog.data.LazyHCatRecord;
import org.apache.hcatalog.data.schema.HCatSchema;
-import org.apache.hcatalog.data.schema.HCatFieldSchema;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
-/** The HCat wrapper for the underlying RecordReader,
+/** The HCat wrapper for the underlying RecordReader,
* this ensures that the initialize on
- * the underlying record reader is done with the underlying split,
+ * the underlying record reader is done with the underlying split,
* not with HCatSplit.
*/
class HCatRecordReader extends RecordReader<WritableComparable, HCatRecord> {
-
- Log LOG = LogFactory.getLog(HCatRecordReader.class);
+
+ private static final Logger LOG = LoggerFactory.getLogger(HCatRecordReader.class);
WritableComparable currentKey;
Writable currentValue;
@@ -74,28 +65,28 @@ class HCatRecordReader extends RecordRea
* Instantiates a new hcat record reader.
* @param baseRecordReader the base record reader
*/
- public HCatRecordReader(HCatStorageHandler storageHandler,
- org.apache.hadoop.mapred.RecordReader<WritableComparable,
- Writable> baseRecordReader,
- SerDe serde,
+ public HCatRecordReader(HCatStorageHandler storageHandler,
+ org.apache.hadoop.mapred.RecordReader<WritableComparable,
+ Writable> baseRecordReader,
+ SerDe serde,
Map<String,String> valuesNotInDataCols) {
this.baseRecordReader = baseRecordReader;
this.storageHandler = storageHandler;
this.serde = serde;
this.valuesNotInDataCols = valuesNotInDataCols;
}
-
+
/* (non-Javadoc)
* @see org.apache.hadoop.mapreduce.RecordReader#initialize(
- * org.apache.hadoop.mapreduce.InputSplit,
+ * org.apache.hadoop.mapreduce.InputSplit,
* org.apache.hadoop.mapreduce.TaskAttemptContext)
*/
@Override
- public void initialize(org.apache.hadoop.mapreduce.InputSplit split,
+ public void initialize(org.apache.hadoop.mapreduce.InputSplit split,
TaskAttemptContext taskContext)
throws IOException, InterruptedException {
org.apache.hadoop.mapred.InputSplit baseSplit;
-
+
// Pull the output schema out of the TaskAttemptContext
outputSchema = (HCatSchema)HCatUtil.deserialize(
taskContext.getConfiguration().get(HCatConstants.HCAT_KEY_OUTPUT_SCHEMA));
@@ -113,9 +104,9 @@ class HCatRecordReader extends RecordRea
// Pull the table schema out of the Split info
// TODO This should be passed in the TaskAttemptContext instead
dataSchema = ((HCatSplit)split).getDataSchema();
-
+
Properties properties = new Properties();
- for (Map.Entry<String, String>param :
+ for (Map.Entry<String, String>param :
((HCatSplit)split).getPartitionInfo()
.getJobProperties().entrySet()) {
properties.setProperty(param.getKey(), param.getValue());
@@ -126,7 +117,7 @@ class HCatRecordReader extends RecordRea
* @see org.apache.hadoop.mapreduce.RecordReader#getCurrentKey()
*/
@Override
- public WritableComparable getCurrentKey()
+ public WritableComparable getCurrentKey()
throws IOException, InterruptedException {
return currentKey;
}
@@ -135,7 +126,7 @@ class HCatRecordReader extends RecordRea
* @see org.apache.hadoop.mapreduce.RecordReader#getCurrentValue()
*/
@Override
- public HCatRecord getCurrentValue()
+ public HCatRecord getCurrentValue()
throws IOException, InterruptedException {
HCatRecord r;
@@ -153,10 +144,10 @@ class HCatRecordReader extends RecordRea
}
i++;
}
-
+
return dr;
-
- } catch (Exception e) {
+
+ } catch (Exception e) {
throw new IOException("Failed to create HCatRecord ",e);
}
}
@@ -169,8 +160,7 @@ class HCatRecordReader extends RecordRea
try {
return baseRecordReader.getProgress();
} catch (IOException e) {
- LOG.warn(e.getMessage());
- LOG.warn(e.getStackTrace());
+ LOG.warn("Exception in HCatRecord reader",e);
}
return 0.0f; // errored
}
@@ -185,7 +175,7 @@ class HCatRecordReader extends RecordRea
currentValue = baseRecordReader.createValue();
}
- return baseRecordReader.next(currentKey,
+ return baseRecordReader.next(currentKey,
currentValue);
}
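
The warn-path change in getProgress fixes a subtle bug: the old LOG.warn(e.getStackTrace()) passed a StackTraceElement[] where a message was expected, so commons-logging printed the array's toString (something like "[Ljava.lang.StackTraceElement;@1f2a3b") instead of the trace. Passing the exception itself as the last argument lets the backend render the full stack trace. A sketch of both behaviors:

    import java.io.IOException;
    import org.apache.commons.logging.Log;
    import org.apache.commons.logging.LogFactory;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class WarnTraceSketch {
        private static final Log OLD_LOG = LogFactory.getLog(WarnTraceSketch.class);
        private static final Logger LOG = LoggerFactory.getLogger(WarnTraceSketch.class);

        public static void main(String[] args) {
            IOException e = new IOException("disk went away");
            // Old: warn(Object) stringifies the StackTraceElement[] itself.
            OLD_LOG.warn(e.getStackTrace());
            // Patched: the Throwable argument gets its message and full trace.
            LOG.warn("Exception in HCatRecord reader", e);
        }
    }
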
Modified: incubator/hcatalog/branches/branch-0.4/src/java/org/apache/hcatalog/mapreduce/HCatSplit.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/branches/branch-0.4/src/java/org/apache/hcatalog/mapreduce/HCatSplit.java?rev=1332740&r1=1332739&r2=1332740&view=diff
==============================================================================
--- incubator/hcatalog/branches/branch-0.4/src/java/org/apache/hcatalog/mapreduce/HCatSplit.java (original)
+++ incubator/hcatalog/branches/branch-0.4/src/java/org/apache/hcatalog/mapreduce/HCatSplit.java Tue May 1 16:26:30 2012
@@ -22,24 +22,20 @@ import java.io.DataOutput;
import java.io.IOException;
import java.lang.reflect.Constructor;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableUtils;
import org.apache.hadoop.mapreduce.InputSplit;
-
import org.apache.hcatalog.common.HCatUtil;
import org.apache.hcatalog.data.schema.HCatSchema;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/** The HCatSplit wrapper around the InputSplit returned by the underlying InputFormat */
-public class HCatSplit extends InputSplit
+public class HCatSplit extends InputSplit
implements Writable,org.apache.hadoop.mapred.InputSplit {
- Log LOG = LogFactory.getLog(HCatSplit.class);
-
+ private static final Logger LOG = LoggerFactory.getLogger(HCatSplit.class);
/** The partition info for the split. */
private PartInfo partitionInfo;
@@ -64,7 +60,7 @@ public class HCatSplit extends InputSpli
* @param baseMapRedSplit the base mapred split
* @param tableSchema the table level schema
*/
- public HCatSplit(PartInfo partitionInfo,
+ public HCatSplit(PartInfo partitionInfo,
org.apache.hadoop.mapred.InputSplit baseMapRedSplit,
HCatSchema tableSchema) {
@@ -97,7 +93,7 @@ public class HCatSplit extends InputSpli
public HCatSchema getDataSchema() {
return this.partitionInfo.getPartitionSchema();
}
-
+
/**
* Gets the table schema.
* @return the table schema
@@ -114,8 +110,7 @@ public class HCatSplit extends InputSpli
try {
return baseMapRedSplit.getLength();
} catch (IOException e) {
- LOG.warn(e.getMessage());
- LOG.warn(e.getStackTrace());
+ LOG.warn("Exception in HCatSplit",e);
}
return 0; // we errored
}
@@ -128,8 +123,7 @@ public class HCatSplit extends InputSpli
try {
return baseMapRedSplit.getLocations();
} catch (IOException e) {
- LOG.warn(e.getMessage());
- LOG.warn(e.getStackTrace());
+ LOG.warn("Exception in HCatSplit",e);
}
return new String[0]; // we errored
}
@@ -151,7 +145,7 @@ public class HCatSplit extends InputSpli
//Class.forName().newInstance() does not work if the underlying
//InputSplit has package visibility
- Constructor<? extends org.apache.hadoop.mapred.InputSplit>
+ Constructor<? extends org.apache.hadoop.mapred.InputSplit>
constructor =
splitClass.getDeclaredConstructor(new Class[]{});
constructor.setAccessible(true);
Modified: incubator/hcatalog/branches/branch-0.4/src/java/org/apache/hcatalog/mapreduce/InitializeInput.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/branches/branch-0.4/src/java/org/apache/hcatalog/mapreduce/InitializeInput.java?rev=1332740&r1=1332739&r2=1332740&view=diff
==============================================================================
--- incubator/hcatalog/branches/branch-0.4/src/java/org/apache/hcatalog/mapreduce/InitializeInput.java (original)
+++ incubator/hcatalog/branches/branch-0.4/src/java/org/apache/hcatalog/mapreduce/InitializeInput.java Tue May 1 16:26:30 2012
@@ -24,8 +24,6 @@ import java.util.List;
import java.util.Map;
import java.util.Properties;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
@@ -39,6 +37,8 @@ import org.apache.hcatalog.common.HCatCo
import org.apache.hcatalog.common.HCatException;
import org.apache.hcatalog.common.HCatUtil;
import org.apache.hcatalog.data.schema.HCatSchema;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* The Class which handles querying the metadata server using the MetaStoreClient. The list of
@@ -48,7 +48,7 @@ import org.apache.hcatalog.data.schema.H
*/
public class InitializeInput {
- private static final Log LOG = LogFactory.getLog(InitializeInput.class);
+ private static final Logger LOG = LoggerFactory.getLogger(InitializeInput.class);
/**
* Set the input to use for the Job. This queries the metadata server with the specified partition predicates,
Modified: incubator/hcatalog/branches/branch-0.4/src/test/org/apache/hcatalog/cli/TestEximSemanticAnalysis.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/branches/branch-0.4/src/test/org/apache/hcatalog/cli/TestEximSemanticAnalysis.java?rev=1332740&r1=1332739&r2=1332740&view=diff
==============================================================================
--- incubator/hcatalog/branches/branch-0.4/src/test/org/apache/hcatalog/cli/TestEximSemanticAnalysis.java (original)
+++ incubator/hcatalog/branches/branch-0.4/src/test/org/apache/hcatalog/cli/TestEximSemanticAnalysis.java Tue May 1 16:26:30 2012
@@ -23,8 +23,6 @@ import java.net.URI;
import junit.framework.TestCase;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hive.cli.CliSessionState;
@@ -39,6 +37,8 @@ import org.apache.hadoop.hive.ql.session
import org.apache.hcatalog.MiniCluster;
import org.apache.hcatalog.cli.SemanticAnalysis.HCatSemanticAnalyzer;
import org.apache.hcatalog.common.HCatConstants;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
public class TestEximSemanticAnalysis extends TestCase {
@@ -47,7 +47,7 @@ public class TestEximSemanticAnalysis ex
private HiveConf hcatConf;
private HCatDriver hcatDriver;
private Warehouse wh;
- private static final Log LOG = LogFactory.getLog(TestEximSemanticAnalysis.class);
+ private static final Logger LOG = LoggerFactory.getLogger(TestEximSemanticAnalysis.class);
@Override
protected void setUp() throws Exception {
@@ -93,7 +93,7 @@ public class TestEximSemanticAnalysis ex
Runtime.getRuntime().exec("rm -rf /tmp/hcat");
response = hcatDriver.run("drop table junit_sem_analysis");
if (response.getResponseCode() != 0) {
- System.err.println(response.getErrorMessage());
+ LOG.error(response.getErrorMessage());
fail("Drop table failed");
}
}
@@ -130,7 +130,7 @@ public class TestEximSemanticAnalysis ex
cluster.getFileSystem().setPermission(whPath, FsPermission.valueOf("-rwxrwxrwx"));
response = hcatDriver.run("drop table junit_sem_analysis");
if (response.getResponseCode() != 0) {
- System.err.println(response.getErrorMessage());
+ LOG.error(response.getErrorMessage());
fail("Drop table failed");
}
}
@@ -164,7 +164,7 @@ public class TestEximSemanticAnalysis ex
response = hcatDriver.run("drop table junit_sem_analysis_imported");
if (response.getResponseCode() != 0) {
- System.err.println(response.getErrorMessage());
+ LOG.error(response.getErrorMessage());
fail("Drop table failed");
}
}
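The same three-line check — run the drop, log the error message, fail the test — recurs in each TestEximSemanticAnalysis method above. A sketch of how it could be factored into a helper; dropTableOrFail is a hypothetical name, not part of the commit:

    import junit.framework.TestCase;

    import org.apache.hadoop.hive.ql.CommandNeedRetryException;
    import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
    import org.apache.hcatalog.cli.HCatDriver;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public abstract class DropTableSketch extends TestCase {
        private static final Logger LOG = LoggerFactory.getLogger(DropTableSketch.class);

        // Hypothetical helper consolidating the repeated drop-table check:
        // the error message now flows through the logger instead of stderr.
        protected void dropTableOrFail(HCatDriver driver, String table)
                throws CommandNeedRetryException {
            CommandProcessorResponse response = driver.run("drop table " + table);
            if (response.getResponseCode() != 0) {
                LOG.error(response.getErrorMessage());
                fail("Drop table failed");
            }
        }
    }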
Modified: incubator/hcatalog/branches/branch-0.4/src/test/org/apache/hcatalog/cli/TestPermsGrp.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/branches/branch-0.4/src/test/org/apache/hcatalog/cli/TestPermsGrp.java?rev=1332740&r1=1332739&r2=1332740&view=diff
==============================================================================
--- incubator/hcatalog/branches/branch-0.4/src/test/org/apache/hcatalog/cli/TestPermsGrp.java (original)
+++ incubator/hcatalog/branches/branch-0.4/src/test/org/apache/hcatalog/cli/TestPermsGrp.java Tue May 1 16:26:30 2012
@@ -49,6 +49,8 @@ import org.apache.hcatalog.cli.HCatCli;
import org.apache.hcatalog.cli.SemanticAnalysis.HCatSemanticAnalyzer;
import org.apache.hcatalog.common.HCatConstants;
import org.apache.thrift.TException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
public class TestPermsGrp extends TestCase {
@@ -58,6 +60,7 @@ public class TestPermsGrp extends TestCa
private Warehouse clientWH;
private Thread t;
private HiveMetaStoreClient msc;
+ private static final Logger LOG = LoggerFactory.getLogger(TestPermsGrp.class);
private static class RunMS implements Runnable {
@@ -66,7 +69,7 @@ public class TestPermsGrp extends TestCa
try {
HiveMetaStore.main(new String[]{"-v","-p",msPort});
} catch(Throwable t) {
- System.err.println("Exiting. Got exception from metastore: " + t.getMessage());
+ LOG.error("Exiting. Got exception from metastore: ", t);
}
}
@@ -193,8 +196,7 @@ public class TestPermsGrp extends TestCa
}
} catch (Exception e) {
- System.err.println(StringUtils.stringifyException(e));
- System.err.println("testCustomPerms failed.");
+ LOG.error("testCustomPerms failed.", e);
throw e;
}
}
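The RunMS change above leans on an SLF4J convention worth noting: a Throwable passed as the final argument is treated as the exception to log, so the message is followed by the full stack trace. The old System.err line printed only t.getMessage(), discarding the trace. A small sketch contrasting the two behaviors (the message text is illustrative):

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class ThrowableArgSketch {
        private static final Logger LOG = LoggerFactory.getLogger(ThrowableArgSketch.class);

        public void report(Throwable t) {
            // Trailing Throwable argument: SLF4J logs the message plus the
            // exception's full stack trace.
            LOG.error("Exiting. Got exception from metastore: ", t);

            // String argument via {}: only the rendered text is logged; the
            // stack trace is lost, which is what the old System.err line did.
            LOG.error("Exiting. Got exception from metastore: {}", t.getMessage());
        }
    }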
Modified: incubator/hcatalog/branches/branch-0.4/src/test/org/apache/hcatalog/cli/TestSemanticAnalysis.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/branches/branch-0.4/src/test/org/apache/hcatalog/cli/TestSemanticAnalysis.java?rev=1332740&r1=1332739&r2=1332740&view=diff
==============================================================================
--- incubator/hcatalog/branches/branch-0.4/src/test/org/apache/hcatalog/cli/TestSemanticAnalysis.java (original)
+++ incubator/hcatalog/branches/branch-0.4/src/test/org/apache/hcatalog/cli/TestSemanticAnalysis.java Tue May 1 16:26:30 2012
@@ -44,12 +44,15 @@ import org.apache.hadoop.mapred.TextInpu
import org.apache.hcatalog.cli.SemanticAnalysis.HCatSemanticAnalyzer;
import org.apache.hcatalog.listener.NotificationListener;
import org.apache.thrift.TException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
public class TestSemanticAnalysis extends TestCase{
private Driver hcatDriver;
private Driver hiveDriver;
private HiveMetaStoreClient msc;
+ private static final Logger LOG = LoggerFactory.getLogger(TestSemanticAnalysis.class);
@Override
protected void setUp() throws Exception {
@@ -62,7 +65,7 @@ public class TestSemanticAnalysis extend
HiveConf hiveConf = new HiveConf(hcatConf,this.getClass());
hiveDriver = new Driver(hiveConf);
-
+
hcatConf.set(ConfVars.SEMANTIC_ANALYZER_HOOK.varname, HCatSemanticAnalyzer.class.getName());
hcatDriver = new Driver(hcatConf);
@@ -83,7 +86,7 @@ public class TestSemanticAnalysis extend
assertTrue(result.get(0).contains("mydb.db"));
hcatDriver.run("drop database mydb cascade");
}
-
+
public void testCreateTblWithLowerCasePartNames() throws CommandNeedRetryException, MetaException, TException, NoSuchObjectException{
hiveDriver.run("drop table junit_sem_analysis");
CommandProcessorResponse resp = hiveDriver.run("create table junit_sem_analysis (a int) partitioned by (B string) stored as TEXTFILE");
@@ -93,7 +96,7 @@ public class TestSemanticAnalysis extend
assertEquals("Partition key name case problem", "b" , tbl.getPartitionKeys().get(0).getName());
hiveDriver.run("drop table junit_sem_analysis");
}
-
+
public void testAlterTblFFpart() throws MetaException, TException, NoSuchObjectException, CommandNeedRetryException {
hiveDriver.run("drop table junit_sem_analysis");
@@ -117,9 +120,9 @@ public class TestSemanticAnalysis extend
public void testUsNonExistentDB() throws CommandNeedRetryException {
- assertEquals(9, hcatDriver.run("use no_such_db").getResponseCode());
+ assertEquals(9, hcatDriver.run("use no_such_db").getResponseCode());
}
-
+
public void testDatabaseOperations() throws MetaException, CommandNeedRetryException {
List<String> dbs = msc.getAllDatabases();
@@ -208,7 +211,7 @@ public class TestSemanticAnalysis extend
response = hcatDriver.run("alter table junit_sem_analysis add columns (d tinyint)");
assertEquals(0, response.getResponseCode());
assertNull(response.getErrorMessage());
-
+
response = hcatDriver.run("describe extended junit_sem_analysis");
assertEquals(0, response.getResponseCode());
Table tbl = msc.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tblName);
@@ -358,7 +361,7 @@ public class TestSemanticAnalysis extend
hcatDriver.run("drop table junit_sem_analysis");
}
catch( Exception e){
- System.err.println(e.getMessage());
+ LOG.error("Error in drop table.",e);
}
query = "create table junit_sem_analysis (a int) partitioned by (b string) stored as RCFILE";
Modified: incubator/hcatalog/branches/branch-0.4/src/test/org/apache/hcatalog/data/HCatDataCheckUtil.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/branches/branch-0.4/src/test/org/apache/hcatalog/data/HCatDataCheckUtil.java?rev=1332740&r1=1332739&r2=1332740&view=diff
==============================================================================
--- incubator/hcatalog/branches/branch-0.4/src/test/org/apache/hcatalog/data/HCatDataCheckUtil.java (original)
+++ incubator/hcatalog/branches/branch-0.4/src/test/org/apache/hcatalog/data/HCatDataCheckUtil.java Tue May 1 16:26:30 2012
@@ -22,22 +22,22 @@ import java.util.ArrayList;
import java.util.List;
import java.util.Map.Entry;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.cli.CliSessionState;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.CommandNeedRetryException;
import org.apache.hadoop.hive.ql.Driver;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hcatalog.MiniCluster;
-import org.apache.hcatalog.common.HCatUtil;
-import org.apache.hcatalog.mapreduce.HCatOutputFormat;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Helper class for Other Data Testers
*/
public class HCatDataCheckUtil {
+ private static final Logger LOG = LoggerFactory.getLogger(HCatDataCheckUtil.class);
+
public static Driver instantiateDriver(MiniCluster cluster) {
HiveConf hiveConf = new HiveConf(HCatDataCheckUtil.class);
for (Entry e : cluster.getProperties().entrySet()){
@@ -46,10 +46,8 @@ public class HCatDataCheckUtil {
hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
-
- Log logger = LogFactory.getLog(HCatOutputFormat.class);
- HCatUtil.logHiveConf(logger , hiveConf);
-
+
+ LOG.debug("Hive conf : {}", hiveConf.getAllProperties());
Driver driver = new Driver(hiveConf);
SessionState.start(new CliSessionState(hiveConf));
return driver;
@@ -82,13 +80,11 @@ public class HCatDataCheckUtil {
driver.run(selectCmd);
ArrayList<String> src_values = new ArrayList<String>();
driver.getResults(src_values);
- for (String s : src_values){
- System.out.println(name+":"+s);
- }
+ LOG.info("{} : {}", name, src_values);
return src_values;
}
-
+
public static boolean recordsEqual(HCatRecord first, HCatRecord second) {
return (compareRecords(first,second) == 0);
}
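HCatDataCheckUtil illustrates the other recurring pattern in this commit: println loops and eager string concatenation replaced by parameterized {} messages, which defer all formatting (including the arguments' toString()) until the level is known to be enabled. A minimal sketch, assuming a list of query results like src_values above:

    import java.util.Arrays;
    import java.util.List;

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class ParameterizedLoggingSketch {
        private static final Logger LOG = LoggerFactory.getLogger(ParameterizedLoggingSketch.class);

        // One parameterized call replaces the per-element println loop; the
        // List renders as [elem1, elem2, ...] and, unlike concatenation, no
        // formatting work happens when INFO is disabled.
        public void logResults(String name, List<String> values) {
            LOG.info("{} : {}", name, values);
        }

        public static void main(String[] args) {
            new ParameterizedLoggingSketch().logResults("fetched", Arrays.asList("a", "b"));
        }
    }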
Modified: incubator/hcatalog/branches/branch-0.4/src/test/org/apache/hcatalog/data/TestHCatRecordSerDe.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/branches/branch-0.4/src/test/org/apache/hcatalog/data/TestHCatRecordSerDe.java?rev=1332740&r1=1332739&r2=1332740&view=diff
==============================================================================
--- incubator/hcatalog/branches/branch-0.4/src/test/org/apache/hcatalog/data/TestHCatRecordSerDe.java (original)
+++ incubator/hcatalog/branches/branch-0.4/src/test/org/apache/hcatalog/data/TestHCatRecordSerDe.java Tue May 1 16:26:30 2012
@@ -24,18 +24,20 @@ import java.util.Map;
import java.util.Map.Entry;
import java.util.Properties;
+import junit.framework.Assert;
+import junit.framework.TestCase;
+
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.serde.Constants;
-import org.apache.hadoop.hive.serde2.DelimitedJSONSerDe;
import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
import org.apache.hadoop.io.Writable;
-import org.apache.hcatalog.common.HCatUtil;
-
-import junit.framework.Assert;
-import junit.framework.TestCase;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
public class TestHCatRecordSerDe extends TestCase{
+ private static final Logger LOG = LoggerFactory.getLogger(TestHCatRecordSerDe.class);
+
public Map<Properties,HCatRecord> getData(){
Map<Properties,HCatRecord> data = new HashMap<Properties,HCatRecord>();
@@ -53,12 +55,12 @@ public class TestHCatRecordSerDe extends
innerStruct.add(new String("abc"));
innerStruct.add(new String("def"));
rlist.add(innerStruct);
-
+
List<Integer> innerList = new ArrayList<Integer>();
innerList.add(314);
innerList.add(007);
rlist.add(innerList);
-
+
Map<Short, String> map = new HashMap<Short, String>(3);
map.put(new Short("2"), "hcat is cool");
map.put(new Short("3"), "is it?");
@@ -66,7 +68,7 @@ public class TestHCatRecordSerDe extends
rlist.add(map);
rlist.add(new Boolean(true));
-
+
List<Object> c1 = new ArrayList<Object>();
List<Object> c1_1 = new ArrayList<Object>();
c1_1.add(new Integer(12));
@@ -95,13 +97,13 @@ public class TestHCatRecordSerDe extends
aa.add(aa_1);
rlist.add(aa);
- String typeString =
+ String typeString =
"tinyint,smallint,int,bigint,double,float,string,string,"
+ "struct<a:string,b:string>,array<int>,map<smallint,string>,boolean,"
+ "array<struct<i1:int,i2:struct<ii1:array<int>,ii2:map<string,struct<iii1:int>>>>>,"
+ "array<map<string,string>>,array<array<string>>";
Properties props = new Properties();
-
+
props.put(Constants.LIST_COLUMNS, "ti,si,i,bi,d,f,s,n,r,l,m,b,c1,am,aa");
props.put(Constants.LIST_COLUMN_TYPES, typeString);
// props.put(Constants.SERIALIZATION_NULL_FORMAT, "\\N");
@@ -118,46 +120,46 @@ public class TestHCatRecordSerDe extends
for (Entry<Properties,HCatRecord> e : getData().entrySet()){
Properties tblProps = e.getKey();
HCatRecord r = e.getValue();
-
+
HCatRecordSerDe hrsd = new HCatRecordSerDe();
hrsd.initialize(conf, tblProps);
- System.out.println("ORIG:"+r.toString());
+ LOG.info("ORIG: {}", r);
Writable s = hrsd.serialize(r,hrsd.getObjectInspector());
- System.out.println("ONE:"+s.toString());
+ LOG.info("ONE: {}", s);
HCatRecord r2 = (HCatRecord) hrsd.deserialize(s);
Assert.assertTrue(HCatDataCheckUtil.recordsEqual(r,r2));
- // If it went through correctly, then s is also a HCatRecord,
- // and also equal to the above, and a deepcopy, and this holds
+ // If it went through correctly, then s is also a HCatRecord,
+ // and also equal to the above, and a deepcopy, and this holds
// through for multiple levels more of serialization as well.
Writable s2 = hrsd.serialize(s, hrsd.getObjectInspector());
- System.out.println("TWO:"+s2.toString());
+ LOG.info("TWO: {}", s2);
Assert.assertTrue(HCatDataCheckUtil.recordsEqual(r,(HCatRecord)s));
Assert.assertTrue(HCatDataCheckUtil.recordsEqual(r,(HCatRecord)s2));
-
+
// serialize using another serde, and read out that object repr.
LazySimpleSerDe testSD = new LazySimpleSerDe();
testSD.initialize(conf, tblProps);
Writable s3 = testSD.serialize(s, hrsd.getObjectInspector());
- System.out.println("THREE:"+s3.toString());
+ LOG.info("THREE: {}",s3);
Object o3 = testSD.deserialize(s3);
Assert.assertFalse(r.getClass().equals(o3.getClass()));
// then serialize again using hrsd, and compare results
HCatRecord s4 = (HCatRecord) hrsd.serialize(o3, testSD.getObjectInspector());
- System.out.println("FOUR:"+s4.toString());
+ LOG.info("FOUR: {}", s4);
// Test LazyHCatRecord init and read
LazyHCatRecord s5 = new LazyHCatRecord(o3,testSD.getObjectInspector());
- System.out.println("FIVE:"+s5.toString());
+ LOG.info("FIVE: {}",s5);
LazyHCatRecord s6 = new LazyHCatRecord(s4,hrsd.getObjectInspector());
- System.out.println("SIX:"+s6.toString());
+ LOG.info("SIX: {}", s6);
}
Modified: incubator/hcatalog/branches/branch-0.4/src/test/org/apache/hcatalog/data/TestJsonSerDe.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/branches/branch-0.4/src/test/org/apache/hcatalog/data/TestJsonSerDe.java?rev=1332740&r1=1332739&r2=1332740&view=diff
==============================================================================
--- incubator/hcatalog/branches/branch-0.4/src/test/org/apache/hcatalog/data/TestJsonSerDe.java (original)
+++ incubator/hcatalog/branches/branch-0.4/src/test/org/apache/hcatalog/data/TestJsonSerDe.java Tue May 1 16:26:30 2012
@@ -21,19 +21,20 @@ import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import java.util.Map.Entry;
import java.util.Properties;
+import junit.framework.TestCase;
+
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.serde.Constants;
-import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
-
-import junit.framework.Assert;
-import junit.framework.TestCase;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
public class TestJsonSerDe extends TestCase{
+ private static final Logger LOG = LoggerFactory.getLogger(TestJsonSerDe.class);
+
public List<Pair<Properties,HCatRecord>> getData(){
List<Pair<Properties,HCatRecord>> data = new ArrayList<Pair<Properties,HCatRecord>>();
@@ -51,12 +52,12 @@ public class TestJsonSerDe extends TestC
innerStruct.add(new String("abc"));
innerStruct.add(new String("def"));
rlist.add(innerStruct);
-
+
List<Integer> innerList = new ArrayList<Integer>();
innerList.add(314);
innerList.add(007);
rlist.add(innerList);
-
+
Map<Short, String> map = new HashMap<Short, String>(3);
map.put(new Short("2"), "hcat is cool");
map.put(new Short("3"), "is it?");
@@ -64,7 +65,7 @@ public class TestJsonSerDe extends TestC
rlist.add(map);
rlist.add(new Boolean(true));
-
+
List<Object> c1 = new ArrayList<Object>();
List<Object> c1_1 = new ArrayList<Object>();
c1_1.add(new Integer(12));
@@ -81,7 +82,7 @@ public class TestJsonSerDe extends TestC
c1_1.add(i2);
c1.add(c1_1);
rlist.add(c1);
-
+
List<Object> nlist = new ArrayList<Object>(13);
nlist.add(null); // tinyint
nlist.add(null); // smallint
@@ -97,12 +98,12 @@ public class TestJsonSerDe extends TestC
nlist.add(null); // bool
nlist.add(null); // complex
- String typeString =
+ String typeString =
"tinyint,smallint,int,bigint,double,float,string,string,"
+ "struct<a:string,b:string>,array<int>,map<smallint,string>,boolean,"
+ "array<struct<i1:int,i2:struct<ii1:array<int>,ii2:map<string,struct<iii1:int>>>>>";
Properties props = new Properties();
-
+
props.put(Constants.LIST_COLUMNS, "ti,si,i,bi,d,f,s,n,r,l,m,b,c1");
props.put(Constants.LIST_COLUMN_TYPES, typeString);
// props.put(Constants.SERIALIZATION_NULL_FORMAT, "\\N");
@@ -120,26 +121,26 @@ public class TestJsonSerDe extends TestC
for (Pair<Properties,HCatRecord> e : getData()){
Properties tblProps = e.first;
HCatRecord r = e.second;
-
+
HCatRecordSerDe hrsd = new HCatRecordSerDe();
hrsd.initialize(conf, tblProps);
JsonSerDe jsde = new JsonSerDe();
jsde.initialize(conf, tblProps);
-
- System.out.println("ORIG:"+r.toString());
+
+ LOG.info("ORIG:{}",r);
Writable s = hrsd.serialize(r,hrsd.getObjectInspector());
- System.out.println("ONE:"+s.toString());
-
+ LOG.info("ONE:{}",s);
+
Object o1 = hrsd.deserialize(s);
assertTrue(HCatDataCheckUtil.recordsEqual(r, (HCatRecord) o1));
-
+
Writable s2 = jsde.serialize(o1, hrsd.getObjectInspector());
- System.out.println("TWO:"+s2.toString());
+ LOG.info("TWO:{}",s2);
Object o2 = jsde.deserialize(s2);
- System.out.println("deserialized TWO : "+o2);
-
+ LOG.info("deserialized TWO : {} ", o2);
+
assertTrue(HCatDataCheckUtil.recordsEqual(r, (HCatRecord) o2));
}
Modified: incubator/hcatalog/branches/branch-0.4/src/test/org/apache/hcatalog/data/schema/TestHCatSchemaUtils.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/branches/branch-0.4/src/test/org/apache/hcatalog/data/schema/TestHCatSchemaUtils.java?rev=1332740&r1=1332739&r2=1332740&view=diff
==============================================================================
--- incubator/hcatalog/branches/branch-0.4/src/test/org/apache/hcatalog/data/schema/TestHCatSchemaUtils.java (original)
+++ incubator/hcatalog/branches/branch-0.4/src/test/org/apache/hcatalog/data/schema/TestHCatSchemaUtils.java Tue May 1 16:26:30 2012
@@ -24,13 +24,13 @@ import junit.framework.TestCase;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hcatalog.common.HCatException;
-import org.apache.hcatalog.data.schema.HCatFieldSchema;
-import org.apache.hcatalog.data.schema.HCatSchema;
-import org.apache.hcatalog.data.schema.HCatSchemaUtils;
import org.apache.hcatalog.data.schema.HCatFieldSchema.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
public class TestHCatSchemaUtils extends TestCase {
+ private static final Logger LOG = LoggerFactory.getLogger(TestHCatSchemaUtils.class);
public void testSimpleOperation() throws Exception{
String typeString = "struct<name:string,studentid:int,"
+ "contact:struct<phno:string,email:string>,"
@@ -41,8 +41,8 @@ public class TestHCatSchemaUtils extends
TypeInfo ti = TypeInfoUtils.getTypeInfoFromTypeString(typeString);
HCatSchema hsch = HCatSchemaUtils.getHCatSchemaFromTypeString(typeString);
- System.out.println(ti.getTypeName());
- System.out.println(hsch.toString());
+ LOG.info(ti.getTypeName());
+ LOG.info("HCatSchema : {}",hsch);
assertEquals(ti.getTypeName(),hsch.toString());
assertEquals(hsch.toString(),typeString);
}
Modified: incubator/hcatalog/branches/branch-0.4/src/test/org/apache/hcatalog/mapreduce/HCatMapReduceTest.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/branches/branch-0.4/src/test/org/apache/hcatalog/mapreduce/HCatMapReduceTest.java?rev=1332740&r1=1332739&r2=1332740&view=diff
==============================================================================
--- incubator/hcatalog/branches/branch-0.4/src/test/org/apache/hcatalog/mapreduce/HCatMapReduceTest.java (original)
+++ incubator/hcatalog/branches/branch-0.4/src/test/org/apache/hcatalog/mapreduce/HCatMapReduceTest.java Tue May 1 16:26:30 2012
@@ -58,6 +58,8 @@ import org.apache.hcatalog.data.DefaultH
import org.apache.hcatalog.data.HCatRecord;
import org.apache.hcatalog.data.schema.HCatFieldSchema;
import org.apache.hcatalog.data.schema.HCatSchema;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
/**
* Test for HCatOutputFormat. Writes a partition using HCatOutputFormat and reads
@@ -65,6 +67,7 @@ import org.apache.hcatalog.data.schema.H
*/
public abstract class HCatMapReduceTest extends TestCase {
+ private static final Logger LOG = LoggerFactory.getLogger(HCatMapReduceTest.class);
protected String dbName = "default";
protected String tableName = "testHCatMapReduceTable";
@@ -104,7 +107,7 @@ public abstract class HCatMapReduceTest
thriftUri = System.getenv("HCAT_METASTORE_URI");
if( thriftUri != null ) {
- System.out.println("Using URI " + thriftUri);
+ LOG.info("Using URI {}", thriftUri);
hiveConf.set("hive.metastore.local", "false");
hiveConf.set(HiveConf.ConfVars.METASTOREURIS.varname, thriftUri);
Modified: incubator/hcatalog/branches/branch-0.4/src/test/org/apache/hcatalog/mapreduce/TestHCatDynamicPartitioned.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/branches/branch-0.4/src/test/org/apache/hcatalog/mapreduce/TestHCatDynamicPartitioned.java?rev=1332740&r1=1332739&r2=1332740&view=diff
==============================================================================
--- incubator/hcatalog/branches/branch-0.4/src/test/org/apache/hcatalog/mapreduce/TestHCatDynamicPartitioned.java (original)
+++ incubator/hcatalog/branches/branch-0.4/src/test/org/apache/hcatalog/mapreduce/TestHCatDynamicPartitioned.java Tue May 1 16:26:30 2012
@@ -20,16 +20,10 @@ package org.apache.hcatalog.mapreduce;
import java.io.IOException;
import java.util.ArrayList;
-import java.util.HashMap;
import java.util.List;
-import java.util.Map;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
import org.apache.hadoop.hive.serde.Constants;
import org.apache.hcatalog.common.ErrorType;
import org.apache.hcatalog.common.HCatConstants;
@@ -38,11 +32,14 @@ import org.apache.hcatalog.data.DefaultH
import org.apache.hcatalog.data.HCatRecord;
import org.apache.hcatalog.data.schema.HCatFieldSchema;
import org.apache.hcatalog.data.schema.HCatSchemaUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
public class TestHCatDynamicPartitioned extends HCatMapReduceTest {
private List<HCatRecord> writeRecords;
private List<HCatFieldSchema> dataColumns;
+ private static final Logger LOG = LoggerFactory.getLogger(TestHCatDynamicPartitioned.class);
@Override
protected void initialize() throws Exception {
@@ -101,7 +98,7 @@ public class TestHCatDynamicPartitioned
runMRRead(4, "p1 = \"4\"");
// read from hive to test
-
+
String query = "select * from " + tableName;
int retCode = driver.run(query).getResponseCode();
@@ -113,7 +110,7 @@ public class TestHCatDynamicPartitioned
driver.getResults(res);
assertEquals(20, res.size());
-
+
//Test for duplicate publish
IOException exc = null;
try {
@@ -127,8 +124,8 @@ public class TestHCatDynamicPartitioned
assertTrue(exc instanceof HCatException);
assertTrue( "Got exception of type ["+((HCatException) exc).getErrorType().toString()
+ "] Expected ERROR_PUBLISHING_PARTITION or ERROR_MOVE_FAILED",
- (ErrorType.ERROR_PUBLISHING_PARTITION == ((HCatException) exc).getErrorType())
- || (ErrorType.ERROR_MOVE_FAILED == ((HCatException) exc).getErrorType())
+ (ErrorType.ERROR_PUBLISHING_PARTITION == ((HCatException) exc).getErrorType())
+ || (ErrorType.ERROR_MOVE_FAILED == ((HCatException) exc).getErrorType())
);
}
@@ -138,7 +135,7 @@ public class TestHCatDynamicPartitioned
HiveConf hc = new HiveConf(this.getClass());
int maxParts = hiveConf.getIntVar(HiveConf.ConfVars.DYNAMICPARTITIONMAXPARTS);
- System.out.println("Max partitions allowed = " + maxParts);
+ LOG.info("Max partitions allowed = {}", maxParts);
IOException exc = null;
try {